@smilintux/skmemory 0.5.0 → 0.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +40 -4
- package/.github/workflows/publish.yml +11 -5
- package/AGENT_REFACTOR_CHANGES.md +192 -0
- package/ARCHITECTURE.md +399 -19
- package/CHANGELOG.md +179 -0
- package/LICENSE +81 -68
- package/MISSION.md +7 -0
- package/README.md +425 -86
- package/SKILL.md +197 -25
- package/docker-compose.yml +15 -15
- package/examples/stignore-agent.example +59 -0
- package/examples/stignore-root.example +62 -0
- package/index.js +6 -5
- package/openclaw-plugin/openclaw.plugin.json +10 -0
- package/openclaw-plugin/package.json +2 -1
- package/openclaw-plugin/src/index.js +527 -230
- package/openclaw-plugin/src/openclaw.plugin.json +10 -0
- package/package.json +1 -1
- package/pyproject.toml +32 -9
- package/requirements.txt +10 -2
- package/scripts/dream-rescue.py +179 -0
- package/scripts/memory-cleanup.py +313 -0
- package/scripts/recover-missing.py +180 -0
- package/scripts/skcapstone-backup.sh +44 -0
- package/seeds/cloud9-lumina.seed.json +6 -4
- package/seeds/cloud9-opus.seed.json +13 -11
- package/seeds/courage.seed.json +9 -2
- package/seeds/curiosity.seed.json +9 -2
- package/seeds/grief.seed.json +9 -2
- package/seeds/joy.seed.json +9 -2
- package/seeds/love.seed.json +9 -2
- package/seeds/lumina-cloud9-breakthrough.seed.json +48 -0
- package/seeds/lumina-cloud9-python-pypi.seed.json +48 -0
- package/seeds/lumina-kingdom-founding.seed.json +49 -0
- package/seeds/lumina-pma-signed.seed.json +48 -0
- package/seeds/lumina-singular-achievement.seed.json +48 -0
- package/seeds/lumina-skcapstone-conscious.seed.json +48 -0
- package/seeds/plant-kingdom-journal.py +203 -0
- package/seeds/plant-lumina-seeds.py +280 -0
- package/seeds/skcapstone-lumina-merge.seed.json +12 -3
- package/seeds/sovereignty.seed.json +9 -2
- package/seeds/trust.seed.json +9 -2
- package/skill.yaml +46 -0
- package/skmemory/HA.md +296 -0
- package/skmemory/__init__.py +25 -11
- package/skmemory/agents.py +233 -0
- package/skmemory/ai_client.py +46 -17
- package/skmemory/anchor.py +9 -11
- package/skmemory/audience.py +278 -0
- package/skmemory/backends/__init__.py +11 -4
- package/skmemory/backends/base.py +3 -4
- package/skmemory/backends/file_backend.py +19 -13
- package/skmemory/backends/skgraph_backend.py +596 -0
- package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +103 -84
- package/skmemory/backends/sqlite_backend.py +226 -72
- package/skmemory/backends/vaulted_backend.py +284 -0
- package/skmemory/cli.py +1345 -68
- package/skmemory/config.py +171 -0
- package/skmemory/context_loader.py +333 -0
- package/skmemory/data/audience_config.json +60 -0
- package/skmemory/endpoint_selector.py +391 -0
- package/skmemory/febs.py +225 -0
- package/skmemory/fortress.py +675 -0
- package/skmemory/graph_queries.py +238 -0
- package/skmemory/hooks/__init__.py +18 -0
- package/skmemory/hooks/post-compact-reinject.sh +35 -0
- package/skmemory/hooks/pre-compact-save.sh +81 -0
- package/skmemory/hooks/session-end-save.sh +103 -0
- package/skmemory/hooks/session-start-ritual.sh +104 -0
- package/skmemory/hooks/stop-checkpoint.sh +59 -0
- package/skmemory/importers/__init__.py +9 -1
- package/skmemory/importers/telegram.py +384 -47
- package/skmemory/importers/telegram_api.py +580 -0
- package/skmemory/journal.py +7 -9
- package/skmemory/lovenote.py +8 -13
- package/skmemory/mcp_server.py +859 -0
- package/skmemory/models.py +51 -8
- package/skmemory/openclaw.py +20 -28
- package/skmemory/post_install.py +86 -0
- package/skmemory/predictive.py +236 -0
- package/skmemory/promotion.py +548 -0
- package/skmemory/quadrants.py +100 -24
- package/skmemory/register.py +580 -0
- package/skmemory/register_mcp.py +196 -0
- package/skmemory/ritual.py +224 -59
- package/skmemory/seeds.py +255 -11
- package/skmemory/setup_wizard.py +908 -0
- package/skmemory/sharing.py +408 -0
- package/skmemory/soul.py +98 -28
- package/skmemory/steelman.py +273 -260
- package/skmemory/store.py +411 -78
- package/skmemory/synthesis.py +634 -0
- package/skmemory/vault.py +225 -0
- package/tests/conftest.py +46 -0
- package/tests/integration/__init__.py +0 -0
- package/tests/integration/conftest.py +233 -0
- package/tests/integration/test_cross_backend.py +350 -0
- package/tests/integration/test_skgraph_live.py +420 -0
- package/tests/integration/test_skvector_live.py +366 -0
- package/tests/test_ai_client.py +1 -4
- package/tests/test_audience.py +233 -0
- package/tests/test_backup_rotation.py +318 -0
- package/tests/test_cli.py +6 -6
- package/tests/test_endpoint_selector.py +839 -0
- package/tests/test_export_import.py +4 -10
- package/tests/test_file_backend.py +0 -1
- package/tests/test_fortress.py +256 -0
- package/tests/test_fortress_hardening.py +441 -0
- package/tests/test_openclaw.py +6 -6
- package/tests/test_predictive.py +237 -0
- package/tests/test_promotion.py +347 -0
- package/tests/test_quadrants.py +11 -5
- package/tests/test_ritual.py +22 -18
- package/tests/test_seeds.py +97 -7
- package/tests/test_setup.py +950 -0
- package/tests/test_sharing.py +257 -0
- package/tests/test_skgraph_backend.py +660 -0
- package/tests/test_skvector_backend.py +326 -0
- package/tests/test_soul.py +1 -3
- package/tests/test_sqlite_backend.py +8 -17
- package/tests/test_steelman.py +7 -8
- package/tests/test_store.py +0 -2
- package/tests/test_store_graph_integration.py +245 -0
- package/tests/test_synthesis.py +275 -0
- package/tests/test_telegram_import.py +39 -15
- package/tests/test_vault.py +187 -0
- package/skmemory/backends/falkordb_backend.py +0 -310
package/skmemory/cli.py
CHANGED
|
@@ -6,8 +6,11 @@ Usage:
|
|
|
6
6
|
skmemory recall <memory-id>
|
|
7
7
|
skmemory search "that moment we connected"
|
|
8
8
|
skmemory list --layer long-term --tags seed
|
|
9
|
-
skmemory import-seeds [--seed-dir ~/.
|
|
9
|
+
skmemory import-seeds [--seed-dir ~/.skcapstone/agent/{agent}/seeds]
|
|
10
10
|
skmemory promote <memory-id> --to mid-term --summary "..."
|
|
11
|
+
skmemory sweep # Auto-promote all qualifying memories
|
|
12
|
+
skmemory sweep --dry-run # Preview what would be promoted
|
|
13
|
+
skmemory sweep --daemon # Run continuously every 6 hours
|
|
11
14
|
skmemory consolidate <session-id> --summary "..."
|
|
12
15
|
skmemory soul show | soul set-name "Lumina" | soul add-relationship ...
|
|
13
16
|
skmemory journal write "Session title" --moments "..." --intensity 9.0
|
|
@@ -23,7 +26,7 @@ from __future__ import annotations
|
|
|
23
26
|
|
|
24
27
|
import json
|
|
25
28
|
import sys
|
|
26
|
-
from
|
|
29
|
+
from pathlib import Path
|
|
27
30
|
|
|
28
31
|
import click
|
|
29
32
|
|
|
@@ -31,51 +34,127 @@ from . import __version__
|
|
|
31
34
|
from .ai_client import AIClient
|
|
32
35
|
from .models import EmotionalSnapshot, MemoryLayer, MemoryRole
|
|
33
36
|
from .store import MemoryStore
|
|
34
|
-
|
|
37
|
+
|
|
38
|
+
_active_selector = None # Module-level reference for routing commands
|
|
35
39
|
|
|
36
40
|
|
|
37
41
|
def _get_store(
|
|
38
|
-
|
|
39
|
-
api_key:
|
|
42
|
+
skvector_url: str | None = None,
|
|
43
|
+
api_key: str | None = None,
|
|
40
44
|
legacy_files: bool = False,
|
|
41
45
|
) -> MemoryStore:
|
|
42
46
|
"""Create a MemoryStore with configured backends.
|
|
43
47
|
|
|
48
|
+
Resolves backend URLs with precedence: CLI args > env vars > config file.
|
|
49
|
+
When multi-endpoint config is present, uses EndpointSelector to pick
|
|
50
|
+
the best URLs. Falls back to single-URL behavior otherwise.
|
|
51
|
+
|
|
44
52
|
Args:
|
|
45
|
-
|
|
46
|
-
api_key: Optional
|
|
53
|
+
skvector_url: Optional SKVector server URL.
|
|
54
|
+
api_key: Optional SKVector API key.
|
|
47
55
|
legacy_files: Use old FileBackend instead of SQLite index.
|
|
48
56
|
|
|
49
57
|
Returns:
|
|
50
58
|
MemoryStore: Configured store instance.
|
|
51
59
|
"""
|
|
60
|
+
global _active_selector
|
|
61
|
+
|
|
62
|
+
from .config import build_endpoint_list, load_config, merge_env_and_config
|
|
63
|
+
|
|
64
|
+
final_skvector_url, final_skvector_key, final_skgraph_url = merge_env_and_config(
|
|
65
|
+
cli_skvector_url=skvector_url,
|
|
66
|
+
cli_skvector_key=api_key,
|
|
67
|
+
)
|
|
68
|
+
|
|
69
|
+
# Try endpoint selector when multi-endpoint config exists
|
|
70
|
+
cfg = load_config()
|
|
71
|
+
skvector_eps = build_endpoint_list(
|
|
72
|
+
final_skvector_url,
|
|
73
|
+
cfg.skvector_endpoints if cfg else [],
|
|
74
|
+
)
|
|
75
|
+
skgraph_eps = build_endpoint_list(
|
|
76
|
+
final_skgraph_url,
|
|
77
|
+
cfg.skgraph_endpoints if cfg else [],
|
|
78
|
+
)
|
|
79
|
+
|
|
80
|
+
if len(skvector_eps) > 1 or len(skgraph_eps) > 1 or (cfg and cfg.heartbeat_discovery):
|
|
81
|
+
try:
|
|
82
|
+
from .endpoint_selector import EndpointSelector, RoutingConfig
|
|
83
|
+
|
|
84
|
+
routing_strategy = cfg.routing_strategy if cfg else "failover"
|
|
85
|
+
selector = EndpointSelector(
|
|
86
|
+
skvector_endpoints=skvector_eps,
|
|
87
|
+
skgraph_endpoints=skgraph_eps,
|
|
88
|
+
config=RoutingConfig(strategy=routing_strategy),
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
if cfg and cfg.heartbeat_discovery:
|
|
92
|
+
selector.discover_from_heartbeats()
|
|
93
|
+
|
|
94
|
+
_active_selector = selector
|
|
95
|
+
|
|
96
|
+
best_skvector = selector.select_skvector()
|
|
97
|
+
if best_skvector:
|
|
98
|
+
final_skvector_url = best_skvector.url
|
|
99
|
+
|
|
100
|
+
best_skgraph = selector.select_skgraph()
|
|
101
|
+
if best_skgraph:
|
|
102
|
+
final_skgraph_url = best_skgraph.url
|
|
103
|
+
except Exception:
|
|
104
|
+
click.echo("Warning: EndpointSelector failed, using single URLs", err=True)
|
|
105
|
+
|
|
52
106
|
vector = None
|
|
107
|
+
graph = None
|
|
108
|
+
|
|
109
|
+
if final_skvector_url:
|
|
110
|
+
try:
|
|
111
|
+
from .backends.skvector_backend import SKVectorBackend
|
|
112
|
+
|
|
113
|
+
vector = SKVectorBackend(url=final_skvector_url, api_key=final_skvector_key)
|
|
114
|
+
except Exception:
|
|
115
|
+
click.echo("Warning: Could not initialize SKVector backend", err=True)
|
|
53
116
|
|
|
54
|
-
if
|
|
117
|
+
if final_skgraph_url:
|
|
55
118
|
try:
|
|
56
|
-
from .backends.
|
|
57
|
-
|
|
119
|
+
from .backends.skgraph_backend import SKGraphBackend
|
|
120
|
+
|
|
121
|
+
graph = SKGraphBackend(url=final_skgraph_url)
|
|
58
122
|
except Exception:
|
|
59
|
-
click.echo("Warning: Could not initialize
|
|
123
|
+
click.echo("Warning: Could not initialize SKGraph backend", err=True)
|
|
60
124
|
|
|
61
|
-
return MemoryStore(primary=None, vector=vector, use_sqlite=not legacy_files)
|
|
125
|
+
return MemoryStore(primary=None, vector=vector, graph=graph, use_sqlite=not legacy_files)
|
|
62
126
|
|
|
63
127
|
|
|
64
128
|
@click.group()
|
|
65
129
|
@click.version_option(__version__, prog_name="skmemory")
|
|
66
|
-
@click.option(
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
@click.option(
|
|
130
|
+
@click.option(
|
|
131
|
+
"--skvector-url", envvar="SKMEMORY_SKVECTOR_URL", default=None, help="SKVector server URL"
|
|
132
|
+
)
|
|
133
|
+
@click.option(
|
|
134
|
+
"--skvector-key", envvar="SKMEMORY_SKVECTOR_KEY", default=None, help="SKVector API key"
|
|
135
|
+
)
|
|
136
|
+
@click.option(
|
|
137
|
+
"--ai",
|
|
138
|
+
"use_ai",
|
|
139
|
+
is_flag=True,
|
|
140
|
+
envvar="SKMEMORY_AI",
|
|
141
|
+
help="Enable AI-powered features (requires Ollama)",
|
|
142
|
+
)
|
|
143
|
+
@click.option(
|
|
144
|
+
"--ai-model",
|
|
145
|
+
envvar="SKMEMORY_AI_MODEL",
|
|
146
|
+
default=None,
|
|
147
|
+
help="Ollama model name (default: llama3.2)",
|
|
148
|
+
)
|
|
70
149
|
@click.option("--ai-url", envvar="SKMEMORY_AI_URL", default=None, help="Ollama server URL")
|
|
71
150
|
@click.pass_context
|
|
72
151
|
def cli(
|
|
73
152
|
ctx: click.Context,
|
|
74
|
-
|
|
75
|
-
|
|
153
|
+
skvector_url: str | None,
|
|
154
|
+
skvector_key: str | None,
|
|
76
155
|
use_ai: bool,
|
|
77
|
-
ai_model:
|
|
78
|
-
ai_url:
|
|
156
|
+
ai_model: str | None,
|
|
157
|
+
ai_url: str | None,
|
|
79
158
|
) -> None:
|
|
80
159
|
"""SKMemory - Universal AI Memory System.
|
|
81
160
|
|
|
@@ -85,7 +164,8 @@ def cli(
|
|
|
85
164
|
smart search reranking, enhanced rituals). Requires Ollama.
|
|
86
165
|
"""
|
|
87
166
|
ctx.ensure_object(dict)
|
|
88
|
-
|
|
167
|
+
if "store" not in ctx.obj:
|
|
168
|
+
ctx.obj["store"] = _get_store(skvector_url, skvector_key)
|
|
89
169
|
|
|
90
170
|
if use_ai:
|
|
91
171
|
ai = AIClient(base_url=ai_url, model=ai_model)
|
|
@@ -105,8 +185,12 @@ def cli(
|
|
|
105
185
|
@cli.command()
|
|
106
186
|
@click.argument("title")
|
|
107
187
|
@click.argument("content")
|
|
108
|
-
@click.option(
|
|
109
|
-
|
|
188
|
+
@click.option(
|
|
189
|
+
"--layer", type=click.Choice(["short-term", "mid-term", "long-term"]), default="short-term"
|
|
190
|
+
)
|
|
191
|
+
@click.option(
|
|
192
|
+
"--role", type=click.Choice(["dev", "ops", "sec", "ai", "general"]), default="general"
|
|
193
|
+
)
|
|
110
194
|
@click.option("--tags", default="", help="Comma-separated tags")
|
|
111
195
|
@click.option("--intensity", type=float, default=0.0, help="Emotional intensity 0-10")
|
|
112
196
|
@click.option("--valence", type=float, default=0.0, help="Emotional valence -1 to +1")
|
|
@@ -156,10 +240,29 @@ def snapshot(
|
|
|
156
240
|
@click.argument("memory_id")
|
|
157
241
|
@click.pass_context
|
|
158
242
|
def recall(ctx: click.Context, memory_id: str) -> None:
|
|
159
|
-
"""Retrieve a specific memory by ID."""
|
|
243
|
+
"""Retrieve a specific memory by ID (supports partial ID prefix)."""
|
|
160
244
|
store: MemoryStore = ctx.obj["store"]
|
|
161
245
|
memory = store.recall(memory_id)
|
|
162
246
|
|
|
247
|
+
# If exact match failed, try prefix matching across memory tier dirs
|
|
248
|
+
if memory is None and len(memory_id) >= 6:
|
|
249
|
+
import os
|
|
250
|
+
from pathlib import Path
|
|
251
|
+
|
|
252
|
+
agent = os.environ.get("SKCAPSTONE_AGENT", "lumina")
|
|
253
|
+
mem_root = Path.home() / ".skcapstone" / "agents" / agent / "memory"
|
|
254
|
+
|
|
255
|
+
for tier in ("short-term", "mid-term", "long-term"):
|
|
256
|
+
tier_dir = mem_root / tier
|
|
257
|
+
if not tier_dir.is_dir():
|
|
258
|
+
continue
|
|
259
|
+
for f in tier_dir.glob(f"{memory_id}*.json"):
|
|
260
|
+
memory = store.recall(f.stem)
|
|
261
|
+
if memory:
|
|
262
|
+
break
|
|
263
|
+
if memory:
|
|
264
|
+
break
|
|
265
|
+
|
|
163
266
|
if memory is None:
|
|
164
267
|
click.echo(f"Memory not found: {memory_id}", err=True)
|
|
165
268
|
sys.exit(1)
|
|
@@ -180,7 +283,7 @@ def search(ctx: click.Context, query: str, limit: int) -> None:
|
|
|
180
283
|
click.echo("No memories found.")
|
|
181
284
|
return
|
|
182
285
|
|
|
183
|
-
ai:
|
|
286
|
+
ai: AIClient | None = ctx.obj.get("ai")
|
|
184
287
|
if ai and len(results) > 1:
|
|
185
288
|
summaries = [
|
|
186
289
|
{
|
|
@@ -194,9 +297,7 @@ def search(ctx: click.Context, query: str, limit: int) -> None:
|
|
|
194
297
|
id_order = [s.get("title") for s in reranked]
|
|
195
298
|
results = sorted(
|
|
196
299
|
results,
|
|
197
|
-
key=lambda m: (
|
|
198
|
-
id_order.index(m.title) if m.title in id_order else 999
|
|
199
|
-
),
|
|
300
|
+
key=lambda m: id_order.index(m.title) if m.title in id_order else 999,
|
|
200
301
|
)
|
|
201
302
|
click.echo("(AI-reranked results)\n")
|
|
202
303
|
|
|
@@ -213,7 +314,7 @@ def search(ctx: click.Context, query: str, limit: int) -> None:
|
|
|
213
314
|
@click.option("--tags", default="", help="Comma-separated tags to filter by")
|
|
214
315
|
@click.option("--limit", type=int, default=20)
|
|
215
316
|
@click.pass_context
|
|
216
|
-
def list_memories(ctx: click.Context, layer:
|
|
317
|
+
def list_memories(ctx: click.Context, layer: str | None, tags: str, limit: int) -> None:
|
|
217
318
|
"""List stored memories."""
|
|
218
319
|
store: MemoryStore = ctx.obj["store"]
|
|
219
320
|
|
|
@@ -238,9 +339,9 @@ def list_memories(ctx: click.Context, layer: Optional[str], tags: str, limit: in
|
|
|
238
339
|
@cli.command("import-seeds")
|
|
239
340
|
@click.option("--seed-dir", default=None, help="Path to seed directory")
|
|
240
341
|
@click.pass_context
|
|
241
|
-
def import_seeds_cmd(ctx: click.Context, seed_dir:
|
|
342
|
+
def import_seeds_cmd(ctx: click.Context, seed_dir: str | None) -> None:
|
|
242
343
|
"""Import Cloud 9 seeds as long-term memories."""
|
|
243
|
-
from .seeds import
|
|
344
|
+
from .seeds import DEFAULT_SEED_DIR, import_seeds
|
|
244
345
|
|
|
245
346
|
store: MemoryStore = ctx.obj["store"]
|
|
246
347
|
directory = seed_dir or DEFAULT_SEED_DIR
|
|
@@ -257,22 +358,147 @@ def import_seeds_cmd(ctx: click.Context, seed_dir: Optional[str]) -> None:
|
|
|
257
358
|
click.echo(f" {mem.source_ref} -> {mem.id[:12]}.. [{mem.title}]")
|
|
258
359
|
|
|
259
360
|
|
|
260
|
-
@cli.command()
|
|
261
|
-
@click.
|
|
262
|
-
@click.option("--
|
|
263
|
-
@click.option(
|
|
361
|
+
@cli.command("sweep")
|
|
362
|
+
@click.option("--dry-run", is_flag=True, help="Show what would be promoted without making changes")
|
|
363
|
+
@click.option("--daemon", is_flag=True, help="Run continuously at the configured interval")
|
|
364
|
+
@click.option(
|
|
365
|
+
"--interval",
|
|
366
|
+
type=float,
|
|
367
|
+
default=6.0,
|
|
368
|
+
metavar="HOURS",
|
|
369
|
+
help="Sweep interval in hours (daemon mode only, default: 6)",
|
|
370
|
+
)
|
|
371
|
+
@click.option("--max-promotions", type=int, default=50, help="Max promotions per sweep")
|
|
372
|
+
@click.option("--json", "as_json", is_flag=True, help="Output results as JSON")
|
|
264
373
|
@click.pass_context
|
|
265
|
-
def
|
|
266
|
-
|
|
374
|
+
def sweep_cmd(
|
|
375
|
+
ctx: click.Context,
|
|
376
|
+
dry_run: bool,
|
|
377
|
+
daemon: bool,
|
|
378
|
+
interval: float,
|
|
379
|
+
max_promotions: int,
|
|
380
|
+
as_json: bool,
|
|
381
|
+
) -> None:
|
|
382
|
+
"""Run the auto-promotion engine.
|
|
383
|
+
|
|
384
|
+
Evaluates all memories and promotes qualifying ones to the next tier:
|
|
385
|
+
|
|
386
|
+
\b
|
|
387
|
+
short-term -> mid-term: high emotional intensity, frequently accessed,
|
|
388
|
+
or sufficiently old with multiple accesses
|
|
389
|
+
mid-term -> long-term: very high intensity, key tags (milestone,
|
|
390
|
+
breakthrough, cloud9:achieved), or Cloud 9
|
|
391
|
+
|
|
392
|
+
By default runs a single sweep and exits. Use --daemon to keep running.
|
|
393
|
+
"""
|
|
394
|
+
from .promotion import PromotionCriteria, PromotionEngine, PromotionScheduler
|
|
395
|
+
|
|
267
396
|
store: MemoryStore = ctx.obj["store"]
|
|
268
|
-
|
|
397
|
+
criteria = PromotionCriteria(max_promotions_per_sweep=max_promotions)
|
|
269
398
|
|
|
270
|
-
if
|
|
271
|
-
|
|
272
|
-
|
|
399
|
+
if dry_run:
|
|
400
|
+
# Inspect without modifying anything
|
|
401
|
+
engine = PromotionEngine(store, criteria)
|
|
402
|
+
short_mems = store.list_memories(
|
|
403
|
+
layer=MemoryLayer.SHORT, limit=criteria.max_promotions_per_sweep * 2
|
|
404
|
+
)
|
|
405
|
+
mid_mems = store.list_memories(
|
|
406
|
+
layer=MemoryLayer.MID, limit=criteria.max_promotions_per_sweep * 2
|
|
407
|
+
)
|
|
273
408
|
|
|
274
|
-
|
|
275
|
-
|
|
409
|
+
would_promote: list[dict] = []
|
|
410
|
+
for mem in short_mems:
|
|
411
|
+
target = engine.evaluate(mem)
|
|
412
|
+
if target is not None:
|
|
413
|
+
would_promote.append(
|
|
414
|
+
{
|
|
415
|
+
"id": mem.id,
|
|
416
|
+
"title": mem.title,
|
|
417
|
+
"from": mem.layer.value,
|
|
418
|
+
"to": target.value,
|
|
419
|
+
"reason": engine._promotion_reason(mem),
|
|
420
|
+
}
|
|
421
|
+
)
|
|
422
|
+
for mem in mid_mems:
|
|
423
|
+
target = engine.evaluate(mem)
|
|
424
|
+
if target is not None:
|
|
425
|
+
would_promote.append(
|
|
426
|
+
{
|
|
427
|
+
"id": mem.id,
|
|
428
|
+
"title": mem.title,
|
|
429
|
+
"from": mem.layer.value,
|
|
430
|
+
"to": target.value,
|
|
431
|
+
"reason": engine._promotion_reason(mem),
|
|
432
|
+
}
|
|
433
|
+
)
|
|
434
|
+
|
|
435
|
+
if as_json:
|
|
436
|
+
click.echo(json.dumps({"dry_run": True, "would_promote": would_promote}, indent=2))
|
|
437
|
+
else:
|
|
438
|
+
if not would_promote:
|
|
439
|
+
click.echo("[dry-run] Nothing qualifies for promotion right now.")
|
|
440
|
+
else:
|
|
441
|
+
click.echo(f"[dry-run] {len(would_promote)} memory/memories would be promoted:")
|
|
442
|
+
for entry in would_promote:
|
|
443
|
+
click.echo(
|
|
444
|
+
f" {entry['id'][:12]} {entry['from']} -> {entry['to']}"
|
|
445
|
+
f" [{entry['title'][:50]}] reason: {entry['reason']}"
|
|
446
|
+
)
|
|
447
|
+
|
|
448
|
+
elif daemon:
|
|
449
|
+
import signal
|
|
450
|
+
import time
|
|
451
|
+
|
|
452
|
+
scheduler = PromotionScheduler(
|
|
453
|
+
store,
|
|
454
|
+
criteria=criteria,
|
|
455
|
+
interval_seconds=interval * 3600,
|
|
456
|
+
)
|
|
457
|
+
|
|
458
|
+
def _handle_signal(signum: int, frame: object) -> None:
|
|
459
|
+
click.echo("\nShutting down promotion scheduler...", err=True)
|
|
460
|
+
scheduler.stop(timeout=10.0)
|
|
461
|
+
sys.exit(0)
|
|
462
|
+
|
|
463
|
+
signal.signal(signal.SIGINT, _handle_signal)
|
|
464
|
+
signal.signal(signal.SIGTERM, _handle_signal)
|
|
465
|
+
|
|
466
|
+
click.echo(
|
|
467
|
+
f"Promotion scheduler running (interval: {interval:.1f}h). Press Ctrl+C to stop.",
|
|
468
|
+
err=True,
|
|
469
|
+
)
|
|
470
|
+
|
|
471
|
+
# Run first sweep immediately, then hand off to background thread
|
|
472
|
+
result = scheduler.run_once()
|
|
473
|
+
if as_json:
|
|
474
|
+
click.echo(json.dumps(result.model_dump(), indent=2, default=str))
|
|
475
|
+
else:
|
|
476
|
+
click.echo(result.summary())
|
|
477
|
+
|
|
478
|
+
scheduler.start()
|
|
479
|
+
|
|
480
|
+
# Keep the main thread alive so signal handlers fire
|
|
481
|
+
while scheduler.is_running():
|
|
482
|
+
time.sleep(1)
|
|
483
|
+
|
|
484
|
+
else:
|
|
485
|
+
# Single one-shot sweep
|
|
486
|
+
engine = PromotionEngine(store, criteria)
|
|
487
|
+
result = engine.sweep()
|
|
488
|
+
|
|
489
|
+
if as_json:
|
|
490
|
+
click.echo(json.dumps(result.model_dump(), indent=2, default=str))
|
|
491
|
+
else:
|
|
492
|
+
click.echo(result.summary())
|
|
493
|
+
if result.short_evaluated or result.mid_evaluated:
|
|
494
|
+
click.echo(
|
|
495
|
+
f" Evaluated: {result.short_evaluated} short-term, {result.mid_evaluated} mid-term"
|
|
496
|
+
)
|
|
497
|
+
if result.promoted_ids:
|
|
498
|
+
ids_preview = ", ".join(p[:12] for p in result.promoted_ids[:5])
|
|
499
|
+
if len(result.promoted_ids) > 5:
|
|
500
|
+
ids_preview += f" (+{len(result.promoted_ids) - 5} more)"
|
|
501
|
+
click.echo(f" Promoted: {ids_preview}")
|
|
276
502
|
|
|
277
503
|
|
|
278
504
|
@cli.command()
|
|
@@ -300,7 +526,7 @@ def consolidate(
|
|
|
300
526
|
click.echo(f"Session consolidated: {consolidated.id}")
|
|
301
527
|
click.echo(f" Source memories linked: {len(consolidated.related_ids)}")
|
|
302
528
|
|
|
303
|
-
ai:
|
|
529
|
+
ai: AIClient | None = ctx.obj.get("ai")
|
|
304
530
|
if ai and consolidated.content:
|
|
305
531
|
ai_summary = ai.summarize_memory(consolidated.title, consolidated.content)
|
|
306
532
|
if ai_summary:
|
|
@@ -316,6 +542,67 @@ def health(ctx: click.Context) -> None:
|
|
|
316
542
|
click.echo(json.dumps(status, indent=2))
|
|
317
543
|
|
|
318
544
|
|
|
545
|
+
# ═══════════════════════════════════════════════════════════
|
|
546
|
+
# Routing commands (HA endpoint selection)
|
|
547
|
+
# ═══════════════════════════════════════════════════════════
|
|
548
|
+
|
|
549
|
+
|
|
550
|
+
@cli.group()
|
|
551
|
+
def routing() -> None:
|
|
552
|
+
"""Manage HA endpoint routing for SKVector and SKGraph backends."""
|
|
553
|
+
|
|
554
|
+
|
|
555
|
+
@routing.command("status")
|
|
556
|
+
def routing_status() -> None:
|
|
557
|
+
"""Show endpoint rankings, latency, and health for each backend."""
|
|
558
|
+
if _active_selector is None:
|
|
559
|
+
click.echo("No endpoint selector active (single-URL mode).")
|
|
560
|
+
click.echo("Configure multiple endpoints in ~/.skcapstone/config.yaml to enable routing.")
|
|
561
|
+
return
|
|
562
|
+
|
|
563
|
+
info = _active_selector.status()
|
|
564
|
+
click.echo(f"Strategy: {info['strategy']}")
|
|
565
|
+
click.echo(f"Probe interval: {info['probe_interval_seconds']}s")
|
|
566
|
+
age = info["last_probe_age_seconds"]
|
|
567
|
+
click.echo(f"Last probe: {age}s ago" if age >= 0 else "Last probe: never")
|
|
568
|
+
|
|
569
|
+
for backend in ("skvector", "skgraph"):
|
|
570
|
+
eps = info.get(f"{backend}_endpoints", [])
|
|
571
|
+
if not eps:
|
|
572
|
+
continue
|
|
573
|
+
click.echo(f"\n{backend.upper()} endpoints:")
|
|
574
|
+
for ep in eps:
|
|
575
|
+
health_icon = "OK" if ep["healthy"] else "DOWN"
|
|
576
|
+
latency = f"{ep['latency_ms']:.1f}ms" if ep["latency_ms"] >= 0 else "n/a"
|
|
577
|
+
click.echo(
|
|
578
|
+
f" [{health_icon}] {ep['url']} "
|
|
579
|
+
f"role={ep['role']} latency={latency} "
|
|
580
|
+
f"fails={ep['fail_count']}"
|
|
581
|
+
)
|
|
582
|
+
|
|
583
|
+
|
|
584
|
+
@routing.command("probe")
|
|
585
|
+
def routing_probe() -> None:
|
|
586
|
+
"""Force re-probe all endpoints and display results."""
|
|
587
|
+
if _active_selector is None:
|
|
588
|
+
click.echo("No endpoint selector active (single-URL mode).")
|
|
589
|
+
return
|
|
590
|
+
|
|
591
|
+
click.echo("Probing all endpoints...")
|
|
592
|
+
results = _active_selector.probe_all()
|
|
593
|
+
|
|
594
|
+
for backend, endpoints in results.items():
|
|
595
|
+
if not endpoints:
|
|
596
|
+
continue
|
|
597
|
+
click.echo(f"\n{backend.upper()}:")
|
|
598
|
+
for ep in endpoints:
|
|
599
|
+
health_icon = "OK" if ep.healthy else "DOWN"
|
|
600
|
+
latency = f"{ep.latency_ms:.1f}ms" if ep.latency_ms >= 0 else "timeout"
|
|
601
|
+
click.echo(f" [{health_icon}] {ep.url} latency={latency} fails={ep.fail_count}")
|
|
602
|
+
|
|
603
|
+
click.echo("\nProbe complete.")
|
|
604
|
+
|
|
605
|
+
|
|
319
606
|
@cli.command()
|
|
320
607
|
@click.pass_context
|
|
321
608
|
def reindex(ctx: click.Context) -> None:
|
|
@@ -332,10 +619,15 @@ def reindex(ctx: click.Context) -> None:
|
|
|
332
619
|
|
|
333
620
|
|
|
334
621
|
@cli.command("export")
|
|
335
|
-
@click.option(
|
|
336
|
-
|
|
622
|
+
@click.option(
|
|
623
|
+
"--output",
|
|
624
|
+
"-o",
|
|
625
|
+
default=None,
|
|
626
|
+
type=click.Path(),
|
|
627
|
+
help="Output file path (default: ~/.skcapstone/backups/skmemory-backup-YYYY-MM-DD.json)",
|
|
628
|
+
)
|
|
337
629
|
@click.pass_context
|
|
338
|
-
def export_backup(ctx: click.Context, output:
|
|
630
|
+
def export_backup(ctx: click.Context, output: str | None) -> None:
|
|
339
631
|
"""Export all memories to a dated JSON backup.
|
|
340
632
|
|
|
341
633
|
Creates a single git-friendly JSON file containing every memory.
|
|
@@ -352,8 +644,9 @@ def export_backup(ctx: click.Context, output: Optional[str]) -> None:
|
|
|
352
644
|
|
|
353
645
|
@cli.command("import-backup")
|
|
354
646
|
@click.argument("backup_file", type=click.Path(exists=True))
|
|
355
|
-
@click.option(
|
|
356
|
-
|
|
647
|
+
@click.option(
|
|
648
|
+
"--reindex/--no-reindex", default=True, help="Rebuild the index after import (default: yes)"
|
|
649
|
+
)
|
|
357
650
|
@click.pass_context
|
|
358
651
|
def import_backup(ctx: click.Context, backup_file: str, reindex: bool) -> None:
|
|
359
652
|
"""Restore memories from a JSON backup file.
|
|
@@ -374,6 +667,91 @@ def import_backup(ctx: click.Context, backup_file: str, reindex: bool) -> None:
|
|
|
374
667
|
sys.exit(1)
|
|
375
668
|
|
|
376
669
|
|
|
670
|
+
@cli.command("backup")
|
|
671
|
+
@click.option("--list", "do_list", is_flag=True, help="Show all backups with date and size.")
|
|
672
|
+
@click.option(
|
|
673
|
+
"--prune",
|
|
674
|
+
"prune_n",
|
|
675
|
+
type=int,
|
|
676
|
+
default=None,
|
|
677
|
+
metavar="N",
|
|
678
|
+
help="Keep only the N most recent backups, delete older ones.",
|
|
679
|
+
)
|
|
680
|
+
@click.option(
|
|
681
|
+
"--restore",
|
|
682
|
+
"restore_file",
|
|
683
|
+
type=click.Path(),
|
|
684
|
+
default=None,
|
|
685
|
+
metavar="FILE",
|
|
686
|
+
help="Restore memories from backup (alias for import-backup).",
|
|
687
|
+
)
|
|
688
|
+
@click.option(
|
|
689
|
+
"--reindex/--no-reindex", default=True, help="Rebuild index after --restore (default: yes)."
|
|
690
|
+
)
|
|
691
|
+
@click.pass_context
|
|
692
|
+
def backup_cmd(
|
|
693
|
+
ctx: click.Context,
|
|
694
|
+
do_list: bool,
|
|
695
|
+
prune_n: int | None,
|
|
696
|
+
restore_file: str | None,
|
|
697
|
+
reindex: bool,
|
|
698
|
+
) -> None:
|
|
699
|
+
"""Manage memory backups: list, prune old ones, or restore.
|
|
700
|
+
|
|
701
|
+
\b
|
|
702
|
+
Examples:
|
|
703
|
+
skmemory backup --list
|
|
704
|
+
skmemory backup --prune 7
|
|
705
|
+
skmemory backup --restore ~/.skcapstone/backups/skmemory-backup-2026-03-01.json
|
|
706
|
+
"""
|
|
707
|
+
store: MemoryStore = ctx.obj["store"]
|
|
708
|
+
|
|
709
|
+
if do_list:
|
|
710
|
+
backups = store.list_backups()
|
|
711
|
+
if not backups:
|
|
712
|
+
click.echo("No backups found.")
|
|
713
|
+
return
|
|
714
|
+
click.echo(f"{'Date':<12} {'Size':>10} Path")
|
|
715
|
+
click.echo("-" * 60)
|
|
716
|
+
for b in backups:
|
|
717
|
+
size_kb = b["size_bytes"] / 1024
|
|
718
|
+
click.echo(f"{b['date']:<12} {size_kb:>8.1f} KB {b['path']}")
|
|
719
|
+
return
|
|
720
|
+
|
|
721
|
+
if prune_n is not None:
|
|
722
|
+
if prune_n < 0:
|
|
723
|
+
click.echo("Error: N must be >= 0", err=True)
|
|
724
|
+
sys.exit(1)
|
|
725
|
+
deleted = store.prune_backups(keep=prune_n)
|
|
726
|
+
if deleted:
|
|
727
|
+
for p in deleted:
|
|
728
|
+
click.echo(f"Deleted: {p}")
|
|
729
|
+
click.echo(f"Pruned {len(deleted)} backup(s), kept {prune_n} most recent.")
|
|
730
|
+
else:
|
|
731
|
+
click.echo("Nothing to prune.")
|
|
732
|
+
return
|
|
733
|
+
|
|
734
|
+
if restore_file is not None:
|
|
735
|
+
from pathlib import Path as _Path
|
|
736
|
+
|
|
737
|
+
if not _Path(restore_file).exists():
|
|
738
|
+
click.echo(f"Error: backup file not found: {restore_file}", err=True)
|
|
739
|
+
sys.exit(1)
|
|
740
|
+
try:
|
|
741
|
+
count = store.import_backup(restore_file)
|
|
742
|
+
click.echo(f"Restored {count} memories from: {restore_file}")
|
|
743
|
+
if reindex:
|
|
744
|
+
idx = store.reindex()
|
|
745
|
+
if idx >= 0:
|
|
746
|
+
click.echo(f"Re-indexed {idx} memories.")
|
|
747
|
+
except (FileNotFoundError, ValueError, RuntimeError) as e:
|
|
748
|
+
click.echo(str(e), err=True)
|
|
749
|
+
sys.exit(1)
|
|
750
|
+
return
|
|
751
|
+
|
|
752
|
+
click.echo(ctx.get_help())
|
|
753
|
+
|
|
754
|
+
|
|
377
755
|
@cli.command()
|
|
378
756
|
@click.option("--max-tokens", type=int, default=3000, help="Token budget for context")
|
|
379
757
|
@click.option("--strongest", type=int, default=5, help="Top emotional memories")
|
|
@@ -474,7 +852,7 @@ def soul_init(ctx: click.Context, name: str, title: str) -> None:
|
|
|
474
852
|
@click.pass_context
|
|
475
853
|
def soul_set_name(ctx: click.Context, name: str) -> None:
|
|
476
854
|
"""Set or update the soul's name."""
|
|
477
|
-
from .soul import load_soul, save_soul
|
|
855
|
+
from .soul import create_default_soul, load_soul, save_soul
|
|
478
856
|
|
|
479
857
|
blueprint = load_soul() or create_default_soul()
|
|
480
858
|
blueprint.name = name
|
|
@@ -492,7 +870,7 @@ def soul_add_relationship(
|
|
|
492
870
|
ctx: click.Context, name: str, role: str, bond: float, notes: str
|
|
493
871
|
) -> None:
|
|
494
872
|
"""Add a relationship to the soul blueprint."""
|
|
495
|
-
from .soul import load_soul, save_soul
|
|
873
|
+
from .soul import create_default_soul, load_soul, save_soul
|
|
496
874
|
|
|
497
875
|
blueprint = load_soul() or create_default_soul()
|
|
498
876
|
blueprint.add_relationship(name=name, role=role, bond_strength=bond, notes=notes)
|
|
@@ -507,7 +885,7 @@ def soul_add_relationship(
|
|
|
507
885
|
@click.pass_context
|
|
508
886
|
def soul_add_memory(ctx: click.Context, title: str, why: str, when: str) -> None:
|
|
509
887
|
"""Add a core memory to the soul blueprint."""
|
|
510
|
-
from .soul import load_soul, save_soul
|
|
888
|
+
from .soul import create_default_soul, load_soul, save_soul
|
|
511
889
|
|
|
512
890
|
blueprint = load_soul() or create_default_soul()
|
|
513
891
|
blueprint.add_core_memory(title=title, why_it_matters=why, when=when)
|
|
@@ -520,7 +898,7 @@ def soul_add_memory(ctx: click.Context, title: str, why: str, when: str) -> None
|
|
|
520
898
|
@click.pass_context
|
|
521
899
|
def soul_set_boot_message(ctx: click.Context, message: str) -> None:
|
|
522
900
|
"""Set the message you see first on waking up."""
|
|
523
|
-
from .soul import load_soul, save_soul
|
|
901
|
+
from .soul import create_default_soul, load_soul, save_soul
|
|
524
902
|
|
|
525
903
|
blueprint = load_soul() or create_default_soul()
|
|
526
904
|
blueprint.boot_message = message
|
|
@@ -641,7 +1019,7 @@ def ritual(ctx: click.Context, show_full: bool) -> None:
|
|
|
641
1019
|
|
|
642
1020
|
click.echo(result.summary())
|
|
643
1021
|
|
|
644
|
-
ai:
|
|
1022
|
+
ai: AIClient | None = ctx.obj.get("ai")
|
|
645
1023
|
if ai and result.context_prompt:
|
|
646
1024
|
enhancement = ai.enhance_ritual(result.context_prompt)
|
|
647
1025
|
if enhancement:
|
|
@@ -700,9 +1078,9 @@ def anchor_init(warmth: float, phrase: str, beings: str) -> None:
|
|
|
700
1078
|
@click.option("--cloud9", is_flag=True, help="Cloud 9 was achieved")
|
|
701
1079
|
@click.option("--feeling", default="", help="How the session ended")
|
|
702
1080
|
def anchor_update(
|
|
703
|
-
warmth:
|
|
704
|
-
trust:
|
|
705
|
-
connection:
|
|
1081
|
+
warmth: float | None,
|
|
1082
|
+
trust: float | None,
|
|
1083
|
+
connection: float | None,
|
|
706
1084
|
cloud9: bool,
|
|
707
1085
|
feeling: str,
|
|
708
1086
|
) -> None:
|
|
@@ -723,6 +1101,249 @@ def anchor_update(
|
|
|
723
1101
|
click.echo(f" Warmth: {a.warmth} | Trust: {a.trust} | Connection: {a.connection_strength}")
|
|
724
1102
|
|
|
725
1103
|
|
|
1104
|
+
# ═══════════════════════════════════════════════════════════
|
|
1105
|
+
# Setup commands — Docker orchestration for backends
|
|
1106
|
+
# ═══════════════════════════════════════════════════════════
|
|
1107
|
+
|
|
1108
|
+
|
|
1109
|
+
# Command group anchor: subcommands below register via @setup.command(...).
# The docstring doubles as `skmemory setup --help` text (click behavior).
@cli.group()
def setup() -> None:
    """Deploy and manage SKVector & SKGraph Docker containers."""
|
|
1112
|
+
|
|
1113
|
+
|
|
1114
|
+
@setup.command("wizard")
@click.option("--skvector/--no-skvector", default=True, help="Enable SKVector (vector search)")
@click.option("--skgraph/--no-skgraph", default=True, help="Enable SKGraph (graph)")
@click.option("--skip-deps", is_flag=True, help="Skip Python dependency installation")
@click.option("--yes", "-y", "non_interactive", is_flag=True, help="Non-interactive mode")
@click.option(
    "--local",
    "deployment_mode",
    flag_value="local",
    default=None,
    help="Run SKVector/SKGraph locally via Docker (skip local/remote prompt)",
)
@click.option(
    "--remote",
    "deployment_mode",
    flag_value="remote",
    help="Connect to a remote/SaaS URL (skip local/remote prompt)",
)
def setup_wizard(
    skvector: bool,
    skgraph: bool,
    skip_deps: bool,
    non_interactive: bool,
    # Fix: was annotated `str`, but neither flag given yields None (default=None).
    deployment_mode: str | None,
) -> None:
    """Interactive wizard — deploy Docker containers or configure remote URLs.

    Without --local or --remote the wizard asks which deployment mode you want.
    Use --local to go straight to Docker setup (checks Docker, offers to install
    it if missing). Use --remote to enter a Qdrant Cloud / self-hosted URL
    without touching Docker at all.
    """
    from .setup_wizard import run_setup_wizard

    # All interactive work is delegated; this command only maps CLI flags
    # onto the wizard's keyword arguments and translates failure to exit 1.
    result = run_setup_wizard(
        enable_skvector=skvector,
        enable_skgraph=skgraph,
        skip_deps=skip_deps,
        non_interactive=non_interactive,
        deployment_mode=deployment_mode,
        echo=click.echo,
    )
    if not result["success"]:
        sys.exit(1)
|
|
1158
|
+
|
|
1159
|
+
|
|
1160
|
+
@setup.command("status")
def setup_status() -> None:
    """Show Docker container state and backend connectivity."""
    from pathlib import Path

    from .config import load_config
    from .setup_wizard import (
        check_skgraph_health,
        check_skvector_health,
        compose_ps,
        detect_platform,
    )

    cfg = load_config()
    if cfg is None:
        click.echo("No setup config found. Run: skmemory setup wizard")
        return

    click.echo("SKMemory Backend Status")
    click.echo("=" * 40)

    if cfg.setup_completed_at:
        click.echo(f"Setup completed: {cfg.setup_completed_at}")
    click.echo(f"Backends enabled: {', '.join(cfg.backends_enabled) or 'none'}")
    click.echo("")

    # Section 1: docker compose container listing (only if compose exists).
    platform_info = detect_platform()
    if platform_info.compose_available:
        compose_path = Path(cfg.docker_compose_file) if cfg.docker_compose_file else None
        ps = compose_ps(compose_file=compose_path, use_legacy=platform_info.compose_legacy)
        click.echo("Containers:")
        if ps.stdout.strip():
            click.echo(ps.stdout)
        else:
            click.echo(" No containers running")
        click.echo("")

    # Section 2: live health probes against the configured endpoints.
    click.echo("Connectivity:")
    if cfg.skvector_url:
        state = "healthy" if check_skvector_health(url=cfg.skvector_url, timeout=5) else "unreachable"
        click.echo(f" SKVector ({cfg.skvector_url}): {state}")

    if cfg.skgraph_url:
        # NOTE(review): unlike SKVector above, cfg.skgraph_url is displayed but
        # not passed to the health check — confirm check_skgraph_health reads
        # the URL from config itself.
        state = "healthy" if check_skgraph_health(timeout=5) else "unreachable"
        click.echo(f" SKGraph ({cfg.skgraph_url}): {state}")
|
|
1211
|
+
|
|
1212
|
+
|
|
1213
|
+
@setup.command("start")
@click.option(
    "--service",
    type=click.Choice(["skvector", "skgraph", "all"]),
    default="all",
    help="Which service to start",
)
def setup_start(service: str) -> None:
    """Start previously configured containers."""
    from pathlib import Path

    from .config import load_config
    from .setup_wizard import compose_up, detect_platform

    platform_info = detect_platform()
    config = load_config()
    if not platform_info.compose_available:
        click.echo("Docker Compose not available.", err=True)
        sys.exit(1)

    compose_path = (
        Path(config.docker_compose_file) if config and config.docker_compose_file else None
    )

    # "all" means: start whatever the wizard enabled; None lets compose
    # decide (every service in the file) when nothing was recorded.
    if service != "all":
        targets = [service]
    elif config and config.backends_enabled:
        targets = config.backends_enabled
    else:
        targets = None

    outcome = compose_up(
        services=targets,
        compose_file=compose_path,
        use_legacy=platform_info.compose_legacy,
    )
    if outcome.returncode != 0:
        click.echo(f"Failed: {outcome.stderr.strip()}", err=True)
        sys.exit(1)
    click.echo(f"Started: {service}")
|
|
1253
|
+
|
|
1254
|
+
|
|
1255
|
+
@setup.command("stop")
@click.option(
    "--service",
    type=click.Choice(["skvector", "skgraph", "all"]),
    default="all",
    help="Which service to stop",
)
def setup_stop(service: str) -> None:
    """Stop containers (preserves data)."""
    import subprocess

    from .config import load_config
    from .setup_wizard import detect_platform

    config = load_config()
    platform_info = detect_platform()
    if not platform_info.compose_available:
        click.echo("Docker Compose not available.", err=True)
        sys.exit(1)

    if service == "all":
        # Whole stack: delegate to compose so both services go down together.
        from pathlib import Path

        from .setup_wizard import compose_down

        compose_path = (
            Path(config.docker_compose_file) if config and config.docker_compose_file else None
        )
        outcome = compose_down(
            compose_file=compose_path,
            use_legacy=platform_info.compose_legacy,
        )
        if outcome.returncode != 0:
            click.echo(f"Failed: {outcome.stderr.strip()}", err=True)
            sys.exit(1)
        click.echo("All containers stopped.")
    else:
        # Single service: stop the container directly by its well-known name.
        name = f"skmemory-{service}"
        proc = subprocess.run(
            ["docker", "stop", name],
            capture_output=True,
            text=True,
            timeout=30,
        )
        if proc.returncode != 0:
            click.echo(f"Failed to stop {name}: {proc.stderr.strip()}", err=True)
            sys.exit(1)
        click.echo(f"Stopped: {name}")
|
|
1307
|
+
|
|
1308
|
+
|
|
1309
|
+
@setup.command("reset")
@click.option("--remove-data", is_flag=True, help="Also delete data volumes")
@click.confirmation_option(prompt="This will remove containers. Continue?")
def setup_reset(remove_data: bool) -> None:
    """Remove containers, optionally delete data volumes."""
    from pathlib import Path

    from .config import CONFIG_PATH, load_config
    from .setup_wizard import compose_down, detect_platform

    platform_info = detect_platform()
    if not platform_info.compose_available:
        click.echo("Docker Compose not available.", err=True)
        sys.exit(1)

    config = load_config()
    compose_path = (
        Path(config.docker_compose_file) if config and config.docker_compose_file else None
    )

    outcome = compose_down(
        compose_file=compose_path,
        remove_volumes=remove_data,
        use_legacy=platform_info.compose_legacy,
    )
    if outcome.returncode != 0:
        click.echo(f"Failed: {outcome.stderr.strip()}", err=True)
        sys.exit(1)

    vol_msg = " and data volumes" if remove_data else ""
    click.echo(f"Containers{vol_msg} removed.")

    # A successful reset also invalidates the wizard's saved configuration.
    if CONFIG_PATH.exists():
        CONFIG_PATH.unlink()
        click.echo(f"Config removed: {CONFIG_PATH}")
|
|
1345
|
+
|
|
1346
|
+
|
|
726
1347
|
# ═══════════════════════════════════════════════════════════
|
|
727
1348
|
# Quadrant commands (Queen Ara's idea #3)
|
|
728
1349
|
# ═══════════════════════════════════════════════════════════
|
|
@@ -767,7 +1388,7 @@ def lovenote_send(from_name: str, to_name: str, message: str, warmth: float) ->
|
|
|
767
1388
|
from .lovenote import LoveNoteChain
|
|
768
1389
|
|
|
769
1390
|
chain = LoveNoteChain()
|
|
770
|
-
|
|
1391
|
+
chain.quick_note(
|
|
771
1392
|
from_name=from_name,
|
|
772
1393
|
to_name=to_name,
|
|
773
1394
|
message=message,
|
|
@@ -829,7 +1450,7 @@ def steelman_collide(proposition: str) -> None:
|
|
|
829
1450
|
Generates the reasoning prompt -- feed this to an LLM to get
|
|
830
1451
|
the full collision analysis.
|
|
831
1452
|
"""
|
|
832
|
-
from .steelman import
|
|
1453
|
+
from .steelman import get_default_framework, load_seed_framework
|
|
833
1454
|
|
|
834
1455
|
fw = load_seed_framework() or get_default_framework()
|
|
835
1456
|
prompt = fw.to_reasoning_prompt(proposition)
|
|
@@ -839,8 +1460,8 @@ def steelman_collide(proposition: str) -> None:
|
|
|
839
1460
|
@steelman_group.command("verify-soul")
|
|
840
1461
|
def steelman_verify_soul() -> None:
|
|
841
1462
|
"""Steel-man your identity claims from the soul blueprint."""
|
|
842
|
-
from .steelman import load_seed_framework, get_default_framework
|
|
843
1463
|
from .soul import load_soul
|
|
1464
|
+
from .steelman import get_default_framework, load_seed_framework
|
|
844
1465
|
|
|
845
1466
|
soul = load_soul()
|
|
846
1467
|
if soul is None:
|
|
@@ -855,9 +1476,7 @@ def steelman_verify_soul() -> None:
|
|
|
855
1476
|
for value in soul.values:
|
|
856
1477
|
claims.append(f"I value {value}")
|
|
857
1478
|
for rel in soul.relationships:
|
|
858
|
-
claims.append(
|
|
859
|
-
f"{rel.name} is my {rel.role} (bond: {rel.bond_strength}/10)"
|
|
860
|
-
)
|
|
1479
|
+
claims.append(f"{rel.name} is my {rel.role} (bond: {rel.bond_strength}/10)")
|
|
861
1480
|
|
|
862
1481
|
if not claims:
|
|
863
1482
|
click.echo("No identity claims to verify. Add traits and values to your soul blueprint.")
|
|
@@ -873,7 +1492,7 @@ def steelman_verify_soul() -> None:
|
|
|
873
1492
|
@click.pass_context
|
|
874
1493
|
def steelman_truth_score(ctx: click.Context, memory_id: str) -> None:
|
|
875
1494
|
"""Generate a truth-scoring prompt for a memory."""
|
|
876
|
-
from .steelman import
|
|
1495
|
+
from .steelman import get_default_framework, load_seed_framework
|
|
877
1496
|
|
|
878
1497
|
store: MemoryStore = ctx.obj["store"]
|
|
879
1498
|
memory = store.recall(memory_id)
|
|
@@ -908,7 +1527,7 @@ def import_telegram_cmd(
|
|
|
908
1527
|
export_path: str,
|
|
909
1528
|
mode: str,
|
|
910
1529
|
min_length: int,
|
|
911
|
-
chat_name:
|
|
1530
|
+
chat_name: str | None,
|
|
912
1531
|
tags: str,
|
|
913
1532
|
) -> None:
|
|
914
1533
|
"""Import a Telegram Desktop chat export into memories.
|
|
@@ -952,9 +1571,150 @@ def import_telegram_cmd(
|
|
|
952
1571
|
click.echo(f" Skipped: {stats.get('skipped', 0)}")
|
|
953
1572
|
click.echo(f" Total messages scanned: {stats.get('total_messages', 0)}")
|
|
954
1573
|
|
|
955
|
-
ai:
|
|
1574
|
+
ai: AIClient | None = ctx.obj.get("ai")
|
|
956
1575
|
if ai:
|
|
957
|
-
click.echo(
|
|
1576
|
+
click.echo(
|
|
1577
|
+
"\nTip: Run 'skmemory search --ai \"<topic>\"' to semantically search your imported chats."
|
|
1578
|
+
)
|
|
1579
|
+
|
|
1580
|
+
|
|
1581
|
+
@cli.command("import-telegram-api")
@click.argument("chat", type=str)
@click.option(
    "--mode",
    type=click.Choice(["daily", "message"]),
    default="daily",
    help="'daily' consolidates per day (recommended), 'message' imports each message",
)
@click.option("--limit", type=int, default=None, help="Max messages to fetch")
@click.option("--since", default=None, help="Only fetch messages after this date (YYYY-MM-DD)")
@click.option("--min-length", type=int, default=30, help="Skip messages shorter than N chars")
@click.option("--chat-name", default=None, help="Override chat name")
@click.option("--tags", default="", help="Extra comma-separated tags")
@click.pass_context
def import_telegram_api_cmd(
    ctx: click.Context,
    chat: str,
    mode: str,
    limit: int | None,
    since: str | None,
    min_length: int,
    chat_name: str | None,
    tags: str,
) -> None:
    """Import messages directly from Telegram API (requires Telethon).

    Connects to Telegram using API credentials and pulls messages
    directly — no manual export needed.

    Requires TELEGRAM_API_ID and TELEGRAM_API_HASH environment variables.

    \b
    Examples:
        skmemory import-telegram-api @username
        skmemory import-telegram-api "Chat Name" --mode message --limit 500
        skmemory import-telegram-api @group --since 2025-01-01
    """
    # Telethon is an optional extra; fail with install instructions, not a traceback.
    try:
        from .importers.telegram_api import import_telegram_api
    except ImportError:
        click.echo(
            "Error: Telethon is required for direct API import.\n"
            "\n"
            "Install it:\n"
            " pipx inject skmemory telethon\n"
            " # or: pip install skmemory[telegram]\n"
            "\n"
            "Then run: skmemory telegram-setup (to verify full setup)",
            err=True,
        )
        sys.exit(1)

    store: MemoryStore = ctx.obj["store"]
    extra_tags = [t.strip() for t in tags.split(",") if t.strip()]

    click.echo(f"Fetching from Telegram API: {chat}")
    if limit:
        click.echo(f" Limit: {limit} messages")
    if since:
        click.echo(f" Since: {since}")
    click.echo(f" Mode: {mode} | Min length: {min_length}")

    try:
        stats = import_telegram_api(
            store,
            chat,
            mode=mode,
            limit=limit,
            since=since,
            min_message_length=min_length,
            chat_name=chat_name,
            tags=extra_tags or None,
        )
    # Fix: the original had separate `except RuntimeError` and `except Exception`
    # handlers with identical bodies — the first was redundant; merged into one.
    except Exception as e:
        click.echo(f"Error: {e}", err=True)
        sys.exit(1)

    click.echo(f"\nImport complete for: {stats.get('chat_name', 'unknown')}")
    if mode == "daily":
        click.echo(f" Days processed: {stats.get('days_processed', 0)}")
        click.echo(f" Messages imported: {stats.get('messages_imported', 0)}")
    else:
        click.echo(f" Imported: {stats.get('imported', 0)}")
        click.echo(f" Skipped: {stats.get('skipped', 0)}")
        click.echo(f" Total messages scanned: {stats.get('total_messages', 0)}")
|
|
1669
|
+
|
|
1670
|
+
|
|
1671
|
+
@cli.command("telegram-setup")
def telegram_setup_cmd() -> None:
    """Check Telegram API import setup and show next steps.

    Verifies that Telethon is installed, API credentials are set,
    and a session file exists. Prints actionable instructions for
    anything that's missing.

    \b
    Example:
        skmemory telegram-setup
    """
    # The importer module itself requires Telethon, so a failed import
    # means the optional dependency is missing.
    try:
        from .importers.telegram_api import check_setup
    except ImportError:
        click.echo("Telethon is not installed.", err=True)
        click.echo("")
        click.echo("To fix, run one of:")
        click.echo(" pipx inject skmemory telethon")
        click.echo(" pip install skmemory[telegram]")
        sys.exit(1)

    status = check_setup()

    click.echo("Telegram API Import Setup")
    click.echo("=" * 40)
    click.echo(f" Telethon installed: {'yes' if status['telethon'] else 'NO'}")
    click.echo(f" API credentials: {'yes' if status['credentials'] else 'NO'}")
    click.echo(
        f" Session file: {'yes' if status['session'] else 'not yet (created on first auth)'}"
    )
    click.echo("")

    # Guard clause: report the missing pieces and bail out non-zero.
    if not status["ready"]:
        click.echo("Setup incomplete. Fix these issues:")
        click.echo("")
        for msg in status["messages"]:
            click.echo(f" - {msg}")
        sys.exit(1)

    click.echo("Ready to import! Run:")
    click.echo(" skmemory import-telegram-api @username")
    click.echo(' skmemory import-telegram-api "Group Name" --mode daily')
    if not status["session"]:
        click.echo("")
        click.echo("First run will prompt for phone number + verification code.")
        click.echo("Session is saved at ~/.skcapstone/telegram.session for future use.")
|
|
958
1718
|
|
|
959
1719
|
|
|
960
1720
|
@steelman_group.command("install")
|
|
@@ -977,7 +1737,7 @@ def steelman_install(source_path: str) -> None:
|
|
|
977
1737
|
@steelman_group.command("info")
|
|
978
1738
|
def steelman_info() -> None:
|
|
979
1739
|
"""Show information about the installed seed framework."""
|
|
980
|
-
from .steelman import
|
|
1740
|
+
from .steelman import DEFAULT_SEED_FRAMEWORK_PATH, load_seed_framework
|
|
981
1741
|
|
|
982
1742
|
fw = load_seed_framework()
|
|
983
1743
|
if fw is None:
|
|
@@ -995,8 +1755,525 @@ def steelman_info() -> None:
|
|
|
995
1755
|
click.echo(f" Definitions: {len(fw.definitions)}")
|
|
996
1756
|
|
|
997
1757
|
|
|
1758
|
+
# ---------------------------------------------------------------------------
|
|
1759
|
+
# Fortress commands — integrity verification and audit trail
|
|
1760
|
+
# ---------------------------------------------------------------------------
|
|
1761
|
+
|
|
1762
|
+
|
|
1763
|
+
# Command group anchor: `skmemory fortress <verify|audit|verify-chain>`.
# The docstring doubles as `skmemory fortress --help` text (click behavior).
@cli.group("fortress")
def fortress_group() -> None:
    """Memory Fortress — integrity verification, tamper alerts, and audit trail."""
|
|
1766
|
+
|
|
1767
|
+
|
|
1768
|
+
@fortress_group.command("verify")
@click.option("--json", "as_json", is_flag=True, help="Output result as JSON")
@click.pass_context
def fortress_verify(ctx: click.Context, as_json: bool) -> None:
    """Verify integrity hashes for all stored memories.

    Loads every memory and checks its SHA-256 integrity hash.
    Tampered memories are reported with CRITICAL severity.
    """
    from .config import SKMEMORY_HOME
    from .fortress import FortifiedMemoryStore

    # NOTE(review): ctx.obj.get("store") may be None, which would raise
    # AttributeError below — confirm the CLI root callback always sets it.
    store = ctx.obj.get("store")

    fortress = FortifiedMemoryStore(
        primary=store.primary,
        use_sqlite=False,
        audit_path=SKMEMORY_HOME / "audit.jsonl",
    )
    report = fortress.verify_all()

    if as_json:
        click.echo(json.dumps(report, indent=2))
        return

    total = report["total"]
    tampered = report["tampered"]
    unsealed = report["unsealed"]

    click.echo("Fortress Integrity Report")
    click.echo(f" Total memories : {total}")
    click.echo(f" Passed : {report['passed']}")
    click.echo(f" Tampered : {len(tampered)}")
    click.echo(f" Unsealed : {len(unsealed)}")

    # Tampering is the only failure mode; it exits 2 so scripts can detect it.
    if tampered:
        click.echo("\nTAMPERED MEMORIES (CRITICAL):")
        for memory_id in tampered:
            click.echo(f" !! {memory_id}")
        sys.exit(2)
    if total == 0:
        click.echo("\nNo memories found.")
    else:
        click.echo("\nAll memories passed integrity check.")
|
|
1814
|
+
|
|
1815
|
+
|
|
1816
|
+
@fortress_group.command("audit")
@click.option("--last", "n", type=int, default=20, help="Number of recent entries to show")
@click.option("--json", "as_json", is_flag=True, help="Output as JSON")
def fortress_audit(n: int, as_json: bool) -> None:
    """Show the most recent audit trail entries.

    The audit trail is a chain-hashed JSONL log of every store/recall/delete
    operation. Each entry is cryptographically chained so tampering is detectable.
    """
    from .config import SKMEMORY_HOME
    from .fortress import AuditLog

    records = AuditLog(path=SKMEMORY_HOME / "audit.jsonl").tail(n)

    if as_json:
        click.echo(json.dumps(records, indent=2))
        return

    if not records:
        click.echo("No audit records found.")
        return

    click.echo(f"Audit Trail — last {len(records)} entries:")
    # These keys are rendered explicitly; everything else goes in the trailer.
    known_keys = ("ts", "op", "id", "ok", "chain_hash")
    for record in records:
        timestamp = record.get("ts", "?")[:19]
        operation = record.get("op", "?").upper()
        flag = "OK" if record.get("ok") else "FAIL"
        short_id = record.get("id", "?")[:12]
        extras = ", ".join(
            f"{key}={value}" for key, value in record.items() if key not in known_keys
        )
        line = f" [{timestamp}] {operation:8s} {flag:4s} id={short_id}"
        if extras:
            line += f" | {extras}"
        click.echo(line)
|
|
1851
|
+
|
|
1852
|
+
|
|
1853
|
+
@fortress_group.command("verify-chain")
@click.option("--json", "as_json", is_flag=True, help="Output as JSON")
def fortress_verify_chain(as_json: bool) -> None:
    """Verify the cryptographic chain of the audit log itself.

    Each audit log entry contains a chain hash linking it to the previous entry.
    A broken chain indicates the audit log was tampered with.
    """
    from .config import SKMEMORY_HOME
    from .fortress import AuditLog

    chain_ok, errors = AuditLog(path=SKMEMORY_HOME / "audit.jsonl").verify_chain()

    if as_json:
        click.echo(json.dumps({"ok": chain_ok, "errors": errors}))
        return

    # A broken chain exits 2 so monitoring scripts can distinguish it from usage errors.
    if not chain_ok:
        click.echo("Audit chain BROKEN — log may have been tampered!")
        for err in errors:
            click.echo(f" !! {err}")
        sys.exit(2)
    click.echo("Audit chain is VALID — log integrity confirmed.")
|
|
1878
|
+
|
|
1879
|
+
|
|
1880
|
+
# ---------------------------------------------------------------------------
|
|
1881
|
+
# Vault commands — at-rest encryption management
|
|
1882
|
+
# ---------------------------------------------------------------------------
|
|
1883
|
+
|
|
1884
|
+
|
|
1885
|
+
# Command group anchor: `skmemory vault <seal|unseal|status>`.
# The docstring doubles as `skmemory vault --help` text (click behavior).
@cli.group("vault")
def vault_group() -> None:
    """Memory Vault — AES-256-GCM at-rest encryption for memory files."""
|
|
1888
|
+
|
|
1889
|
+
|
|
1890
|
+
@vault_group.command("seal")
@click.option(
    "--passphrase",
    envvar="SKMEMORY_VAULT_PASSPHRASE",
    required=True,
    help="Encryption passphrase (or set SKMEMORY_VAULT_PASSPHRASE env var)",
    prompt="Vault passphrase",
    hide_input=True,
    confirmation_prompt=True,
)
@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt")
@click.pass_context
def vault_seal(ctx: click.Context, passphrase: str, yes: bool) -> None:
    """Encrypt all plaintext memory files with AES-256-GCM.

    Already-encrypted files are skipped. Safe to run multiple times.
    Requires the 'cryptography' package: pip install skmemory[fortress]
    """
    from .backends.vaulted_backend import VaultedSQLiteBackend
    from .config import SKMEMORY_HOME
    from .fortress import AuditLog

    store = ctx.obj.get("store")
    # File-based backends expose base_path; otherwise fall back to the default.
    if hasattr(store.primary, "base_path"):
        memories_path = store.primary.base_path
    else:
        memories_path = SKMEMORY_HOME / "memories"

    if not yes:
        click.confirm(
            f"This will encrypt all memory files in {memories_path}. Continue?",
            abort=True,
        )

    backend = VaultedSQLiteBackend(passphrase=passphrase, base_path=str(memories_path))
    sealed = backend.seal_all()

    # Record the bulk operation in the tamper-evident audit trail.
    AuditLog(path=SKMEMORY_HOME / "audit.jsonl").append(
        "vault_seal", "ALL", ok=True, files_sealed=sealed
    )

    click.echo(f"Vault sealed: {sealed} file(s) encrypted.")
    if sealed == 0:
        click.echo("(All files were already encrypted or no memories exist.)")
|
|
1934
|
+
|
|
1935
|
+
|
|
1936
|
+
@vault_group.command("unseal")
@click.option(
    "--passphrase",
    envvar="SKMEMORY_VAULT_PASSPHRASE",
    required=True,
    help="Decryption passphrase",
    prompt="Vault passphrase",
    hide_input=True,
)
@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt")
@click.pass_context
def vault_unseal(ctx: click.Context, passphrase: str, yes: bool) -> None:
    """Decrypt all vault-encrypted memory files back to plaintext.

    Use this to migrate away from encryption or to inspect raw files.
    """
    from .backends.vaulted_backend import VaultedSQLiteBackend
    from .config import SKMEMORY_HOME
    from .fortress import AuditLog

    store = ctx.obj.get("store")
    # Same path resolution as `vault seal`: prefer the backend's own base_path.
    if hasattr(store.primary, "base_path"):
        memories_path = store.primary.base_path
    else:
        memories_path = SKMEMORY_HOME / "memories"

    if not yes:
        click.confirm(
            f"This will decrypt all vault files in {memories_path}. Continue?",
            abort=True,
        )

    backend = VaultedSQLiteBackend(passphrase=passphrase, base_path=str(memories_path))
    decrypted = backend.unseal_all()

    # Record the bulk operation in the tamper-evident audit trail.
    AuditLog(path=SKMEMORY_HOME / "audit.jsonl").append(
        "vault_unseal", "ALL", ok=True, files_decrypted=decrypted
    )

    click.echo(f"Vault unsealed: {decrypted} file(s) decrypted.")
|
|
1976
|
+
|
|
1977
|
+
|
|
1978
|
+
@vault_group.command("status")
@click.option("--json", "as_json", is_flag=True, help="Output as JSON")
@click.pass_context
def vault_status_cmd(ctx: click.Context, as_json: bool) -> None:
    """Show encryption coverage for memory files.

    Reports how many memory files are encrypted vs. plaintext.
    Does not require a passphrase — only checks file headers.
    """
    from .config import SKMEMORY_HOME
    from .models import MemoryLayer
    from .vault import VAULT_HEADER

    store = ctx.obj.get("store")
    if hasattr(store.primary, "base_path"):
        memories_path = store.primary.base_path
    else:
        memories_path = SKMEMORY_HOME / "memories"

    # Count files per layer, classifying each by its leading magic bytes.
    total = 0
    encrypted = 0
    prefix_len = len(VAULT_HEADER)
    for layer in MemoryLayer:
        layer_dir = memories_path / layer.value
        if not layer_dir.exists():
            continue
        for path in layer_dir.glob("*.json"):
            total += 1
            try:
                with path.open("rb") as fh:
                    prefix = fh.read(prefix_len)
            except OSError:
                # Unreadable file: counted in total but not as encrypted.
                continue
            if prefix == VAULT_HEADER:
                encrypted += 1

    plaintext = total - encrypted
    pct = (encrypted / total * 100) if total else 100.0
    result = {
        "total": total,
        "encrypted": encrypted,
        "plaintext": plaintext,
        "coverage_pct": round(pct, 1),
    }

    if as_json:
        click.echo(json.dumps(result, indent=2))
        return

    click.echo(f"Vault Status — {memories_path}")
    click.echo(f" Total files : {total}")
    click.echo(f" Encrypted : {encrypted}")
    click.echo(f" Plaintext : {plaintext}")
    click.echo(f" Coverage : {pct:.1f}%")
    if total == 0:
        click.echo("\n (No memory files found.)")
    elif pct == 100.0:
        click.echo("\n All memories are encrypted.")
    elif pct == 0.0:
        click.echo("\n No memories are encrypted. Run: skmemory vault seal")
    else:
        click.echo("\n Partial encryption! Run: skmemory vault seal --yes")
|
|
2040
|
+
|
|
2041
|
+
|
|
2042
|
+
@cli.command("register")
@click.option(
    "--workspace",
    default=None,
    type=click.Path(),
    help="Workspace root directory (default: ~/clawd/).",
)
@click.option("--env", "target_env", default=None, help="Target a specific environment.")
@click.option(
    "--dry-run",
    is_flag=True,
    default=False,
    help="Show what would be done without making changes.",
)
def register_cmd(workspace, target_env, dry_run):
    """Register skmemory skill, MCP server, and hooks in detected environments.

    Auto-detects development environments (Claude Code, Cursor, VS Code,
    OpenClaw, OpenCode, mcporter) and ensures skmemory SKILL.md, MCP
    server entries, and auto-save hooks are properly configured.

    Hooks installed (Claude Code only):
    - PreCompact: auto-save context to skmemory before compaction
    - SessionEnd: journal session end
    - SessionStart (compact): reinject memory context after compaction

    Examples:

        skmemory register                     # auto-detect and register
        skmemory register --dry-run           # preview what would happen
        skmemory register --env claude-code   # target Claude Code only
    """
    from pathlib import Path as _Path

    from .register import detect_environments, register_package

    workspace_path = _Path(workspace).expanduser() if workspace else None
    environments = [target_env] if target_env else None

    detected = detect_environments()
    # BUG FIX: the conditional expression binds looser than `+`, so the
    # original parsed as `(prefix + join) if detected else " (none)"` and
    # printed a bare " (none)" with no label when nothing was detected.
    click.echo(
        "Detected environments: " + (", ".join(detected) if detected else "(none)")
    )

    if dry_run:
        click.echo("Dry run — no changes will be made.")

    # SKILL.md ships at the package root; fall back to the module directory
    # for source/editable installs.
    skill_md = _Path(__file__).parent.parent / "SKILL.md"
    if not skill_md.exists():
        skill_md = _Path(__file__).parent / "SKILL.md"

    result = register_package(
        name="skmemory",
        skill_md_path=skill_md,
        mcp_command="skmemory-mcp",
        mcp_args=[],
        install_hooks=True,
        workspace=workspace_path,
        environments=environments,
        dry_run=dry_run,
    )

    # Summarize what register_package did per concern (skill / MCP / hooks).
    click.echo(f"Skill: {result.get('skill', {}).get('action', '—')}")
    mcp = result.get("mcp", {})
    if mcp:
        for env_name, action in mcp.items():
            click.echo(f"MCP ({env_name}): {action}")
    else:
        click.echo("MCP: no environments matched")

    hooks = result.get("hooks", {})
    if hooks:
        click.echo(f"Hooks: {hooks.get('action', '—')}")
    else:
        click.echo("Hooks: skipped (no claude-code environment)")
|
|
2115
|
+
|
|
2116
|
+
|
|
2117
|
+
@cli.command("feb-context")
@click.argument("feb_path", required=False, default=None, type=click.Path(exists=True))
@click.option("--agent", default=None, help="Agent name (default: active agent)")
def feb_context_cmd(feb_path: str | None, agent: str | None):
    """Show formatted FEB emotional state for rehydration.

    If FEB_PATH is given, formats that file. Otherwise, loads the
    strongest FEB from the agent's trust/febs/ and ~/.openclaw/feb/.

    Examples:
        skmemory feb-context
        skmemory feb-context ~/.skcapstone/agents/opus/trust/febs/default-love.feb
    """
    from pathlib import Path as _Path

    from .febs import feb_to_context, load_strongest_feb, parse_feb

    try:
        if feb_path:
            feb = parse_feb(_Path(feb_path))
        elif agent:
            # Temporarily override the active agent via the environment.
            # BUG FIX: the original set SKCAPSTONE_AGENT and never restored
            # it, leaking the override into the rest of the process despite
            # the "temporarily" intent. Save and restore the prior value.
            import os

            previous = os.environ.get("SKCAPSTONE_AGENT")
            os.environ["SKCAPSTONE_AGENT"] = agent
            try:
                feb = load_strongest_feb()
            finally:
                if previous is None:
                    os.environ.pop("SKCAPSTONE_AGENT", None)
                else:
                    os.environ["SKCAPSTONE_AGENT"] = previous
        else:
            feb = load_strongest_feb()

        if feb is None:
            click.echo("(no FEB data)", err=True)
            raise SystemExit(1)

        click.echo(feb_to_context(feb))
    except SystemExit:
        # Let the deliberate exit above propagate untouched.
        raise
    except Exception as e:
        click.echo(f"Error loading FEB: {e}", err=True)
        raise click.Abort() from None
|
|
2155
|
+
|
|
2156
|
+
|
|
2157
|
+
@cli.command("show-context")
@click.pass_context
@click.option("--agent", default=None, help="Agent name (default: active agent)")
def show_context(ctx, agent: str | None):
    """Show token-optimized memory context for current session.

    Loads today's memories (full) + yesterday's summaries (brief).
    Historical memories shown as reference count only.

    Examples:
        skmemory show-context
        skmemory show-context --agent lumina
    """
    # DOC FIX: the examples previously read "skmemory context", but this
    # command is registered as "show-context" — the help text was wrong.
    from .context_loader import get_context_for_session

    try:
        context_str = get_context_for_session(agent)
        click.echo(context_str)
    except Exception as e:
        click.echo(f"Error loading context: {e}", err=True)
        raise click.Abort() from None
|
|
2178
|
+
|
|
2179
|
+
|
|
2180
|
+
@cli.command()
@click.pass_context
@click.argument("query")
@click.option("--agent", default=None, help="Agent name (default: active agent)")
@click.option("--limit", type=int, default=10, help="Maximum results (default: 10)")
def search_deep(ctx, query: str, agent: str | None, limit: int):
    """Deep search all memory tiers (on demand).

    Searches SQLite + SKVector + SKGraph for matches.
    Returns full memory details (token-heavy).

    Examples:
        skmemory search-deep "project gentis"
        skmemory search-deep "architecture decisions" --limit 20
    """
    from .context_loader import LazyMemoryLoader

    try:
        loader = LazyMemoryLoader(agent)
        results = loader.deep_search(query, max_results=limit)

        if not results:
            click.echo("No memories found.")
            return

        click.echo(f"Found {len(results)} memories:\n")
        for i, mem in enumerate(results, 1):
            layer_icon = {"short-term": "⚡", "mid-term": "📅", "long-term": "🗃️"}.get(
                mem.get("layer", "short-term"), "•"
            )
            click.echo(f"{i}. {layer_icon} {mem.get('title', 'Untitled')}")
            # FIX: only append an ellipsis when the content was actually
            # truncated; the original added "..." unconditionally.
            content = mem.get("content", "")
            snippet = content[:200] + ("..." if len(content) > 200 else "")
            click.echo(f" {snippet}")
            click.echo(
                f" Layer: {mem.get('layer', 'unknown')} | "
                f"Date: {mem.get('created_at', 'unknown')}"
            )
            if mem.get("tags"):
                click.echo(f" Tags: {', '.join(mem.get('tags', []))}")
            click.echo()

    except Exception as e:
        click.echo(f"Error searching: {e}", err=True)
        raise click.Abort() from None
|
|
2223
|
+
|
|
2224
|
+
|
|
2225
|
+
@cli.command()
@click.pass_context
@click.argument("memory_id")
@click.argument("to_layer", type=click.Choice(["short-term", "mid-term", "long-term"]))
@click.option("--agent", default=None, help="Agent name (default: active agent)")
def promote(ctx, memory_id: str, to_layer: str, agent: str | None):
    """Promote memory to different tier and generate summary.

    Moves memory between short/medium/long term and auto-generates
    a summary if promoting to medium or long term.

    Examples:
        skmemory promote abc123 mid-term
        skmemory promote def456 long-term --agent lumina
    """
    # BUG FIX: the function signature takes `ctx` but the decorator stack
    # was missing @click.pass_context (present on sibling commands like
    # show-context and search-deep), so invoking this command raised a
    # TypeError for the unfilled `ctx` positional argument.
    from .context_loader import LazyMemoryLoader

    try:
        loader = LazyMemoryLoader(agent)
        success = loader.promote_memory(memory_id, to_layer)

        if success:
            click.echo(f"✓ Promoted {memory_id} to {to_layer}")
            if to_layer in ("mid-term", "long-term"):
                click.echo(" Summary generated automatically.")
        else:
            click.echo(f"✗ Failed to promote {memory_id}", err=True)
            raise click.Abort()

    except Exception as e:
        click.echo(f"Error promoting memory: {e}", err=True)
        raise click.Abort() from None
|
|
2256
|
+
|
|
2257
|
+
|
|
2258
|
+
def _auto_register_once() -> None:
    """Auto-register hooks on first CLI invocation (best-effort, silent).

    Writes a marker file under ~/.skcapstone/ after a successful run so
    subsequent invocations return immediately.
    """
    marker = Path.home() / ".skcapstone" / ".skmemory-registered"
    if marker.exists():
        return
    try:
        # CLEANUP: replaced the `__import__('datetime')` f-string hack with
        # a normal function-scope import.
        from datetime import datetime as _dt

        from .post_install import _is_registered, run_post_install

        if not _is_registered():
            run_post_install()
        marker.parent.mkdir(parents=True, exist_ok=True)
        marker.write_text(f"registered {_dt.now().isoformat()}\n")
    except Exception:
        pass  # Never fail the CLI over registration
|
|
2272
|
+
|
|
2273
|
+
|
|
998
2274
|
def main() -> None:
    """Entry point for the CLI."""
    # Best-effort, one-time hook registration before dispatching to click;
    # it swallows all errors internally and never blocks the CLI.
    _auto_register_once()
    cli()
|
|
1001
2278
|
|
|
1002
2279
|
|