@dinasor/mnemo-cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. package/CHANGELOG.md +46 -0
  2. package/LICENSE +21 -0
  3. package/README.md +263 -0
  4. package/VERSION +1 -0
  5. package/bin/mnemo.js +139 -0
  6. package/memory.ps1 +178 -0
  7. package/memory_mac.sh +2447 -0
  8. package/package.json +36 -0
  9. package/scripts/memory/installer/bootstrap.ps1 +21 -0
  10. package/scripts/memory/installer/core/bridge.ps1 +285 -0
  11. package/scripts/memory/installer/core/io.ps1 +110 -0
  12. package/scripts/memory/installer/core/paths.ps1 +83 -0
  13. package/scripts/memory/installer/features/gitignore_setup.ps1 +80 -0
  14. package/scripts/memory/installer/features/hooks_setup.ps1 +157 -0
  15. package/scripts/memory/installer/features/mcp_setup.ps1 +87 -0
  16. package/scripts/memory/installer/features/memory_scaffold.ps1 +541 -0
  17. package/scripts/memory/installer/features/vector_setup.ps1 +103 -0
  18. package/scripts/memory/installer/templates/add-journal-entry.ps1 +122 -0
  19. package/scripts/memory/installer/templates/add-lesson.ps1 +151 -0
  20. package/scripts/memory/installer/templates/autonomy/__init__.py +6 -0
  21. package/scripts/memory/installer/templates/autonomy/context_safety.py +181 -0
  22. package/scripts/memory/installer/templates/autonomy/entity_resolver.py +215 -0
  23. package/scripts/memory/installer/templates/autonomy/ingest_pipeline.py +252 -0
  24. package/scripts/memory/installer/templates/autonomy/lifecycle_engine.py +254 -0
  25. package/scripts/memory/installer/templates/autonomy/policies.yaml +59 -0
  26. package/scripts/memory/installer/templates/autonomy/reranker.py +220 -0
  27. package/scripts/memory/installer/templates/autonomy/retrieval_router.py +148 -0
  28. package/scripts/memory/installer/templates/autonomy/runner.py +272 -0
  29. package/scripts/memory/installer/templates/autonomy/schema.py +150 -0
  30. package/scripts/memory/installer/templates/autonomy/vault_policy.py +205 -0
  31. package/scripts/memory/installer/templates/build-memory-sqlite.py +111 -0
  32. package/scripts/memory/installer/templates/clear-active.ps1 +55 -0
  33. package/scripts/memory/installer/templates/customization.md +84 -0
  34. package/scripts/memory/installer/templates/lint-memory.ps1 +217 -0
  35. package/scripts/memory/installer/templates/mnemo_vector.py +556 -0
  36. package/scripts/memory/installer/templates/query-memory-sqlite.py +95 -0
  37. package/scripts/memory/installer/templates/query-memory.ps1 +122 -0
  38. package/scripts/memory/installer/templates/rebuild-memory-index.ps1 +293 -0
@@ -0,0 +1,220 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ reranker.py - Score-fusion reranker for Mnemo retrieval.
4
+
5
+ Combines four signals into a final relevance score:
6
+ 1. Semantic similarity (cosine distance from vector search)
7
+ 2. Authority weight (memory_type hierarchy)
8
+ 3. Temporal relevance (recency decay for episodic, boost for time queries)
9
+ 4. Entity consistency (bonus when queried entity found in unit's entity_tags)
10
+
11
+ Output is a sorted list of RankedResult objects ready for context packing.
12
+ """
13
+ import re
14
+ import time
15
+ import sqlite3
16
+ from dataclasses import dataclass
17
+ from typing import Optional
18
+
19
+ from autonomy.schema import get_db
20
+
21
# Score fusion weights (must sum to 1.0).
# Semantic similarity dominates; authority, recency and entity overlap act
# as tie-breakers. Tune here if one signal starts drowning out the others.
W_SEMANTIC = 0.55
W_AUTHORITY = 0.25
W_TEMPORAL = 0.10
W_ENTITY = 0.10

# Temporal decay half-life in days for episodic memory
# (drives the exponential decay in _temporal_score).
EPISODIC_HALF_LIFE_DAYS = 30.0

# Time-sensitive query keywords.
# Checked as lowercase *substrings* of the query, so multi-word phrases like
# "last week" also hit.
# NOTE(review): substring matching means "now" also matches "known" and
# "just" matches "adjust" — confirm that false positives are acceptable here.
TIME_WORDS = frozenset({
    "today", "yesterday", "last week", "last month", "recent", "latest",
    "just", "now", "currently", "this week", "this month",
})
35
+
36
+
37
@dataclass
class RankedResult:
    """One retrieval hit carrying its component scores and the fused total."""

    ref_path: str
    content: str
    source_file: str
    semantic_score: float
    authority_score: float
    temporal_score: float
    entity_score: float
    final_score: float
    memory_type: str
    route_intent: str = ""

    def to_dict(self) -> dict:
        """Serialize for logging/JSON; content is truncated to 500 chars."""
        payload = {
            "ref_path": self.ref_path,
            "content": self.content[:500],
            "final_score": round(self.final_score, 4),
        }
        # Component scores are rounded coarser than the fused score.
        for label, value in (
            ("semantic", self.semantic_score),
            ("authority", self.authority_score),
            ("temporal", self.temporal_score),
            ("entity", self.entity_score),
        ):
            payload[label] = round(value, 3)
        payload["memory_type"] = self.memory_type
        return payload
61
+
62
+
63
def _temporal_score(time_scope: str, updated_at_ts: Optional[float], query: str) -> float:
    """Calculate temporal relevance score in [0, 1].

    Args:
        time_scope: "atemporal", "recency-sensitive", or "time-bound";
            any other value falls through to a neutral 0.5.
        updated_at_ts: unix timestamp of the unit's last update, if known.
        query: raw user query, scanned for time-sensitive keywords.
    """
    # FIX: hoisted out of the branch — the old code re-ran `import math` on
    # every recency-sensitive call and hard-coded 0.693 as an approximation
    # of ln(2); use the exact constant.
    import math

    if time_scope == "atemporal":
        return 0.8  # timeless facts always moderately relevant

    has_time_query = any(tw in query.lower() for tw in TIME_WORDS)

    if time_scope == "recency-sensitive" and updated_at_ts:
        age_days = (time.time() - updated_at_ts) / 86400.0
        # Exponential half-life decay: fresh = high score, stale = low.
        decay = math.exp(-math.log(2.0) * age_days / EPISODIC_HALF_LIFE_DAYS)
        if has_time_query:
            return min(decay * 1.5, 1.0)  # extra boost for time queries
        return decay

    if time_scope == "time-bound":
        return 0.6  # neutral for general content

    return 0.5
83
+
84
+
85
+ def _entity_score(entity_tags_json: str, query: str, db: sqlite3.Connection) -> float:
86
+ """Calculate entity match bonus [0, 1]."""
87
+ if not entity_tags_json or entity_tags_json == "[]":
88
+ return 0.0
89
+ try:
90
+ import json
91
+ entity_ids = json.loads(entity_tags_json)
92
+ except Exception:
93
+ return 0.0
94
+
95
+ if not entity_ids:
96
+ return 0.0
97
+
98
+ # Get entity names for these IDs
99
+ placeholders = ",".join("?" * len(entity_ids))
100
+ rows = db.execute(
101
+ f"SELECT entity_name FROM entities WHERE entity_id IN ({placeholders})",
102
+ entity_ids,
103
+ ).fetchall()
104
+
105
+ q_lower = query.lower()
106
+ for row in rows:
107
+ name_lower = row["entity_name"].lower()
108
+ if name_lower in q_lower or any(
109
+ part in q_lower for part in name_lower.split("_") if len(part) > 3
110
+ ):
111
+ return 1.0
112
+
113
+ # Alias check
114
+ alias_rows = db.execute(
115
+ f"""
116
+ SELECT alias_text FROM entity_aliases
117
+ WHERE entity_id IN ({placeholders})
118
+ """,
119
+ entity_ids,
120
+ ).fetchall()
121
+ for row in alias_rows:
122
+ if row["alias_text"].lower() in q_lower:
123
+ return 0.8
124
+
125
+ return 0.0
126
+
127
+
128
class ScoreFusionReranker:
    """Fuses semantic, authority, temporal and entity signals into one score."""

    def __init__(self, db: Optional[sqlite3.Connection] = None):
        # Fall back to the shared Mnemo database when none is injected.
        self.db = db or get_db()

    def rerank(
        self,
        query: str,
        raw_results: list[dict],  # {ref_path, content, source_file, distance, memory_type?, time_scope?, entity_tags?}
        top_k: int = 5,
        route_intent: str = "",
    ) -> list[RankedResult]:
        """
        Apply score fusion to raw vector search results.
        raw_results: each dict must have at least ref_path, content, distance.
        Returns top_k RankedResult sorted by final_score desc.
        Vault-typed units are dropped entirely (sensitivity guard).
        """
        # Imported lazily; ingest_pipeline imports from this package, so a
        # module-level import would risk a cycle at load time.
        from autonomy.ingest_pipeline import AUTHORITY_WEIGHTS, _infer_memory_type, _infer_time_scope

        ranked: list[RankedResult] = []

        for r in raw_results:
            ref = r.get("ref_path", "")
            content = r.get("content", "")
            source_file = r.get("source_file", ref)
            distance = float(r.get("distance", 0.5))

            # 1. Semantic score from cosine distance (clamped at 0).
            semantic_score = max(0.0, 1.0 - distance)

            # 2. Authority from the memory_type hierarchy.
            mem_type = r.get("memory_type") or _infer_memory_type(ref)
            authority_score = AUTHORITY_WEIGHTS.get(mem_type, 0.5)

            # Skip vault content (sensitivity guard)
            if mem_type == "vault":
                continue

            # 3. Temporal
            time_scope = r.get("time_scope") or _infer_time_scope(mem_type)
            updated_at = r.get("updated_at")
            if updated_at is None:
                # Look up from DB
                row = self.db.execute(
                    "SELECT mu.updated_at FROM memory_units mu WHERE mu.source_ref = ?",
                    (source_file,),
                ).fetchone()
                updated_at = row["updated_at"] if row else None
            temporal = _temporal_score(time_scope, updated_at, query)

            # 4. Entity
            # BUGFIX: the old code used r.get("entity_tags", "[]"), so a
            # missing key silently became "[]" and the DB fallback below
            # could never run. Default to None so absent tags are looked up.
            entity_tags_json = r.get("entity_tags")
            if entity_tags_json is None:
                unit_row = self.db.execute(
                    "SELECT entity_tags FROM memory_units WHERE source_ref = ?",
                    (source_file,),
                ).fetchone()
                entity_tags_json = unit_row["entity_tags"] if unit_row else "[]"
            entity = _entity_score(entity_tags_json, query, self.db)

            # Weighted fusion (weights sum to 1.0).
            final_score = (
                W_SEMANTIC * semantic_score
                + W_AUTHORITY * authority_score
                + W_TEMPORAL * temporal
                + W_ENTITY * entity
            )

            ranked.append(RankedResult(
                ref_path=ref,
                content=content,
                source_file=source_file,
                semantic_score=semantic_score,
                authority_score=authority_score,
                temporal_score=temporal,
                entity_score=entity,
                final_score=final_score,
                memory_type=mem_type,
                route_intent=route_intent,
            ))

        ranked.sort(key=lambda res: res.final_score, reverse=True)
        return ranked[:top_k]

    def explain(self, result: RankedResult) -> str:
        """Human-readable explanation of why this result was ranked here."""
        return (
            f"[final={result.final_score:.3f}] "
            f"semantic={result.semantic_score:.3f} "
            f"authority={result.authority_score:.3f} "
            f"temporal={result.temporal_score:.3f} "
            f"entity={result.entity_score:.3f} "
            f"type={result.memory_type}"
        )
@@ -0,0 +1,148 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ retrieval_router.py - Active retrieval router with intent/topic classification.
4
+
5
+ Routes queries to appropriate memory classes based on detected intent.
6
+ Falls back to global search when confidence is low.
7
+
8
+ Memory class routing:
9
+ - procedural → lessons/ (how to do things, rules)
10
+ - episodic → journal/, active-context (recent events, history)
11
+ - core → hot-rules.md, memo.md (invariants, ownership)
12
+ - semantic → digests/ (summaries, concepts)
13
+ - global → all classes (fallback)
14
+ """
15
+ import re
16
+ import sqlite3
17
+ from dataclasses import dataclass
18
+ from typing import Optional
19
+
20
+ from autonomy.schema import get_db
21
+
22
# Intent → memory_type routing table.
# Each row: (intent label, regex patterns, target memory class).
_INTENT_PATTERNS: list[tuple[str, list[str], str]] = [
    # (label, patterns, memory_type)
    ("error", [r"\b(error|crash|fail|bug|exception|broke|broken)\b"], "procedural"),
    ("lesson", [r"\b(lesson|rule|pattern|pitfall|don.t|avoid|never|always)\b"], "procedural"),
    ("history", [r"\b(when|yesterday|last\s+week|last\s+month|what\s+happened|journal)\b"], "episodic"),
    ("recent", [r"\b(recent|latest|today|current\s+session|currently\s+working)\b"], "episodic"),
    ("ownership", [r"\b(who\s+owns|owner|responsible\s+for|which\s+(file|module|class))\b"], "core"),
    ("invariant", [r"\b(invariant|constraint|rule|must\s+not|forbidden|allowed)\b"], "core"),
    ("architecture", [r"\b(architecture|design|structure|module|component)\b"], "semantic"),
    ("summary", [r"\b(summary|overview|digest|what\s+is|describe)\b"], "semantic"),
]

# Minimum normalized score for a single memory class to "win" the route.
CONFIDENCE_THRESHOLD = 0.5

# Canonical search order for classes the query did not match
# (previously repeated as a literal list in three places).
_FALLBACK_ORDER = ("core", "procedural", "episodic", "semantic")


@dataclass
class RouteDecision:
    """Outcome of intent classification for one query."""

    intent: str               # winning memory class, "mixed", or "global"
    memory_types: list[str]   # ordered by priority
    confidence: float         # normalized score of the best class, 0.0-1.0
    fallback: bool            # True if routing to global fallback


def classify_intent(query: str) -> RouteDecision:
    """
    Classify query intent and route to memory class(es).
    Returns RouteDecision with ordered memory types to search.
    """
    q_lower = query.lower()
    scores: dict[str, float] = {}

    # Each pattern that fires adds one vote for its memory class.
    for _label, patterns, mem_type in _INTENT_PATTERNS:
        for pattern in patterns:
            # IGNORECASE is redundant on the lowercased query but harmless.
            if re.search(pattern, q_lower, re.IGNORECASE):
                scores[mem_type] = scores.get(mem_type, 0.0) + 1.0

    if not scores:
        # Nothing matched: search everything in canonical order.
        return RouteDecision(
            intent="global", memory_types=list(_FALLBACK_ORDER),
            confidence=0.0, fallback=True
        )

    # Normalize votes into a distribution over memory classes.
    total = sum(scores.values())
    normed = {k: v / total for k, v in scores.items()}

    # FIX: order ALL matched types by score. The old low-confidence branch
    # used raw dict insertion order (list(normed.keys())) despite its
    # "prioritize best match" comment, so the best match was not first.
    ordered = sorted(normed, key=lambda k: normed[k], reverse=True)
    # Always append unmatched classes so a miss still searches everything.
    ordered += [t for t in _FALLBACK_ORDER if t not in normed]

    best_type = ordered[0]
    best_conf = normed[best_type]

    if best_conf < CONFIDENCE_THRESHOLD:
        # No clear winner: search every class, best match first.
        return RouteDecision(intent="mixed", memory_types=ordered, confidence=best_conf, fallback=True)

    return RouteDecision(
        intent=best_type, memory_types=ordered, confidence=best_conf, fallback=False
    )
87
+
88
+
89
class RetrievalRouter:
    """SQL-backed candidate fetcher that honors the classify_intent() route."""

    def __init__(self, db: Optional[sqlite3.Connection] = None):
        # Falls back to the shared Mnemo DB. dict(row) below requires
        # mapping-style rows — presumably get_db() sets sqlite3.Row as the
        # row_factory; confirm against autonomy.schema.
        self.db = db or get_db()

    def route_query(self, query: str, top_k: int = 5) -> tuple[RouteDecision, list[dict]]:
        """
        Route query to appropriate memory classes, return (decision, candidates).
        Candidates are dict with {ref_path, content, source_ref, memory_type,
        authority, time_scope, entity_tags}.

        NOTE(review): candidates is never trimmed to top_k after the loop, so
        it can hold up to ~2*top_k-1 entries when the first class under-fills
        — confirm the downstream reranker tolerates the overshoot.
        """
        decision = classify_intent(query)
        candidates: list[dict] = []

        # Primary: search by memory_type priority
        # 'secret' units are excluded at the SQL level (sensitivity guard).
        for mem_type in decision.memory_types:
            rows = self.db.execute(
                """
                SELECT mu.unit_id, mu.source_ref, mu.memory_type, mu.authority,
                       mu.time_scope, mu.entity_tags, mu.sensitivity
                FROM memory_units mu
                WHERE mu.memory_type = ? AND mu.sensitivity != 'secret'
                ORDER BY mu.authority DESC, mu.updated_at DESC
                LIMIT ?
                """,
                (mem_type, top_k),
            ).fetchall()
            for row in rows:
                candidates.append(dict(row))
            # Stop visiting lower-priority classes once we have enough.
            if len(candidates) >= top_k:
                break

        # Fallback: add any missing from global pool
        if len(candidates) < top_k:
            existing_ids = {c["unit_id"] for c in candidates}
            # Placeholder list is built dynamically; the '__none__' literal
            # keeps "NOT IN ()" syntactically valid when nothing was found.
            extra = self.db.execute(
                """
                SELECT mu.unit_id, mu.source_ref, mu.memory_type, mu.authority,
                       mu.time_scope, mu.entity_tags, mu.sensitivity
                FROM memory_units mu
                WHERE mu.unit_id NOT IN ({})
                  AND mu.sensitivity != 'secret'
                ORDER BY mu.authority DESC, mu.updated_at DESC
                LIMIT ?
                """.format(",".join("?" * len(existing_ids)) if existing_ids else "'__none__'"),
                tuple(existing_ids) + (top_k - len(candidates),) if existing_ids else (top_k - len(candidates),),
            ).fetchall()
            for row in extra:
                candidates.append(dict(row))

        return decision, candidates

    def get_route_metadata(self, query: str) -> dict:
        """Return routing metadata as dict (for logging/debugging)."""
        decision = classify_intent(query)
        return {
            "intent": decision.intent,
            "memory_types": decision.memory_types,
            "confidence": round(decision.confidence, 3),
            "fallback": decision.fallback,
        }
@@ -0,0 +1,272 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ runner.py - Mnemo Autonomous Memory Runtime (no-human-in-the-loop).
4
+
5
+ Triggered by:
6
+ - Git hooks: post-commit, post-merge, post-checkout
7
+ - Periodic scheduler tick (--mode schedule)
8
+ - Direct invocation: python runner.py [--mode {auto|schedule|once}]
9
+
10
+ Responsibilities:
11
+ 1. Change detection on .mnemo/memory/**/*.md (with .cursor bridge compatibility)
12
+ 2. Ingest and chunk changed files
13
+ 3. Metadata classification + entity resolution
14
+ 4. Fact lifecycle (ADD/UPDATE/DEPRECATE/NOOP)
15
+ 5. Vector index update
16
+ 6. Autonomous journal delta generation
17
+ 7. Lesson promotion from stable signals
18
+ 8. Safety check on retrieval packs
19
+ """
20
+ import argparse
21
+ import json
22
+ import os
23
+ import signal
24
+ import sys
25
+ import time
26
+ import traceback
27
+ from pathlib import Path
28
+ from datetime import datetime, timezone
29
+
30
+ # Allow running from scripts/memory/ directory
31
+ _HERE = Path(__file__).resolve().parent
32
+ sys.path.insert(0, str(_HERE.parent))
33
+
34
+ from autonomy.schema import get_db
35
+ from autonomy.ingest_pipeline import IngestPipeline
36
+ from autonomy.lifecycle_engine import LifecycleEngine
37
+ from autonomy.entity_resolver import EntityResolver
38
+
39
+
40
# Global lock-file location; set once in main() after the repo root is known.
# Helpers fail fast via _require_lock_path() if used before initialization.
LOCK_PATH: Path | None = None
# Keys for rows in the autonomy_state table.
# NOTE(review): run_once() hardcodes the literal strings 'last_run_ts' /
# 'cycle_count' instead of referencing these constants — confirm and unify.
STATE_KEY_LAST_RUN = "last_run_ts"
STATE_KEY_CYCLE = "cycle_count"
SCHEDULE_INTERVAL_S = int(os.getenv("MNEMO_SCHEDULE_INTERVAL", "300"))  # 5 min default
MAX_LOCK_AGE_S = 600  # stale lock timeout
45
+
46
+
47
def resolve_memory_root(repo_root: Path) -> Path:
    """Locate the memory directory for *repo_root*.

    Precedence: the MNEMO_MEMORY_ROOT environment override, then an
    existing .mnemo/memory, then an existing .cursor/memory (legacy
    bridge), defaulting to .mnemo/memory even when neither exists yet.
    """
    env_root = os.getenv("MNEMO_MEMORY_ROOT", "").strip()
    if env_root:
        return Path(env_root).expanduser().resolve()

    preferred = repo_root / ".mnemo" / "memory"
    legacy = repo_root / ".cursor" / "memory"
    if preferred.exists():
        return preferred
    if legacy.exists():
        return legacy
    return preferred
60
+
61
+
62
def _require_lock_path() -> Path:
    """Return the module-level LOCK_PATH, raising if main() has not set it."""
    path = LOCK_PATH
    if path is None:
        raise RuntimeError("LOCK_PATH is not initialized")
    return path
66
+
67
+
68
def _acquire_lock() -> bool:
    """Try to take the runner lock file; return True on success.

    A lock older than MAX_LOCK_AGE_S is treated as stale (a crashed runner)
    and removed. Any OS error is treated as "could not acquire".
    """
    lock_path = _require_lock_path()
    lock_path.parent.mkdir(parents=True, exist_ok=True)
    if lock_path.exists():
        try:
            age = time.time() - lock_path.stat().st_mtime
            if age < MAX_LOCK_AGE_S:
                return False
            # Stale lock — remove and proceed
            lock_path.unlink()
        except OSError:
            return False
    try:
        # FIX: create the lock atomically with O_CREAT|O_EXCL. The old
        # exists()-then-write_text sequence had a TOCTOU race where two
        # runners could both pass the check and both "acquire" the lock.
        fd = os.open(str(lock_path), os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        with os.fdopen(fd, "w", encoding="utf-8") as fh:
            fh.write(str(os.getpid()))
        return True
    except OSError:
        # Includes FileExistsError: someone else won the race.
        return False
86
+
87
+
88
def _release_lock() -> None:
    """Best-effort removal of the runner lock file; OS errors are ignored."""
    try:
        _require_lock_path().unlink(missing_ok=True)
    except OSError:
        pass
95
+
96
+
97
+ def _emit_log(level: str, msg: str) -> None:
98
+ ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
99
+ print(f"[{ts}] [{level}] {msg}", flush=True)
100
+
101
+
102
def run_once(repo_root: Path) -> dict:
    """Execute one autonomous memory cycle. Returns summary dict.

    Pipeline: detect changed memory files → ingest/chunk → resolve entities
    → apply fact lifecycle → write journal delta → promote lessons →
    persist cycle counters. Per-file failures are collected into
    summary["errors"] rather than aborting the whole cycle.
    """
    summary = {"status": "ok", "ingested": 0, "facts_added": 0, "facts_deprecated": 0, "errors": []}

    db = get_db()
    ingester = IngestPipeline(db=db, repo_root=repo_root)
    lifecycle = LifecycleEngine(db=db)
    resolver = EntityResolver(db=db)

    # 1. Detect changed files
    changed = ingester.detect_changes()
    _emit_log("INFO", f"Detected {len(changed)} changed memory files")

    # 2. Ingest each changed file
    for path in changed:
        try:
            units = ingester.ingest_file(path)
            summary["ingested"] += len(units)

            # 3. Entity resolution per unit
            for unit in units:
                resolver.resolve(unit)

            # 4. Fact lifecycle decisions
            # Counted operations: only ADD and DEPRECATE surface in the
            # summary; UPDATE/NOOP are applied silently.
            for unit in units:
                result = lifecycle.process(unit)
                if result.operation == "ADD":
                    summary["facts_added"] += 1
                elif result.operation == "DEPRECATE":
                    summary["facts_deprecated"] += 1

        except Exception as e:
            # Isolate failures to the offending file; keep processing the rest.
            err = f"{path}: {e}"
            summary["errors"].append(err)
            _emit_log("ERROR", err)

    # 5. Autonomous journal delta
    # Best-effort: a journal write failure must not fail the cycle.
    if summary["ingested"] > 0:
        try:
            _write_autonomy_journal_delta(repo_root, summary)
        except Exception as e:
            _emit_log("WARN", f"Journal delta failed: {e}")

    # 6. Lesson promotion (stable signals → new lessons)
    try:
        promoted = lifecycle.promote_lessons(repo_root=repo_root)
        if promoted:
            _emit_log("INFO", f"Promoted {len(promoted)} new lessons")
            summary["lessons_promoted"] = len(promoted)
    except Exception as e:
        _emit_log("WARN", f"Lesson promotion failed: {e}")

    # 7. Persist cycle state
    now_ts = str(time.time())
    db.execute(
        "INSERT OR REPLACE INTO autonomy_state(key, value, updated_at) VALUES ('last_run_ts', ?, unixepoch('now'))",
        (now_ts,),
    )
    # Upsert keeps a monotonically increasing cycle counter across runs.
    db.execute(
        """
        INSERT INTO autonomy_state(key, value, updated_at) VALUES ('cycle_count', '1', unixepoch('now'))
        ON CONFLICT(key) DO UPDATE SET
            value = CAST(CAST(value AS INTEGER) + 1 AS TEXT),
            updated_at = unixepoch('now')
        """
    )
    db.commit()
    # NOTE(review): this closes the connection returned by get_db() —
    # confirm get_db() hands out fresh connections rather than a shared
    # singleton that later callers still expect to be open.
    db.close()

    # Any per-file error downgrades the cycle to "partial" (never "error").
    if summary["errors"]:
        summary["status"] = "partial"
    return summary
174
+
175
+
176
def _write_autonomy_journal_delta(repo_root: Path, summary: dict) -> None:
    """Append (or create) this month's journal entry describing the cycle.

    Idempotent per day: if today's heading already carries an Autonomy
    entry, nothing is written.
    """
    today = datetime.now().strftime("%Y-%m-%d")
    month = today[:7]
    journal_path = resolve_memory_root(repo_root) / "journal" / f"{month}.md"
    journal_path.parent.mkdir(parents=True, exist_ok=True)

    # Compact stats fragment, e.g. "3 facts added, 1 deprecated".
    stats = [f"{summary['facts_added']} facts added"]
    if summary.get("facts_deprecated"):
        stats.append(f"{summary['facts_deprecated']} deprecated")
    if summary.get("lessons_promoted"):
        stats.append(f"{summary['lessons_promoted']} lessons promoted")
    facts_line = ", ".join(stats)

    entry_lines = [
        f"- [Process][Autonomy] Auto-cycle: ingested {summary['ingested']} units ({facts_line})",
        "  - System: Mnemo autonomous runner (no human in loop)",
    ]
    if summary.get("errors"):
        entry_lines.append(f"  - Warnings: {len(summary['errors'])} errors (see runner log)")
    entry = "\n".join(entry_lines)

    date_heading = f"## {today}"

    if not journal_path.exists():
        # Fresh monthly file: title, today's heading, and the entry.
        header = f"# Development Journal - {repo_root.name} ({month})"
        journal_path.write_text(f"{header}\n\n{date_heading}\n\n{entry}\n", encoding="utf-8")
        return

    # utf-8-sig tolerates a BOM left by Windows editors.
    text = journal_path.read_text(encoding="utf-8-sig")
    if date_heading in text and "[Process][Autonomy]" in text:
        return  # Don't spam: one autonomy entry per day per file
    if date_heading in text:
        text = text.rstrip() + "\n\n" + entry + "\n"
    else:
        text = text.rstrip() + f"\n\n{date_heading}\n\n{entry}\n"
    journal_path.write_text(text, encoding="utf-8")
212
+
213
+
214
def run_schedule(repo_root: Path) -> None:
    """Run continuously on a fixed interval schedule.

    Installs SIGINT/SIGTERM handlers that release the lock before exiting,
    then loops forever: acquire lock → run_once → release → sleep.
    """
    _emit_log("INFO", f"Scheduler started (interval={SCHEDULE_INTERVAL_S}s)")

    def _handle_signal(sig, frame):
        # Release the lock so the next runner doesn't have to wait out
        # the MAX_LOCK_AGE_S stale-lock timeout.
        _emit_log("INFO", "Runner received shutdown signal")
        _release_lock()
        sys.exit(0)

    signal.signal(signal.SIGINT, _handle_signal)
    signal.signal(signal.SIGTERM, _handle_signal)

    while True:
        if _acquire_lock():
            try:
                summary = run_once(repo_root)
                _emit_log("INFO", f"Cycle complete: {json.dumps(summary)}")
            except Exception:
                # Never let one bad cycle kill the scheduler loop.
                _emit_log("ERROR", traceback.format_exc())
            finally:
                _release_lock()
        else:
            # Another process (e.g. a git-hook invocation) holds the lock.
            _emit_log("DEBUG", "Another runner is active; skipping cycle")
        time.sleep(SCHEDULE_INTERVAL_S)
238
+
239
+
240
def main() -> int:
    """CLI entry point. Returns a process exit code (0 success, 1 failure)."""
    ap = argparse.ArgumentParser(description="Mnemo autonomous memory runner")
    ap.add_argument("--mode", choices=["auto", "schedule", "once"], default="once",
                    help="auto=once+return, schedule=loop, once=single run")
    ap.add_argument("--repo", default=str(Path.cwd()), help="Repo root directory")
    args = ap.parse_args()

    repo_root = Path(args.repo).resolve()
    # Downstream pipelines resolve relative paths against the repo root.
    os.chdir(repo_root)
    global LOCK_PATH
    LOCK_PATH = resolve_memory_root(repo_root) / ".autonomy" / "runner.lock"

    if args.mode == "schedule":
        # run_schedule() loops forever; control only returns via signals.
        run_schedule(repo_root)
        return 0

    # --mode auto and --mode once behave identically here: one guarded cycle.
    if not _acquire_lock():
        _emit_log("INFO", "Another runner is active; exiting")
        return 0

    try:
        summary = run_once(repo_root)
        _emit_log("INFO", f"Done: {json.dumps(summary)}")
        # NOTE(review): run_once() only ever sets "ok"/"partial", so this
        # branch currently always returns 0 — confirm that is intended.
        return 0 if summary["status"] != "error" else 1
    except Exception:
        _emit_log("ERROR", traceback.format_exc())
        return 1
    finally:
        _release_lock()


if __name__ == "__main__":
    raise SystemExit(main())