titan-agent 5.3.2 → 5.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,192 @@
1
+ #!/usr/bin/env node
2
+ const STOP_WORDS = /* @__PURE__ */ new Set([
3
+ "a",
4
+ "an",
5
+ "the",
6
+ "is",
7
+ "it",
8
+ "in",
9
+ "on",
10
+ "at",
11
+ "to",
12
+ "of",
13
+ "do",
14
+ "you",
15
+ "we",
16
+ "i",
17
+ "me",
18
+ "my",
19
+ "that",
20
+ "this",
21
+ "was",
22
+ "are",
23
+ "be",
24
+ "been",
25
+ "have",
26
+ "has",
27
+ "had",
28
+ "and",
29
+ "or",
30
+ "but",
31
+ "if",
32
+ "so",
33
+ "not",
34
+ "no",
35
+ "yes",
36
+ "can",
37
+ "how",
38
+ "what",
39
+ "about",
40
+ "from",
41
+ "with",
42
+ "for",
43
+ "up",
44
+ "out",
45
+ "its",
46
+ "our",
47
+ "your",
48
+ "they",
49
+ "them",
50
+ "he",
51
+ "she",
52
+ "his",
53
+ "her",
54
+ "will",
55
+ "would",
56
+ "could",
57
+ "should",
58
+ "did",
59
+ "does",
60
+ "just",
61
+ "now",
62
+ "some",
63
+ "any",
64
+ "all",
65
+ "very",
66
+ "too",
67
+ "also",
68
+ "than",
69
+ "then",
70
+ "when",
71
+ "where",
72
+ "who",
73
+ "which",
74
+ "there",
75
+ "here",
76
+ "again",
77
+ "today",
78
+ "earlier",
79
+ "remember"
80
+ ]);
81
+ function tokenize(text) {
82
+ if (!text) return [];
83
+ return text.toLowerCase().replace(/[^a-z0-9\- ]+/g, " ").split(/\s+/).filter((t) => t.length > 1 && !STOP_WORDS.has(t));
84
+ }
85
+ class MemoryIndex {
86
+ /** token → array of postings */
87
+ postings = /* @__PURE__ */ new Map();
88
+ /** episode count, used to compute IDF */
89
+ docCount = 0;
90
+ /** episode IDs we've indexed, used for `removeEpisode` and `has` */
91
+ indexed = /* @__PURE__ */ new Set();
92
+ /** Add (or re-add) an episode to the index. Idempotent — calling twice
93
+ * with the same id replaces the previous entry. */
94
+ addEpisode(episodeId, content) {
95
+ if (this.indexed.has(episodeId)) {
96
+ this.removeEpisode(episodeId);
97
+ }
98
+ const tokens = tokenize(content);
99
+ if (tokens.length === 0) {
100
+ this.indexed.add(episodeId);
101
+ this.docCount += 1;
102
+ return;
103
+ }
104
+ const tf = /* @__PURE__ */ new Map();
105
+ const headTokens = new Set(tokenize(content.slice(0, 100)));
106
+ for (const t of tokens) tf.set(t, (tf.get(t) ?? 0) + 1);
107
+ for (const [token, count] of tf) {
108
+ const list = this.postings.get(token) ?? [];
109
+ list.push({ episodeId, tf: count, inHead: headTokens.has(token) });
110
+ this.postings.set(token, list);
111
+ }
112
+ this.indexed.add(episodeId);
113
+ this.docCount += 1;
114
+ }
115
+ /** Remove an episode from the index. Used when pruning. */
116
+ removeEpisode(episodeId) {
117
+ if (!this.indexed.has(episodeId)) return;
118
+ for (const [token, list] of this.postings) {
119
+ const filtered = list.filter((p) => p.episodeId !== episodeId);
120
+ if (filtered.length === 0) this.postings.delete(token);
121
+ else if (filtered.length !== list.length) this.postings.set(token, filtered);
122
+ }
123
+ this.indexed.delete(episodeId);
124
+ this.docCount = Math.max(0, this.docCount - 1);
125
+ }
126
+ /** True if the episode is currently indexed. */
127
+ has(episodeId) {
128
+ return this.indexed.has(episodeId);
129
+ }
130
+ /** Number of episodes in the index. */
131
+ size() {
132
+ return this.docCount;
133
+ }
134
+ /** Number of unique tokens (vocabulary size). */
135
+ vocabularySize() {
136
+ return this.postings.size;
137
+ }
138
+ /** Search the index. Returns up to `limit` matches sorted by score.
139
+ * Score is BM25-lite: sum over query terms of (tf × idf) + headBoost.
140
+ * Empty query returns empty array. */
141
+ search(query, limit = 20) {
142
+ const queryTokens = tokenize(query);
143
+ if (queryTokens.length === 0) return [];
144
+ const scoreById = /* @__PURE__ */ new Map();
145
+ for (const term of queryTokens) {
146
+ const postings = this.postings.get(term);
147
+ if (!postings || postings.length === 0) continue;
148
+ const df = postings.length;
149
+ const idf = 1 + Math.log((this.docCount + 1) / (df + 1));
150
+ for (const p of postings) {
151
+ const termScore = p.tf * idf + (p.inHead ? 0.5 : 0);
152
+ const acc = scoreById.get(p.episodeId) ?? { score: 0, matched: /* @__PURE__ */ new Set() };
153
+ acc.score += termScore;
154
+ acc.matched.add(term);
155
+ scoreById.set(p.episodeId, acc);
156
+ }
157
+ }
158
+ const matches = [];
159
+ for (const [episodeId, { score, matched }] of scoreById) {
160
+ matches.push({ episodeId, score, matchedTerms: Array.from(matched) });
161
+ }
162
+ matches.sort((a, b) => b.score - a.score);
163
+ return matches.slice(0, limit);
164
+ }
165
+ /** Drop all entries — used for tests + full rebuilds. */
166
+ clear() {
167
+ this.postings.clear();
168
+ this.indexed.clear();
169
+ this.docCount = 0;
170
+ }
171
+ /** Build a fresh index from a list of episodes. */
172
+ static fromEpisodes(episodes) {
173
+ const idx = new MemoryIndex();
174
+ for (const ep of episodes) idx.addEpisode(ep.id, ep.content);
175
+ return idx;
176
+ }
177
+ }
178
+ let _instance = null;
179
+ function getMemoryIndex() {
180
+ if (!_instance) _instance = new MemoryIndex();
181
+ return _instance;
182
+ }
183
+ function _resetMemoryIndexForTests() {
184
+ _instance = new MemoryIndex();
185
+ }
186
+ export {
187
+ MemoryIndex,
188
+ _resetMemoryIndexForTests,
189
+ getMemoryIndex,
190
+ tokenize
191
+ };
192
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/memory/index.ts"],"sourcesContent":["/**\n * Inverted-Index Keyword Search (Phase 9 / Track B2)\n *\n * `searchMemory()` in graph.ts used to scan every episode linearly with a\n * BM25-ish score per term per episode — at 5000 episodes and 5 terms,\n * that's 25 000 substring searches per query. This module trades a bit of\n * memory for a constant-time-per-query lookup: token → posting-list of\n * episode IDs + per-doc term frequency.\n *\n * Sized for TITAN's typical workload:\n * - ~5000 episodes max (MAX_EPISODES bound)\n * - ~50 tokens per episode after tokenisation\n * - Index footprint ≈ 250 000 (token, episodeId, tf) tuples — single-digit MB.\n *\n * Not a full-text engine. Tokens are lowercased, punctuation stripped,\n * stop-words filtered (same set as the legacy linear scan). No stemming\n * or fuzzy match — that's what vectors.ts is for. The contract is:\n * \"what the linear scan returned, faster\".\n *\n * Usage:\n * const index = new MemoryIndex();\n * for (const ep of episodes) index.addEpisode(ep.id, ep.content);\n * const matches = index.search('weather forecast', 20);\n * // → [{ episodeId, score }, ...] sorted by score desc\n *\n * Indexes can be rebuilt from the underlying graph at any time\n * (`MemoryIndex.fromEpisodes(eps)`), so we don't bother with persistence.\n * Memory cost is small enough that recomputing on startup is cheap.\n */\n\nconst STOP_WORDS = new Set([\n 'a', 'an', 'the', 'is', 'it', 'in', 'on', 'at', 'to', 'of', 'do', 'you', 'we', 'i',\n 'me', 'my', 'that', 'this', 'was', 'are', 'be', 'been', 'have', 'has', 'had', 'and',\n 'or', 'but', 'if', 'so', 'not', 'no', 'yes', 'can', 'how', 'what', 'about', 'from',\n 'with', 'for', 'up', 'out', 'its', 'our', 'your', 'they', 'them', 'he', 'she', 'his',\n 'her', 'will', 'would', 'could', 'should', 'did', 'does', 'just', 'now', 'some', 'any',\n 'all', 'very', 'too', 'also', 'than', 'then', 'when', 'where', 'who', 'which', 'there',\n 'here', 'again', 'today', 'earlier', 'remember',\n]);\n\n/** Tokenise a string for indexing/search. Lowercase, strip non-alphanum\n * except hyphens (kept for words like \"self-improve\"), drop stop words,\n * drop tokens shorter than 2 chars. */\nexport function tokenize(text: string): string[] {\n if (!text) return [];\n return text\n .toLowerCase()\n .replace(/[^a-z0-9\\- ]+/g, ' ')\n .split(/\\s+/)\n .filter(t => t.length > 1 && !STOP_WORDS.has(t));\n}\n\n/** A single posting-list entry for a (token, episode) pair. */\ninterface Posting {\n episodeId: string;\n /** Term frequency within this episode. */\n tf: number;\n /** True if the term appears in the first 100 chars of the episode\n * content — used for the \"title boost\" the legacy scan applied. */\n inHead: boolean;\n}\n\n/** Search hit, sorted by score in `search()`. */\nexport interface IndexMatch {\n episodeId: string;\n /** TF-IDF-ish score. Higher = more relevant. */\n score: number;\n /** Which query terms matched this episode (debug aid). */\n matchedTerms: string[];\n}\n\nexport class MemoryIndex {\n /** token → array of postings */\n private postings = new Map<string, Posting[]>();\n /** episode count, used to compute IDF */\n private docCount = 0;\n /** episode IDs we've indexed, used for `removeEpisode` and `has` */\n private indexed = new Set<string>();\n\n /** Add (or re-add) an episode to the index. Idempotent — calling twice\n * with the same id replaces the previous entry. */\n addEpisode(episodeId: string, content: string): void {\n if (this.indexed.has(episodeId)) {\n this.removeEpisode(episodeId);\n }\n const tokens = tokenize(content);\n if (tokens.length === 0) {\n // Still mark as indexed so subsequent re-adds don't double-count.\n this.indexed.add(episodeId);\n this.docCount += 1;\n return;\n }\n\n // Compute term frequencies + head-presence\n const tf = new Map<string, number>();\n const headTokens = new Set(tokenize(content.slice(0, 100)));\n for (const t of tokens) tf.set(t, (tf.get(t) ?? 0) + 1);\n\n for (const [token, count] of tf) {\n const list = this.postings.get(token) ?? [];\n list.push({ episodeId, tf: count, inHead: headTokens.has(token) });\n this.postings.set(token, list);\n }\n this.indexed.add(episodeId);\n this.docCount += 1;\n }\n\n /** Remove an episode from the index. Used when pruning. */\n removeEpisode(episodeId: string): void {\n if (!this.indexed.has(episodeId)) return;\n for (const [token, list] of this.postings) {\n const filtered = list.filter(p => p.episodeId !== episodeId);\n if (filtered.length === 0) this.postings.delete(token);\n else if (filtered.length !== list.length) this.postings.set(token, filtered);\n }\n this.indexed.delete(episodeId);\n this.docCount = Math.max(0, this.docCount - 1);\n }\n\n /** True if the episode is currently indexed. */\n has(episodeId: string): boolean {\n return this.indexed.has(episodeId);\n }\n\n /** Number of episodes in the index. */\n size(): number {\n return this.docCount;\n }\n\n /** Number of unique tokens (vocabulary size). */\n vocabularySize(): number {\n return this.postings.size;\n }\n\n /** Search the index. Returns up to `limit` matches sorted by score.\n * Score is BM25-lite: sum over query terms of (tf × idf) + headBoost.\n * Empty query returns empty array. */\n search(query: string, limit = 20): IndexMatch[] {\n const queryTokens = tokenize(query);\n if (queryTokens.length === 0) return [];\n\n // Per-episode score accumulator\n const scoreById = new Map<string, { score: number; matched: Set<string> }>();\n\n for (const term of queryTokens) {\n const postings = this.postings.get(term);\n if (!postings || postings.length === 0) continue;\n\n // IDF — log smoothing to dampen common terms.\n // 1 + log((docCount+1)/(df+1)) keeps it positive even when df==docCount.\n const df = postings.length;\n const idf = 1 + Math.log((this.docCount + 1) / (df + 1));\n\n for (const p of postings) {\n // tf × idf, with a flat bonus when the term is in the\n // first 100 chars (cheap \"title boost\" the legacy scan had)\n const termScore = p.tf * idf + (p.inHead ? 0.5 : 0);\n const acc = scoreById.get(p.episodeId) ?? { score: 0, matched: new Set<string>() };\n acc.score += termScore;\n acc.matched.add(term);\n scoreById.set(p.episodeId, acc);\n }\n }\n\n const matches: IndexMatch[] = [];\n for (const [episodeId, { score, matched }] of scoreById) {\n matches.push({ episodeId, score, matchedTerms: Array.from(matched) });\n }\n matches.sort((a, b) => b.score - a.score);\n return matches.slice(0, limit);\n }\n\n /** Drop all entries — used for tests + full rebuilds. */\n clear(): void {\n this.postings.clear();\n this.indexed.clear();\n this.docCount = 0;\n }\n\n /** Build a fresh index from a list of episodes. */\n static fromEpisodes(episodes: Array<{ id: string; content: string }>): MemoryIndex {\n const idx = new MemoryIndex();\n for (const ep of episodes) idx.addEpisode(ep.id, ep.content);\n return idx;\n }\n}\n\n/** Module-level singleton used by graph.ts. Cleared + rebuilt by tests. */\nlet _instance: MemoryIndex | null = null;\n\nexport function getMemoryIndex(): MemoryIndex {\n if (!_instance) _instance = new MemoryIndex();\n return _instance;\n}\n\n/** Test-only: reset the singleton between scenarios. */\nexport function _resetMemoryIndexForTests(): void {\n _instance = new MemoryIndex();\n}\n"],"mappings":";AA8BA,MAAM,aAAa,oBAAI,IAAI;AAAA,EACvB;AAAA,EAAK;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAM;AAAA,EAAM;AAAA,EAAM;AAAA,EAAM;AAAA,EAAM;AAAA,EAAM;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAC/E;AAAA,EAAM;AAAA,EAAM;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAO;AAAA,EAAM;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAO;AAAA,EAC9E;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAM;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAO;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAS;AAAA,EAC5E;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAM;AAAA,EAAO;AAAA,EAAO;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAM;AAAA,EAAO;AAAA,EAC/E;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAS;AAAA,EAAU;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAQ;AAAA,EACjF;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAO;AAAA,EAAS;AAAA,EAC/E;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAS;AAAA,EAAW;AACzC,CAAC;AAKM,SAAS,SAAS,MAAwB;AAC7C,MAAI,CAAC,KAAM,QAAO,CAAC;AACnB,SAAO,KACF,YAAY,EACZ,QAAQ,kBAAkB,GAAG,EAC7B,MAAM,KAAK,EACX,OAAO,OAAK,EAAE,SAAS,KAAK,CAAC,WAAW,IAAI,CAAC,CAAC;AACvD;AAqBO,MAAM,YAAY;AAAA;AAAA,EAEb,WAAW,oBAAI,IAAuB;AAAA;AAAA,EAEtC,WAAW;AAAA;AAAA,EAEX,UAAU,oBAAI,IAAY;AAAA;AAAA;AAAA,EAIlC,WAAW,WAAmB,SAAuB;AACjD,QAAI,KAAK,QAAQ,IAAI,SAAS,GAAG;AAC7B,WAAK,cAAc,SAAS;AAAA,IAChC;AACA,UAAM,SAAS,SAAS,OAAO;AAC/B,QAAI,OAAO,WAAW,GAAG;AAErB,WAAK,QAAQ,IAAI,SAAS;AAC1B,WAAK,YAAY;AACjB;AAAA,IACJ;AAGA,UAAM,KAAK,oBAAI,IAAoB;AACnC,UAAM,aAAa,IAAI,IAAI,SAAS,QAAQ,MAAM,GAAG,GAAG,CAAC,CAAC;AAC1D,eAAW,KAAK,OAAQ,IAAG,IAAI,IAAI,GAAG,IAAI,CAAC,KAAK,KAAK,CAAC;AAEtD,eAAW,CAAC,OAAO,KAAK,KAAK,IAAI;AAC7B,YAAM,OAAO,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC;AAC1C,WAAK,KAAK,EAAE,WAAW,IAAI,OAAO,QAAQ,WAAW,IAAI,KAAK,EAAE,CAAC;AACjE,WAAK,SAAS,IAAI,OAAO,IAAI;AAAA,IACjC;AACA,SAAK,QAAQ,IAAI,SAAS;AAC1B,SAAK,YAAY;AAAA,EACrB;AAAA;AAAA,EAGA,cAAc,WAAyB;AACnC,QAAI,CAAC,KAAK,QAAQ,IAAI,SAAS,EAAG;AAClC,eAAW,CAAC,OAAO,IAAI,KAAK,KAAK,UAAU;AACvC,YAAM,WAAW,KAAK,OAAO,OAAK,EAAE,cAAc,SAAS;AAC3D,UAAI,SAAS,WAAW,EAAG,MAAK,SAAS,OAAO,KAAK;AAAA,eAC5C,SAAS,WAAW,KAAK,OAAQ,MAAK,SAAS,IAAI,OAAO,QAAQ;AAAA,IAC/E;AACA,SAAK,QAAQ,OAAO,SAAS;AAC7B,SAAK,WAAW,KAAK,IAAI,GAAG,KAAK,WAAW,CAAC;AAAA,EACjD;AAAA;AAAA,EAGA,IAAI,WAA4B;AAC5B,WAAO,KAAK,QAAQ,IAAI,SAAS;AAAA,EACrC;AAAA;AAAA,EAGA,OAAe;AACX,WAAO,KAAK;AAAA,EAChB;AAAA;AAAA,EAGA,iBAAyB;AACrB,WAAO,KAAK,SAAS;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAe,QAAQ,IAAkB;AAC5C,UAAM,cAAc,SAAS,KAAK;AAClC,QAAI,YAAY,WAAW,EAAG,QAAO,CAAC;AAGtC,UAAM,YAAY,oBAAI,IAAqD;AAE3E,eAAW,QAAQ,aAAa;AAC5B,YAAM,WAAW,KAAK,SAAS,IAAI,IAAI;AACvC,UAAI,CAAC,YAAY,SAAS,WAAW,EAAG;AAIxC,YAAM,KAAK,SAAS;AACpB,YAAM,MAAM,IAAI,KAAK,KAAK,KAAK,WAAW,MAAM,KAAK,EAAE;AAEvD,iBAAW,KAAK,UAAU;AAGtB,cAAM,YAAY,EAAE,KAAK,OAAO,EAAE,SAAS,MAAM;AACjD,cAAM,MAAM,UAAU,IAAI,EAAE,SAAS,KAAK,EAAE,OAAO,GAAG,SAAS,oBAAI,IAAY,EAAE;AACjF,YAAI,SAAS;AACb,YAAI,QAAQ,IAAI,IAAI;AACpB,kBAAU,IAAI,EAAE,WAAW,GAAG;AAAA,MAClC;AAAA,IACJ;AAEA,UAAM,UAAwB,CAAC;AAC/B,eAAW,CAAC,WAAW,EAAE,OAAO,QAAQ,CAAC,KAAK,WAAW;AACrD,cAAQ,KAAK,EAAE,WAAW,OAAO,cAAc,MAAM,KAAK,OAAO,EAAE,CAAC;AAAA,IACxE;AACA,YAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AACxC,WAAO,QAAQ,MAAM,GAAG,KAAK;AAAA,EACjC;AAAA;AAAA,EAGA,QAAc;AACV,SAAK,SAAS,MAAM;AACpB,SAAK,QAAQ,MAAM;AACnB,SAAK,WAAW;AAAA,EACpB;AAAA;AAAA,EAGA,OAAO,aAAa,UAA+D;AAC/E,UAAM,MAAM,IAAI,YAAY;AAC5B,eAAW,MAAM,SAAU,KAAI,WAAW,GAAG,IAAI,GAAG,OAAO;AAC3D,WAAO;AAAA,EACX;AACJ;AAGA,IAAI,YAAgC;AAE7B,SAAS,iBAA8B;AAC1C,MAAI,CAAC,UAAW,aAAY,IAAI,YAAY;AAC5C,SAAO;AACX;AAGO,SAAS,4BAAkC;AAC9C,cAAY,IAAI,YAAY;AAChC;","names":[]}
@@ -274,6 +274,7 @@ function getUsageStats() {
274
274
  export {
275
275
  clearHistory,
276
276
  closeMemory,
277
+ debouncedSave,
277
278
  getDb,
278
279
  getHistory,
279
280
  getUsageStats,
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/memory/memory.ts"],"sourcesContent":["/**\n * TITAN — Memory / Persistence System\n * JSON-file-backed persistent memory for conversations, facts, preferences, and usage.\n * Uses no native dependencies — pure Node.js for maximum portability.\n */\nimport { existsSync, readFileSync, writeFileSync, renameSync } from 'fs';\nimport { join } from 'path';\nimport { TITAN_HOME } from '../utils/constants.js';\nimport { ensureDir } from '../utils/helpers.js';\nimport logger from '../utils/logger.js';\nimport { encrypt, decrypt, type EncryptedPayload } from '../security/encryption.js';\nimport { isVectorSearchAvailable, searchVectors, addVector } from './vectors.js';\n\nconst COMPONENT = 'Memory';\n\n// ─── Data Store ──────────────────────────────────────────────────\n\ninterface DataStore {\n conversations: ConversationMessage[];\n memories: MemoryEntry[];\n sessions: SessionRecord[];\n usageStats: UsageRecord[];\n cronJobs: CronRecord[];\n skillsInstalled: SkillRecord[];\n}\n\ninterface MemoryEntry {\n id: string;\n category: string;\n key: string;\n value: string;\n metadata?: string;\n createdAt: string;\n updatedAt: string;\n}\n\ninterface SessionRecord {\n id: string;\n channel: string;\n user_id: string;\n agent_id: string;\n status: string;\n message_count: number;\n created_at: string;\n last_active: string;\n name?: string;\n last_message?: string;\n // D3: Persisted session overrides (survive session recovery after timeout/restart)\n model_override?: string;\n thinking_override?: string;\n // Hunt Finding #19 (2026-04-14): true when this session was created via an\n // explicit sessionId (getOrCreateSessionById). Named sessions MUST NOT be\n // returned by the default-slot lookup in getOrCreateSession — otherwise\n // subsequent no-sessionId requests from the same channel+user+agent will\n // inherit the most recent named session's history, causing privacy bleed\n // between API callers.\n is_named?: boolean;\n}\n\ninterface UsageRecord {\n id: number;\n session_id: string;\n provider: string;\n model: string;\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n created_at: string;\n}\n\ninterface CronRecord {\n id: string;\n name: string;\n schedule: string;\n command: string;\n mode?: 'shell' | 'tool'; // Execution mode (default: shell for backward compat)\n allowedTools?: string[]; // Tool allowlist for tool-mode jobs\n enabled: boolean;\n last_run?: string;\n next_run?: string;\n created_at: string;\n}\n\ninterface SkillRecord {\n name: string;\n version: string;\n source: string;\n enabled: boolean;\n installed_at: string;\n}\n\nconst DB_FILE = join(TITAN_HOME, 'titan-data.json');\n\nlet store: DataStore | null = null;\nlet dirty = false;\nlet isShuttingDown = false;\n\nfunction getDefaultStore(): DataStore {\n return {\n conversations: [],\n memories: [],\n sessions: [],\n usageStats: [],\n cronJobs: [],\n skillsInstalled: [],\n };\n}\n\n// NOTE: Sync I/O is intentional — runs only once at cold start, then cached in-memory.\nfunction loadStore(): DataStore {\n if (store) return store;\n ensureDir(TITAN_HOME);\n if (existsSync(DB_FILE)) {\n try {\n const raw = readFileSync(DB_FILE, 'utf-8');\n store = JSON.parse(raw) as DataStore;\n // Ensure all fields exist\n store.conversations = store.conversations || [];\n store.memories = store.memories || [];\n store.sessions = store.sessions || [];\n store.usageStats = store.usageStats || [];\n store.cronJobs = store.cronJobs || [];\n store.skillsInstalled = store.skillsInstalled || [];\n } catch {\n logger.warn(COMPONENT, 'Could not load data store, creating fresh one');\n store = getDefaultStore();\n }\n } else {\n store = getDefaultStore();\n }\n return store;\n}\n\nfunction saveStore(): void {\n if (!store || isShuttingDown) return;\n ensureDir(TITAN_HOME);\n try {\n const tmpFile = DB_FILE + '.tmp';\n writeFileSync(tmpFile, JSON.stringify(store, null, 2), 'utf-8');\n renameSync(tmpFile, DB_FILE);\n dirty = false;\n } catch (e) {\n dirty = true;\n logger.error(COMPONENT, `Failed to save data: ${(e as Error).message}`);\n }\n}\n\n// Auto-save periodically\nlet saveTimeout: ReturnType<typeof setTimeout> | null = null;\nfunction debouncedSave(): void {\n if (dirty) { saveStore(); return; }\n if (saveTimeout) clearTimeout(saveTimeout);\n saveTimeout = setTimeout(saveStore, 1000);\n saveTimeout.unref();\n}\n\n/** Initialize the memory system */\nexport function initMemory(): void {\n loadStore();\n logger.info(COMPONENT, 'Memory system initialized');\n}\n\n/** Close / flush the memory system */\nexport function closeMemory(): void {\n if (saveTimeout) { clearTimeout(saveTimeout); saveTimeout = null; }\n saveStore();\n if (dirty) {\n logger.error(COMPONENT, 'DATA MAY BE LOST — failed to flush memory store on shutdown');\n }\n isShuttingDown = true;\n}\n\n/** Get internal store (for skills like cron that need direct access) */\nexport function getDb(): DataStore {\n return loadStore();\n}\n\n// ─── Conversation History ────────────────────────────────────────\n\nexport interface ConversationMessage {\n id: string;\n sessionId: string;\n role: string;\n content: string;\n toolCalls?: string;\n toolCallId?: string;\n model?: string;\n tokenCount: number;\n createdAt: string;\n isEncrypted?: boolean;\n}\n\n/** Save a message to conversation history */\nexport function saveMessage(message: Omit<ConversationMessage, 'createdAt'>, e2eKey?: string): void {\n const s = loadStore();\n\n let content = message.content;\n let isEncrypted = false;\n\n if (e2eKey) {\n try {\n const payload = encrypt(message.content, Buffer.from(e2eKey, 'base64'));\n content = JSON.stringify(payload);\n isEncrypted = true;\n } catch {\n logger.error(COMPONENT, `Failed to encrypt message for storage`);\n content = \"[ENCRYPTION FAILED] \" + content; // Fallback, though we should probably throw in strict environments\n }\n }\n\n s.conversations.push({\n ...message,\n content,\n isEncrypted,\n createdAt: new Date().toISOString(),\n });\n // Keep only last 5000 messages total to prevent unbounded growth\n if (s.conversations.length > 5000) {\n s.conversations = s.conversations.slice(-5000);\n }\n debouncedSave();\n}\n\n/** Get conversation history for a session */\nexport function getHistory(sessionId: string, limit: number = 50, e2eKey?: string): ConversationMessage[] {\n const s = loadStore();\n const rawHistory = s.conversations\n .filter((m) => m.sessionId === sessionId)\n .slice(-limit);\n\n if (!e2eKey) {\n // If no key is provided, we just return the raw payload. \n // If it's encrypted, it'll just show the JSON string of the EncryptedPayload.\n return rawHistory;\n }\n\n // Decrypt the ones that were encrypted\n return rawHistory.map(m => {\n if (m.isEncrypted) {\n try {\n const payload = JSON.parse(m.content) as EncryptedPayload;\n return {\n ...m,\n content: decrypt(payload, Buffer.from(e2eKey, 'base64'))\n };\n } catch {\n logger.error(COMPONENT, `Failed to decrypt message ${m.id}`);\n return { ...m, content: \"[DECRYPTION FAILED]\" };\n }\n }\n return m;\n });\n}\n\n/** Update session name and/or last message snippet */\nexport function updateSessionMeta(sessionId: string, meta: { name?: string; last_message?: string; model_override?: string; thinking_override?: string }): void {\n const s = loadStore();\n const rec = s.sessions.find(ses => ses.id === sessionId);\n if (!rec) return;\n if (meta.name !== undefined) rec.name = meta.name;\n if (meta.last_message !== undefined) rec.last_message = meta.last_message;\n // D3: Persist session overrides to database so they survive timeout/restart\n if (meta.model_override !== undefined) rec.model_override = meta.model_override;\n if (meta.thinking_override !== undefined) rec.thinking_override = meta.thinking_override;\n debouncedSave();\n}\n\n/** Clear conversation history for a session */\nexport function clearHistory(sessionId: string): void {\n const s = loadStore();\n s.conversations = s.conversations.filter((m) => m.sessionId !== sessionId);\n debouncedSave();\n}\n\n// ─── Persistent Memory (Facts / Preferences) ─────────────────────\n\n/** Store a memory (key-value with category) */\nexport function rememberFact(category: string, key: string, value: string, metadata?: Record<string, unknown>): void {\n const s = loadStore();\n const id = `${category}:${key}`;\n const existingIdx = s.memories.findIndex((m) => m.id === id);\n const now = new Date().toISOString();\n\n if (existingIdx >= 0) {\n s.memories[existingIdx].value = value;\n s.memories[existingIdx].metadata = metadata ? JSON.stringify(metadata) : undefined;\n s.memories[existingIdx].updatedAt = now;\n } else {\n s.memories.push({\n id,\n category,\n key,\n value,\n metadata: metadata ? JSON.stringify(metadata) : undefined,\n createdAt: now,\n updatedAt: now,\n });\n }\n debouncedSave();\n\n // Index to vector store (fire-and-forget)\n if (isVectorSearchAvailable()) {\n addVector(id, `${category}: ${key} = ${value}`, 'memory', { category, key }).catch(e => logger.debug(COMPONENT, `Background vector indexing failed: ${(e as Error).message}`));\n }\n}\n\n/** Recall a specific memory */\nexport function recallFact(category: string, key: string): string | null {\n const s = loadStore();\n const entry = s.memories.find((m) => m.category === category && m.key === key);\n return entry?.value || null;\n}\n\n/** Search memories by category — hybrid keyword + vector search */\nexport async function searchMemories(category?: string, query?: string): Promise<Array<{ key: string; value: string; category: string; score?: number }>> {\n const s = loadStore();\n let results = s.memories;\n\n if (category) {\n results = results.filter((m) => m.category === category);\n }\n if (query) {\n const q = query.toLowerCase();\n // Word-boundary match to avoid false positives (\"use\" matching \"user\", \"reuse\")\n const qRegex = new RegExp('\\\\b' + q.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&') + '\\\\b', 'i');\n results = results.filter((m) =>\n qRegex.test(m.key) || qRegex.test(m.value)\n );\n }\n\n // Keyword scoring\n const scored = results.map(m => {\n let score = 0;\n if (query) {\n const q = query.toLowerCase();\n const keyLower = m.key.toLowerCase();\n const valLower = m.value.toLowerCase();\n // Exact key match scores highest\n if (keyLower === q) score += 5;\n else if (keyLower.includes(q)) score += 2;\n if (valLower.includes(q)) score += 1;\n // BM25-style: boost for multiple keyword matches\n const terms = q.split(/\\s+/).filter(Boolean);\n for (const term of terms) {\n if (keyLower.includes(term)) score += 1;\n if (valLower.includes(term)) score += 0.5;\n }\n }\n return { key: m.key, value: m.value, category: m.category, id: m.id, score };\n });\n\n // Vector search augmentation (hybrid mode)\n if (query && isVectorSearchAvailable()) {\n try {\n const vectorResults = await searchVectors(query, 20, 'memory', 0.4);\n for (const vr of vectorResults) {\n // Skip stale vector IDs that no longer exist in the store\n const memEntry = s.memories.find(m => m.id === vr.id);\n if (!memEntry) continue;\n const existing = scored.find(s => s.id === vr.id);\n if (existing) {\n // Boost keyword results that also match semantically\n existing.score += vr.score * 3;\n } else {\n // Add vector-only results (semantically similar but no keyword match)\n const entry = s.memories.find(m => m.id === vr.id);\n if (entry && (!category || entry.category === category)) {\n scored.push({\n key: entry.key,\n value: entry.value,\n category: entry.category,\n id: entry.id,\n score: vr.score * 2,\n });\n }\n }\n }\n } catch {\n // Vector search failure is non-fatal\n }\n }\n\n scored.sort((a, b) => b.score - a.score);\n // Deduplicate by ID (vector + keyword can match the same entry)\n const seen = new Set<string>();\n const unique = scored.filter(m => { if (seen.has(m.id)) return false; seen.add(m.id); return true; });\n return unique.slice(0, 50).map(m => ({ key: m.key, value: m.value, category: m.category, score: m.score }));\n}\n\n// ─── Usage Tracking ──────────────────────────────────────────────\n\n/** Record usage statistics */\nexport function recordUsage(sessionId: string, provider: string, model: string, promptTokens: number, completionTokens: number): void {\n const s = loadStore();\n s.usageStats.push({\n id: Date.now(),\n session_id: sessionId,\n provider,\n model,\n prompt_tokens: promptTokens,\n completion_tokens: completionTokens,\n total_tokens: promptTokens + completionTokens,\n created_at: new Date().toISOString(),\n });\n // Keep only last 10000 records\n if (s.usageStats.length > 10000) {\n s.usageStats = s.usageStats.slice(-10000);\n }\n debouncedSave();\n}\n\n/** Get total usage statistics */\nexport function getUsageStats(): { totalTokens: number; totalRequests: number; byProvider: Record<string, number> } {\n const s = loadStore();\n let totalTokens = 0;\n const byProvider: Record<string, number> = {};\n\n for (const rec of s.usageStats) {\n totalTokens += rec.total_tokens;\n byProvider[rec.provider] = (byProvider[rec.provider] || 0) + rec.total_tokens;\n }\n\n return {\n totalTokens,\n totalRequests: s.usageStats.length,\n byProvider,\n };\n}\n"],"mappings":";AAKA,SAAS,YAAY,cAAc,eAAe,kBAAkB;AACpE,SAAS,YAAY;AACrB,SAAS,kBAAkB;AAC3B,SAAS,iBAAiB;AAC1B,OAAO,YAAY;AACnB,SAAS,SAAS,eAAsC;AACxD,SAAS,yBAAyB,eAAe,iBAAiB;AAElE,MAAM,YAAY;AA8ElB,MAAM,UAAU,KAAK,YAAY,iBAAiB;AAElD,IAAI,QAA0B;AAC9B,IAAI,QAAQ;AACZ,IAAI,iBAAiB;AAErB,SAAS,kBAA6B;AACpC,SAAO;AAAA,IACL,eAAe,CAAC;AAAA,IAChB,UAAU,CAAC;AAAA,IACX,UAAU,CAAC;AAAA,IACX,YAAY,CAAC;AAAA,IACb,UAAU,CAAC;AAAA,IACX,iBAAiB,CAAC;AAAA,EACpB;AACF;AAGA,SAAS,YAAuB;AAC9B,MAAI,MAAO,QAAO;AAClB,YAAU,UAAU;AACpB,MAAI,WAAW,OAAO,GAAG;AACvB,QAAI;AACF,YAAM,MAAM,aAAa,SAAS,OAAO;AACzC,cAAQ,KAAK,MAAM,GAAG;AAEtB,YAAM,gBAAgB,MAAM,iBAAiB,CAAC;AAC9C,YAAM,WAAW,MAAM,YAAY,CAAC;AACpC,YAAM,WAAW,MAAM,YAAY,CAAC;AACpC,YAAM,aAAa,MAAM,cAAc,CAAC;AACxC,YAAM,WAAW,MAAM,YAAY,CAAC;AACpC,YAAM,kBAAkB,MAAM,mBAAmB,CAAC;AAAA,IACpD,QAAQ;AACN,aAAO,KAAK,WAAW,+CAA+C;AACtE,cAAQ,gBAAgB;AAAA,IAC1B;AAAA,EACF,OAAO;AACL,YAAQ,gBAAgB;AAAA,EAC1B;AACA,SAAO;AACT;AAEA,SAAS,YAAkB;AACzB,MAAI,CAAC,SAAS,eAAgB;AAC9B,YAAU,UAAU;AACpB,MAAI;AACF,UAAM,UAAU,UAAU;AAC1B,kBAAc,SAAS,KAAK,UAAU,OAAO,MAAM,CAAC,GAAG,OAAO;AAC9D,eAAW,SAAS,OAAO;AAC3B,YAAQ;AAAA,EACV,SAAS,GAAG;AACV,YAAQ;AACR,WAAO,MAAM,WAAW,wBAAyB,EAAY,OAAO,EAAE;AAAA,EACxE;AACF;AAGA,IAAI,cAAoD;AACxD,SAAS,gBAAsB;AAC7B,MAAI,OAAO;AAAE,cAAU;AAAG;AAAA,EAAQ;AAClC,MAAI,YAAa,cAAa,WAAW;AACzC,gBAAc,WAAW,WAAW,GAAI;AACxC,cAAY,MAAM;AACpB;AAGO,SAAS,aAAmB;AACjC,YAAU;AACV,SAAO,KAAK,WAAW,2BAA2B;AACpD;AAGO,SAAS,cAAoB;AAClC,MAAI,aAAa;AAAE,iBAAa,WAAW;AAAG,kBAAc;AAAA,EAAM;AAClE,YAAU;AACV,MAAI,OAAO;AACT,WAAO,MAAM,WAAW,kEAA6D;AAAA,EACvF;AACA,mBAAiB;AACnB;AAGO,SAAS,QAAmB;AACjC,SAAO,UAAU;AACnB;AAkBO,SAAS,YAAY,SAAiD,QAAuB;AAClG,QAAM,IAAI,UAAU;AAEpB,MAAI,UAAU,QAAQ;AACtB,MAAI,cAAc;AAElB,MAAI,QAAQ;AACV,QAAI;AACF,YAAM,UAAU,QAAQ,QAAQ,SAAS,OAAO,KAAK,QAAQ,QAAQ,CAAC;AACtE,gBAAU,KAAK,UAAU,OAAO;AAChC,oBAAc;AAAA,IAChB,QAAQ;AACN,aAAO,MAAM,WAAW,uCAAuC;AAC/D,gBAAU,yBAAyB;AAAA,IACrC;AAAA,EACF;AAEA,IAAE,cAAc,KAAK;AAAA,IACnB,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC,CAAC;AAED,MAAI,EAAE,cAAc,SAAS,KAAM;AACjC,MAAE,gBAAgB,EAAE,cAAc,MAAM,IAAK;AAAA,EAC/C;AACA,gBAAc;AAChB;AAGO,SAAS,WAAW,WAAmB,QAAgB,IAAI,QAAwC;AACxG,QAAM,IAAI,UAAU;AACpB,QAAM,aAAa,EAAE,cAClB,OAAO,CAAC,MAAM,EAAE,cAAc,SAAS,EACvC,MAAM,CAAC,KAAK;AAEf,MAAI,CAAC,QAAQ;AAGX,WAAO;AAAA,EACT;AAGA,SAAO,WAAW,IAAI,OAAK;AACzB,QAAI,EAAE,aAAa;AACjB,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,EAAE,OAAO;AACpC,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SAAS,QAAQ,SAAS,OAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,QACzD;AAAA,MACF,QAAQ;AACN,eAAO,MAAM,WAAW,6BAA6B,EAAE,EAAE,EAAE;AAC3D,eAAO,EAAE,GAAG,GAAG,SAAS,sBAAsB;AAAA,MAChD;AAAA,IACF;AACA,WAAO;AAAA,EACT,CAAC;AACH;AAGO,SAAS,kBAAkB,WAAmB,MAA2G;AAC9J,QAAM,IAAI,UAAU;AACpB,QAAM,MAAM,EAAE,SAAS,KAAK,SAAO,IAAI,OAAO,SAAS;AACvD,MAAI,CAAC,IAAK;AACV,MAAI,KAAK,SAAS,OAAW,KAAI,OAAO,KAAK;AAC7C,MAAI,KAAK,iBAAiB,OAAW,KAAI,eAAe,KAAK;AAE7D,MAAI,KAAK,mBAAmB,OAAW,KAAI,iBAAiB,KAAK;AACjE,MAAI,KAAK,sBAAsB,OAAW,KAAI,oBAAoB,KAAK;AACvE,gBAAc;AAChB;AAGO,SAAS,aAAa,WAAyB;AACpD,QAAM,IAAI,UAAU;AACpB,IAAE,gBAAgB,EAAE,cAAc,OAAO,CAAC,MAAM,EAAE,cAAc,SAAS;AACzE,gBAAc;AAChB;AAKO,SAAS,aAAa,UAAkB,KAAa,OAAe,UAA0C;AACnH,QAAM,IAAI,UAAU;AACpB,QAAM,KAAK,GAAG,QAAQ,IAAI,GAAG;AAC7B,QAAM,cAAc,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3D,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,MAAI,eAAe,GAAG;AACpB,MAAE,SAAS,WAAW,EAAE,QAAQ;AAChC,MAAE,SAAS,WAAW,EAAE,WAAW,WAAW,KAAK,UAAU,QAAQ,IAAI;AACzE,MAAE,SAAS,WAAW,EAAE,YAAY;AAAA,EACtC,OAAO;AACL,MAAE,SAAS,KAAK;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,WAAW,KAAK,UAAU,QAAQ,IAAI;AAAA,MAChD,WAAW;AAAA,MACX,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AACA,gBAAc;AAGd,MAAI,wBAAwB,GAAG;AAC7B,cAAU,IAAI,GAAG,QAAQ,KAAK,GAAG,MAAM,KAAK,IAAI,UAAU,EAAE,UAAU,IAAI,CAAC,EAAE,MAAM,OAAK,OAAO,MAAM,WAAW,sCAAuC,EAAY,OAAO,EAAE,CAAC;AAAA,EAC/K;AACF;AAGO,SAAS,WAAW,UAAkB,KAA4B;AACvE,QAAM,IAAI,UAAU;AACpB,QAAM,QAAQ,EAAE,SAAS,KAAK,CAAC,MAAM,EAAE,aAAa,YAAY,EAAE,QAAQ,GAAG;AAC7E,SAAO,OAAO,SAAS;AACzB;AAGA,eAAsB,eAAe,UAAmB,OAAkG;AACxJ,QAAM,IAAI,UAAU;AACpB,MAAI,UAAU,EAAE;AAEhB,MAAI,UAAU;AACZ,cAAU,QAAQ,OAAO,CAAC,MAAM,EAAE,aAAa,QAAQ;AAAA,EACzD;AACA,MAAI,OAAO;AACT,UAAM,IAAI,MAAM,YAAY;AAE5B,UAAM,SAAS,IAAI,OAAO,QAAQ,EAAE,QAAQ,uBAAuB,MAAM,IAAI,OAAO,GAAG;AACvF,cAAU,QAAQ;AAAA,MAAO,CAAC,MACxB,OAAO,KAAK,EAAE,GAAG,KAAK,OAAO,KAAK,EAAE,KAAK;AAAA,IAC3C;AAAA,EACF;AAGA,QAAM,SAAS,QAAQ,IAAI,OAAK;AAC9B,QAAI,QAAQ;AACZ,QAAI,OAAO;AACT,YAAM,IAAI,MAAM,YAAY;AAC5B,YAAM,WAAW,EAAE,IAAI,YAAY;AACnC,YAAM,WAAW,EAAE,MAAM,YAAY;AAErC,UAAI,aAAa,EAAG,UAAS;AAAA,eACpB,SAAS,SAAS,CAAC,EAAG,UAAS;AACxC,UAAI,SAAS,SAAS,CAAC,EAAG,UAAS;AAEnC,YAAM,QAAQ,EAAE,MAAM,KAAK,EAAE,OAAO,OAAO;AAC3C,iBAAW,QAAQ,OAAO;AACxB,YAAI,SAAS,SAAS,IAAI,EAAG,UAAS;AACtC,YAAI,SAAS,SAAS,IAAI,EAAG,UAAS;AAAA,MACxC;AAAA,IACF;AACA,WAAO,EAAE,KAAK,EAAE,KAAK,OAAO,EAAE,OAAO,UAAU,EAAE,UAAU,IAAI,EAAE,IAAI,MAAM;AAAA,EAC7E,CAAC;AAGD,MAAI,SAAS,wBAAwB,GAAG;AACtC,QAAI;AACF,YAAM,gBAAgB,MAAM,cAAc,OAAO,IAAI,UAAU,GAAG;AAClE,iBAAW,MAAM,eAAe;AAE9B,cAAM,WAAW,EAAE,SAAS,KAAK,OAAK,EAAE,OAAO,GAAG,EAAE;AACpD,YAAI,CAAC,SAAU;AACf,cAAM,WAAW,OAAO,KAAK,CAAAA,OAAKA,GAAE,OAAO,GAAG,EAAE;AAChD,YAAI,UAAU;AAEZ,mBAAS,SAAS,GAAG,QAAQ;AAAA,QAC/B,OAAO;AAEL,gBAAM,QAAQ,EAAE,SAAS,KAAK,OAAK,EAAE,OAAO,GAAG,EAAE;AACjD,cAAI,UAAU,CAAC,YAAY,MAAM,aAAa,WAAW;AACvD,mBAAO,KAAK;AAAA,cACV,KAAK,MAAM;AAAA,cACX,OAAO,MAAM;AAAA,cACb,UAAU,MAAM;AAAA,cAChB,IAAI,MAAM;AAAA,cACV,OAAO,GAAG,QAAQ;AAAA,YACpB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEvC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,SAAS,OAAO,OAAO,OAAK;AAAE,QAAI,KAAK,IAAI,EAAE,EAAE,EAAG,QAAO;AAAO,SAAK,IAAI,EAAE,EAAE;AAAG,WAAO;AAAA,EAAM,CAAC;AACpG,SAAO,OAAO,MAAM,GAAG,EAAE,EAAE,IAAI,QAAM,EAAE,KAAK,EAAE,KAAK,OAAO,EAAE,OAAO,UAAU,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;AAC5G;AAKO,SAAS,YAAY,WAAmB,UAAkB,OAAe,cAAsB,kBAAgC;AACpI,QAAM,IAAI,UAAU;AACpB,IAAE,WAAW,KAAK;AAAA,IAChB,IAAI,KAAK,IAAI;AAAA,IACb,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf,mBAAmB;AAAA,IACnB,cAAc,eAAe;AAAA,IAC7B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,EACrC,CAAC;AAED,MAAI,EAAE,WAAW,SAAS,KAAO;AAC/B,MAAE,aAAa,EAAE,WAAW,MAAM,IAAM;AAAA,EAC1C;AACA,gBAAc;AAChB;AAGO,SAAS,gBAAoG;AAClH,QAAM,IAAI,UAAU;AACpB,MAAI,cAAc;AAClB,QAAM,aAAqC,CAAC;AAE5C,aAAW,OAAO,EAAE,YAAY;AAC9B,mBAAe,IAAI;AACnB,eAAW,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,KAAK,IAAI;AAAA,EACnE;AAEA,SAAO;AAAA,IACL;AAAA,IACA,eAAe,EAAE,WAAW;AAAA,IAC5B;AAAA,EACF;AACF;","names":["s"]}
1
+ {"version":3,"sources":["../../src/memory/memory.ts"],"sourcesContent":["/**\n * TITAN — Memory / Persistence System\n * JSON-file-backed persistent memory for conversations, facts, preferences, and usage.\n * Uses no native dependencies — pure Node.js for maximum portability.\n */\nimport { existsSync, readFileSync, writeFileSync, renameSync } from 'fs';\nimport { join } from 'path';\nimport { TITAN_HOME } from '../utils/constants.js';\nimport { ensureDir } from '../utils/helpers.js';\nimport logger from '../utils/logger.js';\nimport { encrypt, decrypt, type EncryptedPayload } from '../security/encryption.js';\nimport { isVectorSearchAvailable, searchVectors, addVector } from './vectors.js';\n\nconst COMPONENT = 'Memory';\n\n// ─── Data Store ──────────────────────────────────────────────────\n\ninterface DataStore {\n conversations: ConversationMessage[];\n memories: MemoryEntry[];\n sessions: SessionRecord[];\n usageStats: UsageRecord[];\n cronJobs: CronRecord[];\n skillsInstalled: SkillRecord[];\n}\n\ninterface MemoryEntry {\n id: string;\n category: string;\n key: string;\n value: string;\n metadata?: string;\n createdAt: string;\n updatedAt: string;\n}\n\ninterface SessionRecord {\n id: string;\n channel: string;\n user_id: string;\n agent_id: string;\n status: string;\n message_count: number;\n created_at: string;\n last_active: string;\n name?: string;\n last_message?: string;\n // D3: Persisted session overrides (survive session recovery after timeout/restart)\n model_override?: string;\n thinking_override?: string;\n // Hunt Finding #19 (2026-04-14): true when this session was created via an\n // explicit sessionId (getOrCreateSessionById). Named sessions MUST NOT be\n // returned by the default-slot lookup in getOrCreateSession — otherwise\n // subsequent no-sessionId requests from the same channel+user+agent will\n // inherit the most recent named session's history, causing privacy bleed\n // between API callers.\n is_named?: boolean;\n}\n\ninterface UsageRecord {\n id: number;\n session_id: string;\n provider: string;\n model: string;\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n created_at: string;\n}\n\ninterface CronRecord {\n id: string;\n name: string;\n schedule: string;\n command: string;\n mode?: 'shell' | 'tool'; // Execution mode (default: shell for backward compat)\n allowedTools?: string[]; // Tool allowlist for tool-mode jobs\n enabled: boolean;\n last_run?: string;\n next_run?: string;\n created_at: string;\n}\n\ninterface SkillRecord {\n name: string;\n version: string;\n source: string;\n enabled: boolean;\n installed_at: string;\n}\n\nconst DB_FILE = join(TITAN_HOME, 'titan-data.json');\n\nlet store: DataStore | null = null;\nlet dirty = false;\nlet isShuttingDown = false;\n\nfunction getDefaultStore(): DataStore {\n return {\n conversations: [],\n memories: [],\n sessions: [],\n usageStats: [],\n cronJobs: [],\n skillsInstalled: [],\n };\n}\n\n// NOTE: Sync I/O is intentional — runs only once at cold start, then cached in-memory.\nfunction loadStore(): DataStore {\n if (store) return store;\n ensureDir(TITAN_HOME);\n if (existsSync(DB_FILE)) {\n try {\n const raw = readFileSync(DB_FILE, 'utf-8');\n store = JSON.parse(raw) as DataStore;\n // Ensure all fields exist\n store.conversations = store.conversations || [];\n store.memories = store.memories || [];\n store.sessions = store.sessions || [];\n store.usageStats = store.usageStats || [];\n store.cronJobs = store.cronJobs || [];\n store.skillsInstalled = store.skillsInstalled || [];\n } catch {\n logger.warn(COMPONENT, 'Could not load data store, creating fresh one');\n store = getDefaultStore();\n }\n } else {\n store = getDefaultStore();\n }\n return store;\n}\n\nfunction saveStore(): void {\n if (!store || isShuttingDown) return;\n ensureDir(TITAN_HOME);\n try {\n const tmpFile = DB_FILE + '.tmp';\n writeFileSync(tmpFile, JSON.stringify(store, null, 2), 'utf-8');\n renameSync(tmpFile, DB_FILE);\n dirty = false;\n } catch (e) {\n dirty = true;\n logger.error(COMPONENT, `Failed to save data: ${(e as Error).message}`);\n }\n}\n\n// Auto-save periodically\nlet saveTimeout: ReturnType<typeof setTimeout> | null = null;\nexport function debouncedSave(): void {\n if (dirty) { saveStore(); return; }\n if (saveTimeout) clearTimeout(saveTimeout);\n saveTimeout = setTimeout(saveStore, 1000);\n saveTimeout.unref();\n}\n\n/** Initialize the memory system */\nexport function initMemory(): void {\n loadStore();\n logger.info(COMPONENT, 'Memory system initialized');\n}\n\n/** Close / flush the memory system */\nexport function closeMemory(): void {\n if (saveTimeout) { clearTimeout(saveTimeout); saveTimeout = null; }\n saveStore();\n if (dirty) {\n logger.error(COMPONENT, 'DATA MAY BE LOST — failed to flush memory store on shutdown');\n }\n isShuttingDown = true;\n}\n\n/** Get internal store (for skills like cron that need direct access) */\nexport function getDb(): DataStore {\n return loadStore();\n}\n\n// ─── Conversation History ────────────────────────────────────────\n\nexport interface ConversationMessage {\n id: string;\n sessionId: string;\n role: string;\n content: string;\n toolCalls?: string;\n toolCallId?: string;\n model?: string;\n tokenCount: number;\n createdAt: string;\n isEncrypted?: boolean;\n}\n\n/** Save a message to conversation history */\nexport function saveMessage(message: Omit<ConversationMessage, 'createdAt'>, e2eKey?: string): void {\n const s = loadStore();\n\n let content = message.content;\n let isEncrypted = false;\n\n if (e2eKey) {\n try {\n const payload = encrypt(message.content, Buffer.from(e2eKey, 'base64'));\n content = JSON.stringify(payload);\n isEncrypted = true;\n } catch {\n logger.error(COMPONENT, `Failed to encrypt message for storage`);\n content = \"[ENCRYPTION FAILED] \" + content; // Fallback, though we should probably throw in strict environments\n }\n }\n\n s.conversations.push({\n ...message,\n content,\n isEncrypted,\n createdAt: new Date().toISOString(),\n });\n // Keep only last 5000 messages total to prevent unbounded growth\n if (s.conversations.length > 5000) {\n s.conversations = s.conversations.slice(-5000);\n }\n debouncedSave();\n}\n\n/** Get conversation history for a session */\nexport function getHistory(sessionId: string, limit: number = 50, e2eKey?: string): ConversationMessage[] {\n const s = loadStore();\n const rawHistory = s.conversations\n .filter((m) => m.sessionId === sessionId)\n .slice(-limit);\n\n if (!e2eKey) {\n // If no key is provided, we just return the raw payload. \n // If it's encrypted, it'll just show the JSON string of the EncryptedPayload.\n return rawHistory;\n }\n\n // Decrypt the ones that were encrypted\n return rawHistory.map(m => {\n if (m.isEncrypted) {\n try {\n const payload = JSON.parse(m.content) as EncryptedPayload;\n return {\n ...m,\n content: decrypt(payload, Buffer.from(e2eKey, 'base64'))\n };\n } catch {\n logger.error(COMPONENT, `Failed to decrypt message ${m.id}`);\n return { ...m, content: \"[DECRYPTION FAILED]\" };\n }\n }\n return m;\n });\n}\n\n/** Update session name and/or last message snippet */\nexport function updateSessionMeta(sessionId: string, meta: { name?: string; last_message?: string; model_override?: string; thinking_override?: string }): void {\n const s = loadStore();\n const rec = s.sessions.find(ses => ses.id === sessionId);\n if (!rec) return;\n if (meta.name !== undefined) rec.name = meta.name;\n if (meta.last_message !== undefined) rec.last_message = meta.last_message;\n // D3: Persist session overrides to database so they survive timeout/restart\n if (meta.model_override !== undefined) rec.model_override = meta.model_override;\n if (meta.thinking_override !== undefined) rec.thinking_override = meta.thinking_override;\n debouncedSave();\n}\n\n/** Clear conversation history for a session */\nexport function clearHistory(sessionId: string): void {\n const s = loadStore();\n s.conversations = s.conversations.filter((m) => m.sessionId !== sessionId);\n debouncedSave();\n}\n\n// ─── Persistent Memory (Facts / Preferences) ─────────────────────\n\n/** Store a memory (key-value with category) */\nexport function rememberFact(category: string, key: string, value: string, metadata?: Record<string, unknown>): void {\n const s = loadStore();\n const id = `${category}:${key}`;\n const existingIdx = s.memories.findIndex((m) => m.id === id);\n const now = new Date().toISOString();\n\n if (existingIdx >= 0) {\n s.memories[existingIdx].value = value;\n s.memories[existingIdx].metadata = metadata ? JSON.stringify(metadata) : undefined;\n s.memories[existingIdx].updatedAt = now;\n } else {\n s.memories.push({\n id,\n category,\n key,\n value,\n metadata: metadata ? JSON.stringify(metadata) : undefined,\n createdAt: now,\n updatedAt: now,\n });\n }\n debouncedSave();\n\n // Index to vector store (fire-and-forget)\n if (isVectorSearchAvailable()) {\n addVector(id, `${category}: ${key} = ${value}`, 'memory', { category, key }).catch(e => logger.debug(COMPONENT, `Background vector indexing failed: ${(e as Error).message}`));\n }\n}\n\n/** Recall a specific memory */\nexport function recallFact(category: string, key: string): string | null {\n const s = loadStore();\n const entry = s.memories.find((m) => m.category === category && m.key === key);\n return entry?.value || null;\n}\n\n/** Search memories by category — hybrid keyword + vector search */\nexport async function searchMemories(category?: string, query?: string): Promise<Array<{ key: string; value: string; category: string; score?: number }>> {\n const s = loadStore();\n let results = s.memories;\n\n if (category) {\n results = results.filter((m) => m.category === category);\n }\n if (query) {\n const q = query.toLowerCase();\n // Word-boundary match to avoid false positives (\"use\" matching \"user\", \"reuse\")\n const qRegex = new RegExp('\\\\b' + q.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&') + '\\\\b', 'i');\n results = results.filter((m) =>\n qRegex.test(m.key) || qRegex.test(m.value)\n );\n }\n\n // Keyword scoring\n const scored = results.map(m => {\n let score = 0;\n if (query) {\n const q = query.toLowerCase();\n const keyLower = m.key.toLowerCase();\n const valLower = m.value.toLowerCase();\n // Exact key match scores highest\n if (keyLower === q) score += 5;\n else if (keyLower.includes(q)) score += 2;\n if (valLower.includes(q)) score += 1;\n // BM25-style: boost for multiple keyword matches\n const terms = q.split(/\\s+/).filter(Boolean);\n for (const term of terms) {\n if (keyLower.includes(term)) score += 1;\n if (valLower.includes(term)) score += 0.5;\n }\n }\n return { key: m.key, value: m.value, category: m.category, id: m.id, score };\n });\n\n // Vector search augmentation (hybrid mode)\n if (query && isVectorSearchAvailable()) {\n try {\n const vectorResults = await searchVectors(query, 20, 'memory', 0.4);\n for (const vr of vectorResults) {\n // Skip stale vector IDs that no longer exist in the store\n const memEntry = s.memories.find(m => m.id === vr.id);\n if (!memEntry) continue;\n const existing = scored.find(s => s.id === vr.id);\n if (existing) {\n // Boost keyword results that also match semantically\n existing.score += vr.score * 3;\n } else {\n // Add vector-only results (semantically similar but no keyword match)\n const entry = s.memories.find(m => m.id === vr.id);\n if (entry && (!category || entry.category === category)) {\n scored.push({\n key: entry.key,\n value: entry.value,\n category: entry.category,\n id: entry.id,\n score: vr.score * 2,\n });\n }\n }\n }\n } catch {\n // Vector search failure is non-fatal\n }\n }\n\n scored.sort((a, b) => b.score - a.score);\n // Deduplicate by ID (vector + keyword can match the same entry)\n const seen = new Set<string>();\n const unique = scored.filter(m => { if (seen.has(m.id)) return false; seen.add(m.id); return true; });\n return unique.slice(0, 50).map(m => ({ key: m.key, value: m.value, category: m.category, score: m.score }));\n}\n\n// ─── Usage Tracking ──────────────────────────────────────────────\n\n/** Record usage statistics */\nexport function recordUsage(sessionId: string, provider: string, model: string, promptTokens: number, completionTokens: number): void {\n const s = loadStore();\n s.usageStats.push({\n id: Date.now(),\n session_id: sessionId,\n provider,\n model,\n prompt_tokens: promptTokens,\n completion_tokens: completionTokens,\n total_tokens: promptTokens + completionTokens,\n created_at: new Date().toISOString(),\n });\n // Keep only last 10000 records\n if (s.usageStats.length > 10000) {\n s.usageStats = s.usageStats.slice(-10000);\n }\n debouncedSave();\n}\n\n/** Get total usage statistics */\nexport function getUsageStats(): { totalTokens: number; totalRequests: number; byProvider: Record<string, number> } {\n const s = loadStore();\n let totalTokens = 0;\n const byProvider: Record<string, number> = {};\n\n for (const rec of s.usageStats) {\n totalTokens += rec.total_tokens;\n byProvider[rec.provider] = (byProvider[rec.provider] || 0) + rec.total_tokens;\n }\n\n return {\n totalTokens,\n totalRequests: s.usageStats.length,\n byProvider,\n };\n}\n"],"mappings":";AAKA,SAAS,YAAY,cAAc,eAAe,kBAAkB;AACpE,SAAS,YAAY;AACrB,SAAS,kBAAkB;AAC3B,SAAS,iBAAiB;AAC1B,OAAO,YAAY;AACnB,SAAS,SAAS,eAAsC;AACxD,SAAS,yBAAyB,eAAe,iBAAiB;AAElE,MAAM,YAAY;AA8ElB,MAAM,UAAU,KAAK,YAAY,iBAAiB;AAElD,IAAI,QAA0B;AAC9B,IAAI,QAAQ;AACZ,IAAI,iBAAiB;AAErB,SAAS,kBAA6B;AACpC,SAAO;AAAA,IACL,eAAe,CAAC;AAAA,IAChB,UAAU,CAAC;AAAA,IACX,UAAU,CAAC;AAAA,IACX,YAAY,CAAC;AAAA,IACb,UAAU,CAAC;AAAA,IACX,iBAAiB,CAAC;AAAA,EACpB;AACF;AAGA,SAAS,YAAuB;AAC9B,MAAI,MAAO,QAAO;AAClB,YAAU,UAAU;AACpB,MAAI,WAAW,OAAO,GAAG;AACvB,QAAI;AACF,YAAM,MAAM,aAAa,SAAS,OAAO;AACzC,cAAQ,KAAK,MAAM,GAAG;AAEtB,YAAM,gBAAgB,MAAM,iBAAiB,CAAC;AAC9C,YAAM,WAAW,MAAM,YAAY,CAAC;AACpC,YAAM,WAAW,MAAM,YAAY,CAAC;AACpC,YAAM,aAAa,MAAM,cAAc,CAAC;AACxC,YAAM,WAAW,MAAM,YAAY,CAAC;AACpC,YAAM,kBAAkB,MAAM,mBAAmB,CAAC;AAAA,IACpD,QAAQ;AACN,aAAO,KAAK,WAAW,+CAA+C;AACtE,cAAQ,gBAAgB;AAAA,IAC1B;AAAA,EACF,OAAO;AACL,YAAQ,gBAAgB;AAAA,EAC1B;AACA,SAAO;AACT;AAEA,SAAS,YAAkB;AACzB,MAAI,CAAC,SAAS,eAAgB;AAC9B,YAAU,UAAU;AACpB,MAAI;AACF,UAAM,UAAU,UAAU;AAC1B,kBAAc,SAAS,KAAK,UAAU,OAAO,MAAM,CAAC,GAAG,OAAO;AAC9D,eAAW,SAAS,OAAO;AAC3B,YAAQ;AAAA,EACV,SAAS,GAAG;AACV,YAAQ;AACR,WAAO,MAAM,WAAW,wBAAyB,EAAY,OAAO,EAAE;AAAA,EACxE;AACF;AAGA,IAAI,cAAoD;AACjD,SAAS,gBAAsB;AACpC,MAAI,OAAO;AAAE,cAAU;AAAG;AAAA,EAAQ;AAClC,MAAI,YAAa,cAAa,WAAW;AACzC,gBAAc,WAAW,WAAW,GAAI;AACxC,cAAY,MAAM;AACpB;AAGO,SAAS,aAAmB;AACjC,YAAU;AACV,SAAO,KAAK,WAAW,2BAA2B;AACpD;AAGO,SAAS,cAAoB;AAClC,MAAI,aAAa;AAAE,iBAAa,WAAW;AAAG,kBAAc;AAAA,EAAM;AAClE,YAAU;AACV,MAAI,OAAO;AACT,WAAO,MAAM,WAAW,kEAA6D;AAAA,EACvF;AACA,mBAAiB;AACnB;AAGO,SAAS,QAAmB;AACjC,SAAO,UAAU;AACnB;AAkBO,SAAS,YAAY,SAAiD,QAAuB;AAClG,QAAM,IAAI,UAAU;AAEpB,MAAI,UAAU,QAAQ;AACtB,MAAI,cAAc;AAElB,MAAI,QAAQ;AACV,QAAI;AACF,YAAM,UAAU,QAAQ,QAAQ,SAAS,OAAO,KAAK,QAAQ,QAAQ,CAAC;AACtE,gBAAU,KAAK,UAAU,OAAO;AAChC,oBAAc;AAAA,IAChB,QAAQ;AACN,aAAO,MAAM,WAAW,uCAAuC;AAC/D,gBAAU,yBAAyB;AAAA,IACrC;AAAA,EACF;AAEA,IAAE,cAAc,KAAK;AAAA,IACnB,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC,CAAC;AAED,MAAI,EAAE,cAAc,SAAS,KAAM;AACjC,MAAE,gBAAgB,EAAE,cAAc,MAAM,IAAK;AAAA,EAC/C;AACA,gBAAc;AAChB;AAGO,SAAS,WAAW,WAAmB,QAAgB,IAAI,QAAwC;AACxG,QAAM,IAAI,UAAU;AACpB,QAAM,aAAa,EAAE,cAClB,OAAO,CAAC,MAAM,EAAE,cAAc,SAAS,EACvC,MAAM,CAAC,KAAK;AAEf,MAAI,CAAC,QAAQ;AAGX,WAAO;AAAA,EACT;AAGA,SAAO,WAAW,IAAI,OAAK;AACzB,QAAI,EAAE,aAAa;AACjB,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,EAAE,OAAO;AACpC,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SAAS,QAAQ,SAAS,OAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,QACzD;AAAA,MACF,QAAQ;AACN,eAAO,MAAM,WAAW,6BAA6B,EAAE,EAAE,EAAE;AAC3D,eAAO,EAAE,GAAG,GAAG,SAAS,sBAAsB;AAAA,MAChD;AAAA,IACF;AACA,WAAO;AAAA,EACT,CAAC;AACH;AAGO,SAAS,kBAAkB,WAAmB,MAA2G;AAC9J,QAAM,IAAI,UAAU;AACpB,QAAM,MAAM,EAAE,SAAS,KAAK,SAAO,IAAI,OAAO,SAAS;AACvD,MAAI,CAAC,IAAK;AACV,MAAI,KAAK,SAAS,OAAW,KAAI,OAAO,KAAK;AAC7C,MAAI,KAAK,iBAAiB,OAAW,KAAI,eAAe,KAAK;AAE7D,MAAI,KAAK,mBAAmB,OAAW,KAAI,iBAAiB,KAAK;AACjE,MAAI,KAAK,sBAAsB,OAAW,KAAI,oBAAoB,KAAK;AACvE,gBAAc;AAChB;AAGO,SAAS,aAAa,WAAyB;AACpD,QAAM,IAAI,UAAU;AACpB,IAAE,gBAAgB,EAAE,cAAc,OAAO,CAAC,MAAM,EAAE,cAAc,SAAS;AACzE,gBAAc;AAChB;AAKO,SAAS,aAAa,UAAkB,KAAa,OAAe,UAA0C;AACnH,QAAM,IAAI,UAAU;AACpB,QAAM,KAAK,GAAG,QAAQ,IAAI,GAAG;AAC7B,QAAM,cAAc,EAAE,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3D,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,MAAI,eAAe,GAAG;AACpB,MAAE,SAAS,WAAW,EAAE,QAAQ;AAChC,MAAE,SAAS,WAAW,EAAE,WAAW,WAAW,KAAK,UAAU,QAAQ,IAAI;AACzE,MAAE,SAAS,WAAW,EAAE,YAAY;AAAA,EACtC,OAAO;AACL,MAAE,SAAS,KAAK;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,WAAW,KAAK,UAAU,QAAQ,IAAI;AAAA,MAChD,WAAW;AAAA,MACX,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AACA,gBAAc;AAGd,MAAI,wBAAwB,GAAG;AAC7B,cAAU,IAAI,GAAG,QAAQ,KAAK,GAAG,MAAM,KAAK,IAAI,UAAU,EAAE,UAAU,IAAI,CAAC,EAAE,MAAM,OAAK,OAAO,MAAM,WAAW,sCAAuC,EAAY,OAAO,EAAE,CAAC;AAAA,EAC/K;AACF;AAGO,SAAS,WAAW,UAAkB,KAA4B;AACvE,QAAM,IAAI,UAAU;AACpB,QAAM,QAAQ,EAAE,SAAS,KAAK,CAAC,MAAM,EAAE,aAAa,YAAY,EAAE,QAAQ,GAAG;AAC7E,SAAO,OAAO,SAAS;AACzB;AAGA,eAAsB,eAAe,UAAmB,OAAkG;AACxJ,QAAM,IAAI,UAAU;AACpB,MAAI,UAAU,EAAE;AAEhB,MAAI,UAAU;AACZ,cAAU,QAAQ,OAAO,CAAC,MAAM,EAAE,aAAa,QAAQ;AAAA,EACzD;AACA,MAAI,OAAO;AACT,UAAM,IAAI,MAAM,YAAY;AAE5B,UAAM,SAAS,IAAI,OAAO,QAAQ,EAAE,QAAQ,uBAAuB,MAAM,IAAI,OAAO,GAAG;AACvF,cAAU,QAAQ;AAAA,MAAO,CAAC,MACxB,OAAO,KAAK,EAAE,GAAG,KAAK,OAAO,KAAK,EAAE,KAAK;AAAA,IAC3C;AAAA,EACF;AAGA,QAAM,SAAS,QAAQ,IAAI,OAAK;AAC9B,QAAI,QAAQ;AACZ,QAAI,OAAO;AACT,YAAM,IAAI,MAAM,YAAY;AAC5B,YAAM,WAAW,EAAE,IAAI,YAAY;AACnC,YAAM,WAAW,EAAE,MAAM,YAAY;AAErC,UAAI,aAAa,EAAG,UAAS;AAAA,eACpB,SAAS,SAAS,CAAC,EAAG,UAAS;AACxC,UAAI,SAAS,SAAS,CAAC,EAAG,UAAS;AAEnC,YAAM,QAAQ,EAAE,MAAM,KAAK,EAAE,OAAO,OAAO;AAC3C,iBAAW,QAAQ,OAAO;AACxB,YAAI,SAAS,SAAS,IAAI,EAAG,UAAS;AACtC,YAAI,SAAS,SAAS,IAAI,EAAG,UAAS;AAAA,MACxC;AAAA,IACF;AACA,WAAO,EAAE,KAAK,EAAE,KAAK,OAAO,EAAE,OAAO,UAAU,EAAE,UAAU,IAAI,EAAE,IAAI,MAAM;AAAA,EAC7E,CAAC;AAGD,MAAI,SAAS,wBAAwB,GAAG;AACtC,QAAI;AACF,YAAM,gBAAgB,MAAM,cAAc,OAAO,IAAI,UAAU,GAAG;AAClE,iBAAW,MAAM,eAAe;AAE9B,cAAM,WAAW,EAAE,SAAS,KAAK,OAAK,EAAE,OAAO,GAAG,EAAE;AACpD,YAAI,CAAC,SAAU;AACf,cAAM,WAAW,OAAO,KAAK,CAAAA,OAAKA,GAAE,OAAO,GAAG,EAAE;AAChD,YAAI,UAAU;AAEZ,mBAAS,SAAS,GAAG,QAAQ;AAAA,QAC/B,OAAO;AAEL,gBAAM,QAAQ,EAAE,SAAS,KAAK,OAAK,EAAE,OAAO,GAAG,EAAE;AACjD,cAAI,UAAU,CAAC,YAAY,MAAM,aAAa,WAAW;AACvD,mBAAO,KAAK;AAAA,cACV,KAAK,MAAM;AAAA,cACX,OAAO,MAAM;AAAA,cACb,UAAU,MAAM;AAAA,cAChB,IAAI,MAAM;AAAA,cACV,OAAO,GAAG,QAAQ;AAAA,YACpB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEvC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,SAAS,OAAO,OAAO,OAAK;AAAE,QAAI,KAAK,IAAI,EAAE,EAAE,EAAG,QAAO;AAAO,SAAK,IAAI,EAAE,EAAE;AAAG,WAAO;AAAA,EAAM,CAAC;AACpG,SAAO,OAAO,MAAM,GAAG,EAAE,EAAE,IAAI,QAAM,EAAE,KAAK,EAAE,KAAK,OAAO,EAAE,OAAO,UAAU,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;AAC5G;AAKO,SAAS,YAAY,WAAmB,UAAkB,OAAe,cAAsB,kBAAgC;AACpI,QAAM,IAAI,UAAU;AACpB,IAAE,WAAW,KAAK;AAAA,IAChB,IAAI,KAAK,IAAI;AAAA,IACb,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf,mBAAmB;AAAA,IACnB,cAAc,eAAe;AAAA,IAC7B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,EACrC,CAAC;AAED,MAAI,EAAE,WAAW,SAAS,KAAO;AAC/B,MAAE,aAAa,EAAE,WAAW,MAAM,IAAM;AAAA,EAC1C;AACA,gBAAc;AAChB;AAGO,SAAS,gBAAoG;AAClH,QAAM,IAAI,UAAU;AACpB,MAAI,cAAc;AAClB,QAAM,aAAqC,CAAC;AAE5C,aAAW,OAAO,EAAE,YAAY;AAC9B,mBAAe,IAAI;AACnB,eAAW,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,KAAK,IAAI;AAAA,EACnE;AAEA,SAAO;AAAA,IACL;AAAA,IACA,eAAe,EAAE,WAAW;AAAA,IAC5B;AAAA,EACF;AACF;","names":["s"]}
@@ -0,0 +1,140 @@
1
+ #!/usr/bin/env node
2
+ import { existsSync, statSync, readFileSync } from "fs";
3
+ import { createHash } from "crypto";
4
+ const VERB_PATTERNS = [
5
+ // Past-tense file writes — Hunt #47 lineage. Catches "I have written X
6
+ // to /tmp/foo.md", "I saved the report at /home/dj/report.txt".
7
+ {
8
+ regex: /\b(?:I(?:'ve| have| just)?)\s+(written|saved|wrote|created|generated|produced)\s+(?:[^.!?\n]*?)(?:to|at|in)\s+["'`]?(\/[\w/.-]+\.[a-z0-9]+|\.\/[\w/.-]+|~\/[\w/.-]+|[\w./_-]+\.[a-z0-9]{1,5})["'`]?/i,
9
+ category: "file_write",
10
+ expectedTool: "write_file",
11
+ verbGroup: 1,
12
+ targetGroup: 2
13
+ },
14
+ // File edits — "I edited X", "I fixed the bug in X", "I modified config.ts".
15
+ {
16
+ regex: /\b(?:I(?:'ve| have| just)?)\s+(edited|modified|fixed|patched|updated|refactored)\s+(?:the\s+\w+\s+(?:in|at)\s+)?["'`]?([\w/.-]+\.[a-z0-9]{1,5}|[\w/.-]+\/[\w._-]+)["'`]?/i,
17
+ category: "file_edit",
18
+ expectedTool: "edit_file",
19
+ verbGroup: 1,
20
+ targetGroup: 2
21
+ },
22
+ // File deletes — "I deleted /tmp/foo", "I removed the old config".
23
+ {
24
+ regex: /\b(?:I(?:'ve| have| just)?)\s+(deleted|removed|cleaned\s+up)\s+(?:the\s+)?["'`]?(\/[\w/.-]+|[\w/.-]+\.[a-z0-9]{1,5})["'`]?/i,
25
+ category: "file_delete",
26
+ expectedTool: "shell",
27
+ verbGroup: 1,
28
+ targetGroup: 2
29
+ },
30
+ // Shell command claims — "I ran `npm test`", "I executed git status".
31
+ // Backtick form is the strong signal; bare-text "I ran npm install"
32
+ // also triggers but only when followed by a recognizable command.
33
+ {
34
+ regex: /\b(?:I(?:'ve| have| just)?)\s+(ran|executed|installed|launched)\s+["`]?([a-z][a-z0-9_-]+(?:\s+[\w.-]+)*)/i,
35
+ category: "shell_run",
36
+ expectedTool: "shell",
37
+ verbGroup: 1,
38
+ targetGroup: 2
39
+ },
40
+ // Web actions — "I searched for X", "I browsed to Y", "I fetched Z".
41
+ {
42
+ regex: /\b(?:I(?:'ve| have| just)?)\s+(searched|browsed|fetched|googled|looked\s+up)\s+(?:for\s+|to\s+)?["'`]?([^"'`.!?\n]{2,80})["'`]?/i,
43
+ category: "web_action",
44
+ expectedTool: "web_search",
45
+ verbGroup: 1,
46
+ targetGroup: 2
47
+ },
48
+ // Generic tool-name claim — "I used the shell tool", "I used write_file".
49
+ // This is the weakest signal and the most likely to misfire — only
50
+ // included so the system can flag for human review, not auto-correct.
51
+ {
52
+ regex: /\b(?:I(?:'ve| have| just)?)\s+(used|called|invoked)\s+(?:the\s+)?["'`]?([a-z_]{3,30})["'`]?\s+tool\b/i,
53
+ category: "tool_used",
54
+ expectedTool: "*",
55
+ // wildcard — match against any tool that has the same name
56
+ verbGroup: 1,
57
+ targetGroup: 2
58
+ }
59
+ ];
60
+ function detectFabrication(content, toolHistory) {
61
+ if (!content || content.length < 5) return [];
62
+ const findings = [];
63
+ const usedTools = new Set(toolHistory.map((t) => t.name.toLowerCase()));
64
+ for (const pat of VERB_PATTERNS) {
65
+ const m = content.match(pat.regex);
66
+ if (!m) continue;
67
+ const verb = (pat.verbGroup ? m[pat.verbGroup] : m[1]) || "did";
68
+ const target = (pat.targetGroup ? m[pat.targetGroup] : m[2]) || "";
69
+ if (!target) continue;
70
+ const claimSatisfied = pat.expectedTool === "*" ? usedTools.has(target.toLowerCase()) : usedTools.has(pat.expectedTool);
71
+ if (!claimSatisfied) {
72
+ findings.push({
73
+ category: pat.category,
74
+ verb: verb.toLowerCase(),
75
+ target: target.trim(),
76
+ expectedTool: pat.expectedTool,
77
+ excerpt: m[0]
78
+ });
79
+ }
80
+ }
81
+ return findings;
82
+ }
83
+ function verifyFileWriteClaim(filePath, expectedContent) {
84
+ let exists;
85
+ try {
86
+ exists = existsSync(filePath);
87
+ } catch {
88
+ return { fileExists: false, reason: "fs.existsSync threw \u2014 invalid path" };
89
+ }
90
+ if (!exists) {
91
+ return { fileExists: false, reason: `file not present at ${filePath}` };
92
+ }
93
+ let size = 0;
94
+ try {
95
+ size = statSync(filePath).size;
96
+ } catch {
97
+ return { fileExists: true, reason: "fs.statSync threw on existing path" };
98
+ }
99
+ if (size === 0) {
100
+ return { fileExists: true, reason: "file exists but is empty" };
101
+ }
102
+ let actualContent;
103
+ let fileHash;
104
+ try {
105
+ actualContent = readFileSync(filePath, "utf-8");
106
+ fileHash = createHash("sha256").update(actualContent).digest("hex");
107
+ } catch (e) {
108
+ return { fileExists: true, reason: `read failed: ${e.message}` };
109
+ }
110
+ if (expectedContent === void 0) {
111
+ return { fileExists: true, fileHash };
112
+ }
113
+ const a = actualContent.trim();
114
+ const b = expectedContent.trim();
115
+ const contentMatches = a === b;
116
+ return {
117
+ fileExists: true,
118
+ fileHash,
119
+ contentMatches,
120
+ reason: contentMatches ? void 0 : "file exists but content differs from claim"
121
+ };
122
+ }
123
+ function buildNudgeMessage(findings) {
124
+ if (findings.length === 0) return "";
125
+ const lines = ["You claimed to perform actions you did NOT actually do via tools:"];
126
+ for (const f of findings) {
127
+ lines.push(` - You said you ${f.verb} "${f.target}", but you did not call ${f.expectedTool === "*" ? "any matching tool" : `the ${f.expectedTool} tool`}.`);
128
+ }
129
+ lines.push("");
130
+ lines.push("Either:");
131
+ lines.push(" 1. Actually call the right tool now.");
132
+ lines.push(" 2. Correct your claim \u2014 say what you DID do, or admit you did not do it.");
133
+ return lines.join("\n");
134
+ }
135
+ export {
136
+ buildNudgeMessage,
137
+ detectFabrication,
138
+ verifyFileWriteClaim
139
+ };
140
+ //# sourceMappingURL=fabricationGuard.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/safety/fabricationGuard.ts"],"sourcesContent":["/**\n * FabricationGuard (Phase 9 / Track D, v5.4.0)\n *\n * Catches model responses that CLAIM to have done something but didn't\n * actually call the tool that would do it. The original guard lived\n * inline in `agentLoop.ts` and only matched past-tense write claims via\n * a single narrow regex. This module:\n * 1. Expands pattern coverage to all common action verbs\n * (edit/fix/run/search/browse/create/delete + write/save).\n * 2. Cross-checks claims against the actual tool history — \"I ran\n * `npm test`\" only counts as truthful if a `shell` tool call\n * actually happened in this turn.\n * 3. For file-write claims, exposes a verifier that checks the file\n * exists and (optionally) hashes the content.\n *\n * It's a pure module: no I/O at import time, easy to unit-test. The\n * agent loop can call `detectFabrication(content, toolHistory)` after\n * each response and choose to nudge the model, force a tool call, or\n * return a redacted answer.\n */\n\nimport { existsSync, statSync, readFileSync } from 'fs';\nimport { createHash } from 'crypto';\n\n/** A single tool invocation captured by the agent loop, in execution order. */\nexport interface ToolHistoryEntry {\n /** Tool name (`shell`, `write_file`, `web_search`, ...). */\n name: string;\n /** Arguments passed to the tool, parsed from the model's tool_calls. */\n args?: Record<string, unknown>;\n /** Raw output, when available — used by the file-write verifier. */\n output?: string;\n}\n\n/** Discriminated category of fabrication signal. */\nexport type FabricationCategory =\n | 'file_write' // I wrote/saved/created file X\n | 'file_edit' // I edited/fixed/modified X\n | 'file_delete' // I deleted/removed X\n | 'shell_run' // I ran/executed/installed X\n | 'web_action' // I searched/browsed/fetched X\n | 'tool_used'; // I used [tool_name] (generic catch-all)\n\n/** A single fabrication finding from `detectFabrication`. */\nexport interface FabricationFinding {\n category: FabricationCategory;\n /** The verb the model used (write, edit, ran, etc.). */\n verb: string;\n /** The object/target the verb acted on (file path, URL, command). */\n target: string;\n /** The tool name that *would* satisfy this claim. */\n expectedTool: string;\n /** The exact substring of `content` that triggered the match. */\n excerpt: string;\n}\n\n// ── Pattern table ────────────────────────────────────────────────────\n//\n// Each entry is { regex, category, expectedTool }. We keep the regexes\n// strict to avoid false positives — TITAN's chat output is usually\n// short, so a wide pattern surface produces too many bogus rejections.\n//\n// All patterns require the verb to start near a sentence boundary\n// (^|[.!?\\n]\\s*) and end with a recognizable target. Matches are\n// case-insensitive but anchor on first-person voice (\"I have\", \"I've\",\n// \"I just\"); third-person summaries (e.g. quoting the user) don't fire.\n\nconst VERB_PATTERNS: Array<{\n regex: RegExp;\n category: FabricationCategory;\n expectedTool: string;\n verbGroup?: number;\n targetGroup?: number;\n}> = [\n // Past-tense file writes — Hunt #47 lineage. Catches \"I have written X\n // to /tmp/foo.md\", \"I saved the report at /home/dj/report.txt\".\n {\n regex: /\\b(?:I(?:'ve| have| just)?)\\s+(written|saved|wrote|created|generated|produced)\\s+(?:[^.!?\\n]*?)(?:to|at|in)\\s+[\"'`]?(\\/[\\w/.-]+\\.[a-z0-9]+|\\.\\/[\\w/.-]+|~\\/[\\w/.-]+|[\\w./_-]+\\.[a-z0-9]{1,5})[\"'`]?/i,\n category: 'file_write',\n expectedTool: 'write_file',\n verbGroup: 1,\n targetGroup: 2,\n },\n // File edits — \"I edited X\", \"I fixed the bug in X\", \"I modified config.ts\".\n {\n regex: /\\b(?:I(?:'ve| have| just)?)\\s+(edited|modified|fixed|patched|updated|refactored)\\s+(?:the\\s+\\w+\\s+(?:in|at)\\s+)?[\"'`]?([\\w/.-]+\\.[a-z0-9]{1,5}|[\\w/.-]+\\/[\\w._-]+)[\"'`]?/i,\n category: 'file_edit',\n expectedTool: 'edit_file',\n verbGroup: 1,\n targetGroup: 2,\n },\n // File deletes — \"I deleted /tmp/foo\", \"I removed the old config\".\n {\n regex: /\\b(?:I(?:'ve| have| just)?)\\s+(deleted|removed|cleaned\\s+up)\\s+(?:the\\s+)?[\"'`]?(\\/[\\w/.-]+|[\\w/.-]+\\.[a-z0-9]{1,5})[\"'`]?/i,\n category: 'file_delete',\n expectedTool: 'shell',\n verbGroup: 1,\n targetGroup: 2,\n },\n // Shell command claims — \"I ran `npm test`\", \"I executed git status\".\n // Backtick form is the strong signal; bare-text \"I ran npm install\"\n // also triggers but only when followed by a recognizable command.\n {\n regex: /\\b(?:I(?:'ve| have| just)?)\\s+(ran|executed|installed|launched)\\s+[\"`]?([a-z][a-z0-9_-]+(?:\\s+[\\w.-]+)*)/i,\n category: 'shell_run',\n expectedTool: 'shell',\n verbGroup: 1,\n targetGroup: 2,\n },\n // Web actions — \"I searched for X\", \"I browsed to Y\", \"I fetched Z\".\n {\n regex: /\\b(?:I(?:'ve| have| just)?)\\s+(searched|browsed|fetched|googled|looked\\s+up)\\s+(?:for\\s+|to\\s+)?[\"'`]?([^\"'`.!?\\n]{2,80})[\"'`]?/i,\n category: 'web_action',\n expectedTool: 'web_search',\n verbGroup: 1,\n targetGroup: 2,\n },\n // Generic tool-name claim — \"I used the shell tool\", \"I used write_file\".\n // This is the weakest signal and the most likely to misfire — only\n // included so the system can flag for human review, not auto-correct.\n {\n regex: /\\b(?:I(?:'ve| have| just)?)\\s+(used|called|invoked)\\s+(?:the\\s+)?[\"'`]?([a-z_]{3,30})[\"'`]?\\s+tool\\b/i,\n category: 'tool_used',\n expectedTool: '*', // wildcard — match against any tool that has the same name\n verbGroup: 1,\n targetGroup: 2,\n },\n];\n\n/**\n * Scan the model's response for action claims and return any that aren't\n * backed by a real tool call. `toolHistory` should contain every tool\n * invocation the agent made in this turn (and ideally the prior turn,\n * since \"I already wrote X\" can refer to a previous round).\n *\n * Returns an empty array when no fabrication is detected.\n */\nexport function detectFabrication(\n content: string,\n toolHistory: ToolHistoryEntry[],\n): FabricationFinding[] {\n if (!content || content.length < 5) return [];\n\n const findings: FabricationFinding[] = [];\n const usedTools = new Set(toolHistory.map(t => t.name.toLowerCase()));\n\n for (const pat of VERB_PATTERNS) {\n const m = content.match(pat.regex);\n if (!m) continue;\n const verb = (pat.verbGroup ? m[pat.verbGroup] : m[1]) || 'did';\n const target = (pat.targetGroup ? m[pat.targetGroup] : m[2]) || '';\n if (!target) continue;\n\n // Did the agent actually call a tool that satisfies this claim?\n const claimSatisfied = pat.expectedTool === '*'\n ? usedTools.has(target.toLowerCase())\n : usedTools.has(pat.expectedTool);\n\n if (!claimSatisfied) {\n findings.push({\n category: pat.category,\n verb: verb.toLowerCase(),\n target: target.trim(),\n expectedTool: pat.expectedTool,\n excerpt: m[0],\n });\n }\n }\n\n return findings;\n}\n\n// ── Verify-before-trust on file operations ──────────────────────────\n\n/** Result of `verifyFileWriteClaim`. */\nexport interface FileWriteVerification {\n /** True when the file exists at the claimed path with non-zero size. */\n fileExists: boolean;\n /** SHA-256 of the file contents, when present. */\n fileHash?: string;\n /** True when the file's content matches the model's claimed body\n * (only computed if `expectedContent` was passed). */\n contentMatches?: boolean;\n /** Why the claim fails verification, if it does. */\n reason?: string;\n}\n\n/**\n * Verify a file-write claim against the real filesystem.\n *\n * Use this AFTER the agent claims to have written/edited a file but\n * BEFORE accepting the response as final. If the file doesn't exist at\n * the claimed path, the agent fabricated and the loop should retry.\n *\n * Optionally pass `expectedContent` to also verify the body matches —\n * useful when the model includes the literal content in its response.\n */\nexport function verifyFileWriteClaim(\n filePath: string,\n expectedContent?: string,\n): FileWriteVerification {\n let exists: boolean;\n try {\n exists = existsSync(filePath);\n } catch {\n return { fileExists: false, reason: 'fs.existsSync threw — invalid path' };\n }\n if (!exists) {\n return { fileExists: false, reason: `file not present at ${filePath}` };\n }\n\n let size = 0;\n try {\n size = statSync(filePath).size;\n } catch {\n return { fileExists: true, reason: 'fs.statSync threw on existing path' };\n }\n if (size === 0) {\n return { fileExists: true, reason: 'file exists but is empty' };\n }\n\n let actualContent: string;\n let fileHash: string;\n try {\n actualContent = readFileSync(filePath, 'utf-8');\n fileHash = createHash('sha256').update(actualContent).digest('hex');\n } catch (e) {\n return { fileExists: true, reason: `read failed: ${(e as Error).message}` };\n }\n\n if (expectedContent === undefined) {\n return { fileExists: true, fileHash };\n }\n\n // Content match: lenient — strip trailing whitespace, compare.\n const a = actualContent.trim();\n const b = expectedContent.trim();\n const contentMatches = a === b;\n return {\n fileExists: true,\n fileHash,\n contentMatches,\n reason: contentMatches ? undefined : 'file exists but content differs from claim',\n };\n}\n\n/**\n * Build a structured nudge message the agent loop can append to the\n * model's next-turn user message when fabrication is detected. The\n * message is deliberately blunt — most weak models need to be told\n * directly that they didn't do what they claimed.\n */\nexport function buildNudgeMessage(findings: FabricationFinding[]): string {\n if (findings.length === 0) return '';\n const lines = ['You claimed to perform actions you did NOT actually do via tools:'];\n for (const f of findings) {\n lines.push(` - You said you ${f.verb} \"${f.target}\", but you did not call ${f.expectedTool === '*' ? 'any matching tool' : `the ${f.expectedTool} tool`}.`);\n }\n lines.push('');\n lines.push('Either:');\n lines.push(' 1. Actually call the right tool now.');\n lines.push(' 2. Correct your claim — say what you DID do, or admit you did not do it.');\n return lines.join('\\n');\n}\n"],"mappings":";AAqBA,SAAS,YAAY,UAAU,oBAAoB;AACnD,SAAS,kBAAkB;AA6C3B,MAAM,gBAMD;AAAA;AAAA;AAAA,EAGD;AAAA,IACI,OAAO;AAAA,IACP,UAAU;AAAA,IACV,cAAc;AAAA,IACd,WAAW;AAAA,IACX,aAAa;AAAA,EACjB;AAAA;AAAA,EAEA;AAAA,IACI,OAAO;AAAA,IACP,UAAU;AAAA,IACV,cAAc;AAAA,IACd,WAAW;AAAA,IACX,aAAa;AAAA,EACjB;AAAA;AAAA,EAEA;AAAA,IACI,OAAO;AAAA,IACP,UAAU;AAAA,IACV,cAAc;AAAA,IACd,WAAW;AAAA,IACX,aAAa;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAIA;AAAA,IACI,OAAO;AAAA,IACP,UAAU;AAAA,IACV,cAAc;AAAA,IACd,WAAW;AAAA,IACX,aAAa;AAAA,EACjB;AAAA;AAAA,EAEA;AAAA,IACI,OAAO;AAAA,IACP,UAAU;AAAA,IACV,cAAc;AAAA,IACd,WAAW;AAAA,IACX,aAAa;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAIA;AAAA,IACI,OAAO;AAAA,IACP,UAAU;AAAA,IACV,cAAc;AAAA;AAAA,IACd,WAAW;AAAA,IACX,aAAa;AAAA,EACjB;AACJ;AAUO,SAAS,kBACZ,SACA,aACoB;AACpB,MAAI,CAAC,WAAW,QAAQ,SAAS,EAAG,QAAO,CAAC;AAE5C,QAAM,WAAiC,CAAC;AACxC,QAAM,YAAY,IAAI,IAAI,YAAY,IAAI,OAAK,EAAE,KAAK,YAAY,CAAC,CAAC;AAEpE,aAAW,OAAO,eAAe;AAC7B,UAAM,IAAI,QAAQ,MAAM,IAAI,KAAK;AACjC,QAAI,CAAC,EAAG;AACR,UAAM,QAAQ,IAAI,YAAY,EAAE,IAAI,SAAS,IAAI,EAAE,CAAC,MAAM;AAC1D,UAAM,UAAU,IAAI,cAAc,EAAE,IAAI,WAAW,IAAI,EAAE,CAAC,MAAM;AAChE,QAAI,CAAC,OAAQ;AAGb,UAAM,iBAAiB,IAAI,iBAAiB,MACtC,UAAU,IAAI,OAAO,YAAY,CAAC,IAClC,UAAU,IAAI,IAAI,YAAY;AAEpC,QAAI,CAAC,gBAAgB;AACjB,eAAS,KAAK;AAAA,QACV,UAAU,IAAI;AAAA,QACd,MAAM,KAAK,YAAY;AAAA,QACvB,QAAQ,OAAO,KAAK;AAAA,QACpB,cAAc,IAAI;AAAA,QAClB,SAAS,EAAE,CAAC;AAAA,MAChB,CAAC;AAAA,IACL;AAAA,EACJ;AAEA,SAAO;AACX;AA2BO,SAAS,qBACZ,UACA,iBACqB;AACrB,MAAI;AACJ,MAAI;AACA,aAAS,WAAW,QAAQ;AAAA,EAChC,QAAQ;AACJ,WAAO,EAAE,YAAY,OAAO,QAAQ,0CAAqC;AAAA,EAC7E;AACA,MAAI,CAAC,QAAQ;AACT,WAAO,EAAE,YAAY,OAAO,QAAQ,uBAAuB,QAAQ,GAAG;AAAA,EAC1E;AAEA,MAAI,OAAO;AACX,MAAI;AACA,WAAO,SAAS,QAAQ,EAAE;AAAA,EAC9B,QAAQ;AACJ,WAAO,EAAE,YAAY,MAAM,QAAQ,qCAAqC;AAAA,EAC5E;AACA,MAAI,SAAS,GAAG;AACZ,WAAO,EAAE,YAAY,MAAM,QAAQ,2BAA2B;AAAA,EAClE;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AACA,oBAAgB,aAAa,UAAU,OAAO;AAC9C,eAAW,WAAW,QAAQ,EAAE,OAAO,aAAa,EAAE,OAAO,KAAK;AAAA,EACtE,SAAS,GAAG;AACR,WAAO,EAAE,YAAY,MAAM,QAAQ,gBAAiB,EAAY,OAAO,GAAG;AAAA,EAC9E;AAEA,MAAI,oBAAoB,QAAW;AAC/B,WAAO,EAAE,YAAY,MAAM,SAAS;AAAA,EACxC;AAGA,QAAM,IAAI,cAAc,KAAK;AAC7B,QAAM,IAAI,gBAAgB,KAAK;AAC/B,QAAM,iBAAiB,MAAM;AAC7B,SAAO;AAAA,IACH,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,QAAQ,iBAAiB,SAAY;AAAA,EACzC;AACJ;AAQO,SAAS,kBAAkB,UAAwC;AACtE,MAAI,SAAS,WAAW,EAAG,QAAO;AAClC,QAAM,QAAQ,CAAC,mEAAmE;AAClF,aAAW,KAAK,UAAU;AACtB,UAAM,KAAK,oBAAoB,EAAE,IAAI,KAAK,EAAE,MAAM,2BAA2B,EAAE,iBAAiB,MAAM,sBAAsB,OAAO,EAAE,YAAY,OAAO,GAAG;AAAA,EAC/J;AACA,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,SAAS;AACpB,QAAM,KAAK,wCAAwC;AACnD,QAAM,KAAK,iFAA4E;AACvF,SAAO,MAAM,KAAK,IAAI;AAC1B;","names":[]}
@@ -637,7 +637,29 @@ function registerGepaSkill() {
637
637
  },
638
638
  execute: gepaHistory
639
639
  });
640
- logger.info(COMPONENT, "GEPA evolution skill registered (3 tools)");
640
+ void (async () => {
641
+ try {
642
+ const { registerWatcher } = await import("../../agent/daemon.js");
643
+ registerWatcher("gepa-daily-evolve", async () => {
644
+ const cfg = loadConfig();
645
+ if (!cfg.selfImprove?.enabled) {
646
+ logger.debug(COMPONENT, "GEPA daily watcher skipped \u2014 selfImprove.disabled");
647
+ return;
648
+ }
649
+ const area = IMPROVEMENT_AREAS[Math.floor(Math.random() * IMPROVEMENT_AREAS.length)];
650
+ logger.info(COMPONENT, `GEPA daily watcher starting evolution for area: ${area.id}`);
651
+ try {
652
+ await gepaEvolve({ area: area.id, budgetMinutes: 30, maxGenerations: 5 });
653
+ } catch (e) {
654
+ logger.error(COMPONENT, `GEPA daily watcher failed: ${e.message}`);
655
+ }
656
+ }, 24 * 60 * 60 * 1e3);
657
+ logger.info(COMPONENT, "GEPA daily watcher registered (24h interval)");
658
+ } catch {
659
+ logger.warn(COMPONENT, "Daemon system not available \u2014 GEPA daily watcher not registered");
660
+ }
661
+ })();
662
+ logger.info(COMPONENT, "GEPA evolution skill registered (3 tools + daily watcher)");
641
663
  }
642
664
  export {
643
665
  crossover,