titan-agent 5.3.2 → 5.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/agent.js +11 -1
- package/dist/agent/agent.js.map +1 -1
- package/dist/agent/agentLoop.js +36 -1
- package/dist/agent/agentLoop.js.map +1 -1
- package/dist/agent/session.js +106 -5
- package/dist/agent/session.js.map +1 -1
- package/dist/agent/subAgent.js +62 -1
- package/dist/agent/subAgent.js.map +1 -1
- package/dist/config/config.js +30 -8
- package/dist/config/config.js.map +1 -1
- package/dist/config/schema.js +25 -2
- package/dist/config/schema.js.map +1 -1
- package/dist/gateway/server.js +32 -1
- package/dist/gateway/server.js.map +1 -1
- package/dist/memory/graph.js +49 -15
- package/dist/memory/graph.js.map +1 -1
- package/dist/memory/index.js +192 -0
- package/dist/memory/index.js.map +1 -0
- package/dist/memory/memory.js +1 -0
- package/dist/memory/memory.js.map +1 -1
- package/dist/mesh/transport.js +60 -8
- package/dist/mesh/transport.js.map +1 -1
- package/dist/providers/anthropic.js +3 -2
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/base.js.map +1 -1
- package/dist/providers/google.js +94 -20
- package/dist/providers/google.js.map +1 -1
- package/dist/providers/modelCapabilities.js +59 -0
- package/dist/providers/modelCapabilities.js.map +1 -0
- package/dist/providers/ollama.js +3 -2
- package/dist/providers/ollama.js.map +1 -1
- package/dist/providers/openai.js +4 -3
- package/dist/providers/openai.js.map +1 -1
- package/dist/providers/openai_compat.js +3 -2
- package/dist/providers/openai_compat.js.map +1 -1
- package/dist/providers/router.js +63 -21
- package/dist/providers/router.js.map +1 -1
- package/dist/safety/fabricationGuard.js +140 -0
- package/dist/safety/fabricationGuard.js.map +1 -0
- package/dist/skills/builtin/gepa.js +23 -1
- package/dist/skills/builtin/gepa.js.map +1 -1
- package/dist/skills/builtin/model_trainer.js +31 -4
- package/dist/skills/builtin/model_trainer.js.map +1 -1
- package/dist/skills/builtin/self_improve.js +50 -2
- package/dist/skills/builtin/self_improve.js.map +1 -1
- package/dist/utils/constants.js +2 -2
- package/dist/utils/constants.js.map +1 -1
- package/package.json +1 -1

package/dist/memory/index.js
ADDED

@@ -0,0 +1,192 @@
+#!/usr/bin/env node
+const STOP_WORDS = /* @__PURE__ */ new Set([
+  "a",
+  "an",
+  "the",
+  "is",
+  "it",
+  "in",
+  "on",
+  "at",
+  "to",
+  "of",
+  "do",
+  "you",
+  "we",
+  "i",
+  "me",
+  "my",
+  "that",
+  "this",
+  "was",
+  "are",
+  "be",
+  "been",
+  "have",
+  "has",
+  "had",
+  "and",
+  "or",
+  "but",
+  "if",
+  "so",
+  "not",
+  "no",
+  "yes",
+  "can",
+  "how",
+  "what",
+  "about",
+  "from",
+  "with",
+  "for",
+  "up",
+  "out",
+  "its",
+  "our",
+  "your",
+  "they",
+  "them",
+  "he",
+  "she",
+  "his",
+  "her",
+  "will",
+  "would",
+  "could",
+  "should",
+  "did",
+  "does",
+  "just",
+  "now",
+  "some",
+  "any",
+  "all",
+  "very",
+  "too",
+  "also",
+  "than",
+  "then",
+  "when",
+  "where",
+  "who",
+  "which",
+  "there",
+  "here",
+  "again",
+  "today",
+  "earlier",
+  "remember"
+]);
+function tokenize(text) {
+  if (!text) return [];
+  return text.toLowerCase().replace(/[^a-z0-9\- ]+/g, " ").split(/\s+/).filter((t) => t.length > 1 && !STOP_WORDS.has(t));
+}
+class MemoryIndex {
+  /** token → array of postings */
+  postings = /* @__PURE__ */ new Map();
+  /** episode count, used to compute IDF */
+  docCount = 0;
+  /** episode IDs we've indexed, used for `removeEpisode` and `has` */
+  indexed = /* @__PURE__ */ new Set();
+  /** Add (or re-add) an episode to the index. Idempotent — calling twice
+   * with the same id replaces the previous entry. */
+  addEpisode(episodeId, content) {
+    if (this.indexed.has(episodeId)) {
+      this.removeEpisode(episodeId);
+    }
+    const tokens = tokenize(content);
+    if (tokens.length === 0) {
+      this.indexed.add(episodeId);
+      this.docCount += 1;
+      return;
+    }
+    const tf = /* @__PURE__ */ new Map();
+    const headTokens = new Set(tokenize(content.slice(0, 100)));
+    for (const t of tokens) tf.set(t, (tf.get(t) ?? 0) + 1);
+    for (const [token, count] of tf) {
+      const list = this.postings.get(token) ?? [];
+      list.push({ episodeId, tf: count, inHead: headTokens.has(token) });
+      this.postings.set(token, list);
+    }
+    this.indexed.add(episodeId);
+    this.docCount += 1;
+  }
+  /** Remove an episode from the index. Used when pruning. */
+  removeEpisode(episodeId) {
+    if (!this.indexed.has(episodeId)) return;
+    for (const [token, list] of this.postings) {
+      const filtered = list.filter((p) => p.episodeId !== episodeId);
+      if (filtered.length === 0) this.postings.delete(token);
+      else if (filtered.length !== list.length) this.postings.set(token, filtered);
+    }
+    this.indexed.delete(episodeId);
+    this.docCount = Math.max(0, this.docCount - 1);
+  }
+  /** True if the episode is currently indexed. */
+  has(episodeId) {
+    return this.indexed.has(episodeId);
+  }
+  /** Number of episodes in the index. */
+  size() {
+    return this.docCount;
+  }
+  /** Number of unique tokens (vocabulary size). */
+  vocabularySize() {
+    return this.postings.size;
+  }
+  /** Search the index. Returns up to `limit` matches sorted by score.
+   * Score is BM25-lite: sum over query terms of (tf × idf) + headBoost.
+   * Empty query returns empty array. */
+  search(query, limit = 20) {
+    const queryTokens = tokenize(query);
+    if (queryTokens.length === 0) return [];
+    const scoreById = /* @__PURE__ */ new Map();
+    for (const term of queryTokens) {
+      const postings = this.postings.get(term);
+      if (!postings || postings.length === 0) continue;
+      const df = postings.length;
+      const idf = 1 + Math.log((this.docCount + 1) / (df + 1));
+      for (const p of postings) {
+        const termScore = p.tf * idf + (p.inHead ? 0.5 : 0);
+        const acc = scoreById.get(p.episodeId) ?? { score: 0, matched: /* @__PURE__ */ new Set() };
+        acc.score += termScore;
+        acc.matched.add(term);
+        scoreById.set(p.episodeId, acc);
+      }
+    }
+    const matches = [];
+    for (const [episodeId, { score, matched }] of scoreById) {
+      matches.push({ episodeId, score, matchedTerms: Array.from(matched) });
+    }
+    matches.sort((a, b) => b.score - a.score);
+    return matches.slice(0, limit);
+  }
+  /** Drop all entries — used for tests + full rebuilds. */
+  clear() {
+    this.postings.clear();
+    this.indexed.clear();
+    this.docCount = 0;
+  }
+  /** Build a fresh index from a list of episodes. */
+  static fromEpisodes(episodes) {
+    const idx = new MemoryIndex();
+    for (const ep of episodes) idx.addEpisode(ep.id, ep.content);
+    return idx;
+  }
+}
+let _instance = null;
+function getMemoryIndex() {
+  if (!_instance) _instance = new MemoryIndex();
+  return _instance;
+}
+function _resetMemoryIndexForTests() {
+  _instance = new MemoryIndex();
+}
+export {
+  MemoryIndex,
+  _resetMemoryIndexForTests,
+  getMemoryIndex,
+  tokenize
+};
+//# sourceMappingURL=index.js.map
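
The dist output above ships with its comments stripped, but the TypeScript source embedded in its source map (next section) documents the rationale and intended usage. A minimal usage sketch against the exported API; the episode IDs, contents, and import path are illustrative only:

// Sketch: exercising the MemoryIndex API from the diff above.
// Episode data is invented for illustration.
import { MemoryIndex, tokenize } from "./index.js";

const index = new MemoryIndex();
index.addEpisode("ep-1", "Checked the weather forecast for Berlin this morning");
index.addEpisode("ep-2", "Refactored the mesh transport routing table");

// tokenize() lowercases, strips punctuation, and drops stop words:
console.log(tokenize("What is the weather today?")); // ["weather"]

// search() scores each hit as the sum over query terms of tf * idf, plus a
// flat 0.5 when the term appears in the first 100 chars. Here docCount = 2
// and df = 1, so idf = 1 + ln(3/2) ≈ 1.405, and "ep-1" scores
// ≈ 1.905 per term, ≈ 3.81 total.
console.log(index.search("weather forecast"));
// [{ episodeId: "ep-1", score: 3.81..., matchedTerms: ["weather", "forecast"] }]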

package/dist/memory/index.js.map
ADDED

@@ -0,0 +1 @@
+ [single-line source map; sources: ../../src/memory/index.ts. Its embedded sourcesContent carries the module doc comment: this inverted index ("Phase 9 / Track B2") replaces the linear BM25-ish scan in graph.ts's searchMemory(), which at 5000 episodes and 5 query terms performed ~25,000 substring searches per query, with a token → posting-list lookup. Sized for ~5000 episodes of ~50 tokens each (a single-digit-MB footprint); no stemming or fuzzy match, since that is what vectors.ts is for. The stated contract: "what the linear scan returned, faster". Indexes are rebuilt from the graph on startup via MemoryIndex.fromEpisodes(), so no persistence.]

package/dist/memory/memory.js.map
CHANGED

@@ -1 +1 @@
- [single-line source map; sources: ../../src/memory/memory.ts, the JSON-file-backed persistence system. The embedded source declares the save debouncer as module-private: `function debouncedSave(): void`.]
+ [single-line source map, regenerated; the embedded source now reads `export function debouncedSave(): void`, matching the +1 -0 change to memory.js, with mappings updated accordingly.]

package/dist/mesh/transport.js
CHANGED

@@ -47,8 +47,32 @@ function findNextHop(destinationNodeId) {
     routingTable.delete(destinationNodeId);
     return null;
   }
+  const nextHopWs = peerConnections.get(entry.nextHopNodeId);
+  if (!nextHopWs || nextHopWs.readyState !== WebSocket.OPEN) {
+    routingTable.delete(destinationNodeId);
+    logger.debug(COMPONENT, `Pruned route to ${destinationNodeId} via dead next-hop ${entry.nextHopNodeId}`);
+    return null;
+  }
   return entry.nextHopNodeId;
 }
+function invalidateRoutesVia(disconnectedNodeId) {
+  let removed = 0;
+  for (const [dest, entry] of routingTable) {
+    if (entry.nextHopNodeId === disconnectedNodeId || dest === disconnectedNodeId) {
+      routingTable.delete(dest);
+      removed++;
+    }
+  }
+  if (removed > 0) {
+    logger.info(COMPONENT, `Mesh peer ${disconnectedNodeId} dropped \u2014 invalidated ${removed} route(s); broadcasting refresh`);
+    try {
+      broadcastRouteAdvertisement();
+    } catch (err) {
+      logger.debug(COMPONENT, `Route refresh broadcast failed: ${err.message}`);
+    }
+  }
+  return removed;
+}
 function upsertRoute(entry) {
   const existing = routingTable.get(entry.destinationNodeId);
   if (!existing || entry.cost < existing.cost) {

@@ -239,6 +263,7 @@ async function connectToPeer(address, port, localNodeId, meshSecret) {
         pendingRequests.delete(reqId);
       }
     }
+    invalidateRoutesVia(remoteNodeId);
     logger.info(COMPONENT, `Peer disconnected: ${remoteNodeId}`);
   }
   if (!resolved) {

@@ -393,6 +418,8 @@ function handleMeshWebSocket(ws, nodeId, localNodeId, onTaskRequest) {
       if (innerAction === "task_request" && onTaskRequest && msg.requestId) {
         activeRemoteTasks++;
         let replied = false;
+        const originalRequesterId = msg.fromNodeId;
+        const originalRequestId = msg.requestId;
         const sendReply = (payload) => {
           if (replied) return;
           replied = true;

@@ -401,18 +428,40 @@ function handleMeshWebSocket(ws, nodeId, localNodeId, onTaskRequest) {
             type: "mesh",
             action: "task_response",
             fromNodeId: localNodeId2,
-            toNodeId:
-            requestId:
-            payload,
+            toNodeId: originalRequesterId,
+            requestId: originalRequestId,
+            payload: { ...payload, originalRequesterId },
             timestamp: (/* @__PURE__ */ new Date()).toISOString()
           };
-          if (
+          if (peerConnections.has(originalRequesterId)) {
+            sendToPeer(originalRequesterId, reply);
+            return;
+          }
+          const routed = routeMessageMultiHop(originalRequesterId, {
+            ...reply,
+            action: "route_forward",
+            payload: {
+              innerAction: "task_response",
+              originalRequesterId,
+              ...payload
+            }
+          });
+          if (!routed && ws.readyState === WebSocket.OPEN) {
+            ws.send(JSON.stringify(reply));
+          }
         };
         try {
           onTaskRequest({ ...msg, action: "task_request" }, sendReply);
         } catch (err) {
           sendReply({ error: `Handler error: ${err.message}` });
         }
+      } else if (innerAction === "task_response" && msg.requestId) {
+        const pending = pendingRequests.get(msg.requestId);
+        if (pending) {
+          clearTimeout(pending.timeout);
+          pendingRequests.delete(msg.requestId);
+          pending.resolve(msg.payload);
+        }
       }
     } else {
       routeMessageMultiHop(msg.toNodeId, msg);

@@ -421,17 +470,20 @@ function handleMeshWebSocket(ws, nodeId, localNodeId, onTaskRequest) {
     } catch {
     }
   });
-
+  const cleanup = (cause) => {
     peerConnections.delete(nodeId);
     for (const [reqId, req] of pendingRequests) {
       if (req.peerNodeId === nodeId) {
        clearTimeout(req.timeout);
-        req.reject(new Error(`Peer
+        req.reject(new Error(`Peer ${cause}: ${nodeId}`));
        pendingRequests.delete(reqId);
      }
    }
-
-
+    invalidateRoutesVia(nodeId);
+    logger.info(COMPONENT, `Mesh peer disconnected (${cause}): ${nodeId}`);
+  };
+  ws.on("close", () => cleanup("close"));
+  ws.on("error", () => cleanup("error"));
 }
 function startHeartbeat(localNodeId, payload, intervalMs = 6e4) {
   if (heartbeatInterval) return;