kongbrain 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +385 -0
- package/openclaw.plugin.json +66 -0
- package/package.json +65 -0
- package/src/acan.ts +309 -0
- package/src/causal.ts +237 -0
- package/src/cognitive-check.ts +330 -0
- package/src/config.ts +64 -0
- package/src/context-engine.ts +487 -0
- package/src/daemon-manager.ts +148 -0
- package/src/daemon-types.ts +65 -0
- package/src/embeddings.ts +77 -0
- package/src/errors.ts +43 -0
- package/src/graph-context.ts +989 -0
- package/src/hooks/after-tool-call.ts +99 -0
- package/src/hooks/before-prompt-build.ts +44 -0
- package/src/hooks/before-tool-call.ts +86 -0
- package/src/hooks/llm-output.ts +173 -0
- package/src/identity.ts +218 -0
- package/src/index.ts +435 -0
- package/src/intent.ts +190 -0
- package/src/memory-daemon.ts +495 -0
- package/src/orchestrator.ts +348 -0
- package/src/prefetch.ts +200 -0
- package/src/reflection.ts +280 -0
- package/src/retrieval-quality.ts +266 -0
- package/src/schema.surql +387 -0
- package/src/skills.ts +343 -0
- package/src/soul.ts +936 -0
- package/src/state.ts +119 -0
- package/src/surreal.ts +1371 -0
- package/src/tools/core-memory.ts +120 -0
- package/src/tools/introspect.ts +329 -0
- package/src/tools/recall.ts +102 -0
- package/src/wakeup.ts +318 -0
- package/src/workspace-migrate.ts +752 -0
package/src/acan.ts
ADDED
|
@@ -0,0 +1,309 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ACAN — Attentive Cross-Attention Network for learned memory scoring.
|
|
3
|
+
*
|
|
4
|
+
* Replaces the fixed 6-signal WMR weights in scoreResults() with a learned
|
|
5
|
+
* cross-attention model. Ships dormant — auto-trains and activates when
|
|
6
|
+
* enough retrieval outcome data accumulates (5000+ labeled pairs).
|
|
7
|
+
*
|
|
8
|
+
* Ported from kongbrain — uses SurrealStore instead of module-level DB.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
import { Worker } from "node:worker_threads";

import { swallow } from "./errors.js";
import type { SurrealStore } from "./surreal.js";
|
|
17
|
+
|
|
18
|
+
// ── Types ──
|
|
19
|
+
|
|
20
|
+
export interface ACANWeights {
|
|
21
|
+
W_q: number[][];
|
|
22
|
+
W_k: number[][];
|
|
23
|
+
W_final: number[];
|
|
24
|
+
bias: number;
|
|
25
|
+
version: number;
|
|
26
|
+
trainedAt?: number;
|
|
27
|
+
trainedOnSamples?: number;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
export interface ACANCandidate {
|
|
31
|
+
embedding: number[];
|
|
32
|
+
recency: number;
|
|
33
|
+
importance: number;
|
|
34
|
+
access: number;
|
|
35
|
+
neighborBonus: number;
|
|
36
|
+
provenUtility: number;
|
|
37
|
+
reflectionBoost?: number;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
interface TrainingSample {
|
|
41
|
+
query_embedding: number[];
|
|
42
|
+
memory_embedding: number[];
|
|
43
|
+
retrieval_score: number;
|
|
44
|
+
was_neighbor: boolean;
|
|
45
|
+
utilization: number;
|
|
46
|
+
importance: number;
|
|
47
|
+
access_count: number;
|
|
48
|
+
recency: number;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
interface TrainingConfig {
|
|
52
|
+
epochs: number;
|
|
53
|
+
lr: number;
|
|
54
|
+
earlyStopPatience: number;
|
|
55
|
+
lrDecayPatience: number;
|
|
56
|
+
lrFloor: number;
|
|
57
|
+
valSplit: number;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
// ── Module state ──
|
|
61
|
+
|
|
62
|
+
let _weights: ACANWeights | null = null;
|
|
63
|
+
let _active = false;
|
|
64
|
+
|
|
65
|
+
const ATTN_DIM = 64;
|
|
66
|
+
const EMBED_DIM = 1024;
|
|
67
|
+
const FEATURE_COUNT = 7;
|
|
68
|
+
const WEIGHTS_FILENAME = "acan_weights.json";
|
|
69
|
+
const TRAINING_THRESHOLD = 5000;
|
|
70
|
+
|
|
71
|
+
function getKongDir(): string {
|
|
72
|
+
const dir = join(homedir(), ".kongbrain");
|
|
73
|
+
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
|
74
|
+
return dir;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
const DEFAULT_TRAINING_CONFIG: TrainingConfig = {
|
|
78
|
+
epochs: 80,
|
|
79
|
+
lr: 0.001,
|
|
80
|
+
earlyStopPatience: 8,
|
|
81
|
+
lrDecayPatience: 4,
|
|
82
|
+
lrFloor: 0.00005,
|
|
83
|
+
valSplit: 0.2,
|
|
84
|
+
};
|
|
85
|
+
|
|
86
|
+
// ── Weight loading / saving ──
|
|
87
|
+
|
|
88
|
+
function loadWeights(path: string): ACANWeights | null {
|
|
89
|
+
try {
|
|
90
|
+
if (!existsSync(path)) return null;
|
|
91
|
+
const raw = JSON.parse(readFileSync(path, "utf-8"));
|
|
92
|
+
if (raw.version !== 1) return null;
|
|
93
|
+
if (!Array.isArray(raw.W_q) || raw.W_q.length !== EMBED_DIM) return null;
|
|
94
|
+
if (!Array.isArray(raw.W_k) || raw.W_k.length !== EMBED_DIM) return null;
|
|
95
|
+
if (!Array.isArray(raw.W_final) || raw.W_final.length !== FEATURE_COUNT) return null;
|
|
96
|
+
if (typeof raw.bias !== "number") return null;
|
|
97
|
+
if (raw.W_q[0].length !== ATTN_DIM || raw.W_k[0].length !== ATTN_DIM) return null;
|
|
98
|
+
return raw as ACANWeights;
|
|
99
|
+
} catch (e) {
|
|
100
|
+
swallow("acan:loadWeights", e);
|
|
101
|
+
return null;
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
function saveWeights(weights: ACANWeights, path: string): void {
|
|
106
|
+
const dir = join(path, "..");
|
|
107
|
+
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
|
108
|
+
writeFileSync(path, JSON.stringify(weights), "utf-8");
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
export function initACAN(weightsDir?: string): boolean {
|
|
112
|
+
const dir = weightsDir ?? getKongDir();
|
|
113
|
+
_weights = loadWeights(join(dir, WEIGHTS_FILENAME));
|
|
114
|
+
_active = _weights !== null;
|
|
115
|
+
return _active;
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
/** Whether learned ACAN scoring is currently enabled (weights loaded). */
export function isACANActive(): boolean {
  return _active;
}
|
|
121
|
+
|
|
122
|
+
// ── Linear algebra ──
|
|
123
|
+
|
|
124
|
+
function dot(a: number[], b: number[]): number {
|
|
125
|
+
let sum = 0;
|
|
126
|
+
for (let i = 0; i < a.length; i++) sum += a[i] * b[i];
|
|
127
|
+
return sum;
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
function projectVec(vec: number[], matrix: number[][]): number[] {
|
|
131
|
+
const out = new Array(matrix[0].length).fill(0);
|
|
132
|
+
for (let i = 0; i < vec.length; i++) {
|
|
133
|
+
if (vec[i] === 0) continue;
|
|
134
|
+
const row = matrix[i];
|
|
135
|
+
for (let j = 0; j < out.length; j++) out[j] += vec[i] * row[j];
|
|
136
|
+
}
|
|
137
|
+
return out;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// ── ACAN inference ──
|
|
141
|
+
|
|
142
|
+
export function scoreWithACAN(queryEmbedding: number[], candidates: ACANCandidate[]): number[] {
|
|
143
|
+
if (!_weights || candidates.length === 0) return [];
|
|
144
|
+
|
|
145
|
+
const q = projectVec(queryEmbedding, _weights.W_q);
|
|
146
|
+
const scale = Math.sqrt(ATTN_DIM);
|
|
147
|
+
const scores: number[] = [];
|
|
148
|
+
|
|
149
|
+
for (const cand of candidates) {
|
|
150
|
+
const k = projectVec(cand.embedding, _weights.W_k);
|
|
151
|
+
const attnLogit = dot(q, k) / scale;
|
|
152
|
+
const features = [
|
|
153
|
+
attnLogit, cand.recency, cand.importance, cand.access,
|
|
154
|
+
cand.neighborBonus, cand.provenUtility, cand.reflectionBoost ?? 0,
|
|
155
|
+
];
|
|
156
|
+
scores.push(dot(features, _weights.W_final) + _weights.bias);
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
return scores;
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// ── Training data fetching ──
|
|
163
|
+
|
|
164
|
+
async function getTrainingDataCount(store: SurrealStore): Promise<number> {
|
|
165
|
+
if (!store.isAvailable()) return 0;
|
|
166
|
+
try {
|
|
167
|
+
const flat = await store.queryFirst<{ count: number }>(
|
|
168
|
+
`SELECT count() AS count FROM retrieval_outcome WHERE query_embedding != NONE GROUP ALL`,
|
|
169
|
+
);
|
|
170
|
+
return flat[0]?.count ?? 0;
|
|
171
|
+
} catch (e) {
|
|
172
|
+
swallow("acan:count", e);
|
|
173
|
+
return 0;
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
/**
 * Pull every labeled retrieval outcome plus the embedding of the memory it
 * references, and join them into TrainingSample rows. Rows missing either
 * vector are dropped.
 *
 * NOTE(review): embeddings are fetched one query per unique memory id — an
 * N+1 pattern; acceptable at current scale but worth batching if it grows.
 */
async function fetchTrainingData(store: SurrealStore): Promise<TrainingSample[]> {
  if (!store.isAvailable()) return [];

  // llm_relevance (when present) overrides raw utilization as the label.
  const outcomes = await store.queryFirst<any>(
    `SELECT query_embedding, memory_id, memory_table,
    IF llm_relevance != NONE THEN llm_relevance ELSE utilization END AS utilization,
    retrieval_score, was_neighbor,
    importance, access_count, recency, created_at
    FROM retrieval_outcome
    WHERE query_embedding != NONE
    ORDER BY created_at ASC`,
  );
  if (outcomes.length === 0) return [];

  // Resolve each referenced memory's embedding (skipping memories without one).
  const uniqueMemIds = [...new Set(outcomes.map((r: any) => String(r.memory_id)))];
  const embeddingMap = new Map<string, number[]>();
  for (const mid of uniqueMemIds) {
    try {
      const flat = await store.queryFirst<{ id: string; embedding: number[] }>(
        `SELECT id, embedding FROM type::record($mid) WHERE embedding != NONE`,
        { mid },
      );
      if (flat[0]?.embedding) embeddingMap.set(mid, flat[0].embedding);
    } catch (e) { swallow("acan:fetchEmb", e); }
  }

  // Join outcomes with embeddings, defaulting absent scalar fields.
  const samples: TrainingSample[] = [];
  for (const row of outcomes) {
    const memEmb = embeddingMap.get(String(row.memory_id));
    if (!memEmb || !row.query_embedding) continue;
    samples.push({
      query_embedding: row.query_embedding,
      memory_embedding: memEmb,
      retrieval_score: row.retrieval_score ?? 0,
      was_neighbor: row.was_neighbor ?? false,
      utilization: row.utilization ?? 0,
      importance: row.importance ?? 0.5,
      access_count: row.access_count ?? 0,
      recency: row.recency ?? 0.5,
    });
  }
  return samples;
}
|
|
220
|
+
|
|
221
|
+
// ── Background training ──
|
|
222
|
+
|
|
223
|
+
/**
 * Train ACAN weights on a worker thread so the caller is never blocked.
 * The worker runs plain SGD with squared error against `utilization`,
 * holds out a validation split, halves the learning rate on plateau, and
 * early-stops when validation loss stops improving. On success the weights
 * are persisted to `weightsPath` and hot-swapped into module state
 * (_weights/_active). All failures are deliberately non-fatal.
 *
 * NOTE(review): the worker ships as an inlined data: URL module, so it must
 * stay dependency-free and mirror dot/projectVec defined above — keep the
 * copies in sync when either changes.
 *
 * @param samples     labeled (query, memory) pairs to fit
 * @param weightsPath destination file for the trained weights JSON
 * @param warmStart   optional previous weights to continue training from
 * @param config      overrides merged over DEFAULT_TRAINING_CONFIG
 */
function trainInBackground(
  samples: TrainingSample[],
  weightsPath: string,
  warmStart?: ACANWeights,
  config?: Partial<TrainingConfig>,
): void {
  const cfg = { ...DEFAULT_TRAINING_CONFIG, ...config };

  // Worker source kept as a string so no separate worker file ships with
  // the package. It is evaluated as an ES module via the data: URL below.
  const workerCode = `
import { parentPort, workerData } from "node:worker_threads";
const { samples, cfg, warmStart, EMBED_DIM, ATTN_DIM, FEATURE_COUNT } = workerData;
function dot(a, b) { let s = 0; for (let i = 0; i < a.length; i++) s += a[i] * b[i]; return s; }
function projectVec(vec, matrix) {
const out = new Array(matrix[0].length).fill(0);
for (let i = 0; i < vec.length; i++) { if (vec[i] === 0) continue; const row = matrix[i]; for (let j = 0; j < out.length; j++) out[j] += vec[i] * row[j]; }
return out;
}
function shuffle(arr) { for (let i = arr.length - 1; i > 0; i--) { const j = Math.floor(Math.random() * (i + 1)); [arr[i], arr[j]] = [arr[j], arr[i]]; } return arr; }
const n = samples.length;
const auxFeatures = samples.map(s => [s.recency, s.importance, s.access_count, s.was_neighbor ? 1.0 : 0.0, 0.0, 0.0]);
const indices = shuffle(Array.from({ length: n }, (_, i) => i));
const valSize = Math.max(1, Math.floor(n * cfg.valSplit));
const valIdx = indices.slice(0, valSize);
const trainIdx = indices.slice(valSize);
const nTrain = trainIdx.length;
let W_q, W_k, W_final, bias;
if (warmStart) { W_q = JSON.parse(JSON.stringify(warmStart.W_q)); W_k = JSON.parse(JSON.stringify(warmStart.W_k)); W_final = [...warmStart.W_final]; bias = warmStart.bias; }
else { const xQK = Math.sqrt(2/(EMBED_DIM+ATTN_DIM)), xF = Math.sqrt(2/(FEATURE_COUNT+1)); W_q = []; W_k = []; for (let i = 0; i < EMBED_DIM; i++) { W_q.push(Array.from({length:ATTN_DIM}, () => (Math.random()*2-1)*xQK)); W_k.push(Array.from({length:ATTN_DIM}, () => (Math.random()*2-1)*xQK)); } W_final = Array.from({length:FEATURE_COUNT}, () => (Math.random()*2-1)*xF); W_final[0] = 0.3; bias = 0.0; }
const scale = Math.sqrt(ATTN_DIM);
function evalLoss(idxList) { let total = 0; for (const si of idxList) { const s = samples[si]; const q = projectVec(s.query_embedding, W_q); const k = projectVec(s.memory_embedding, W_k); const attn = dot(q,k)/scale; const features = [attn,...auxFeatures[si]]; const score = dot(features, W_final)+bias; const err = score - s.utilization; total += err*err; } return total/idxList.length; }
let lr = cfg.lr, bestValLoss = Infinity, epochsSinceImprovement = 0, epochsSinceLrDecay = 0, lastTrainLoss = Infinity, actualEpochs = 0;
for (let epoch = 0; epoch < cfg.epochs; epoch++) {
actualEpochs = epoch+1; shuffle(trainIdx); let totalLoss = 0;
for (const si of trainIdx) { const s = samples[si]; const q = projectVec(s.query_embedding, W_q); const k = projectVec(s.memory_embedding, W_k); const attn = dot(q,k)/scale; const features = [attn,...auxFeatures[si]]; const score = dot(features, W_final)+bias; const err = score - s.utilization; totalLoss += err*err; const dScore = (2/nTrain)*err; for (let j = 0; j < FEATURE_COUNT; j++) W_final[j] -= lr*dScore*features[j]; bias -= lr*dScore; const dAttn = dScore*W_final[0]; const dQ = new Array(ATTN_DIM), dK = new Array(ATTN_DIM); for (let j = 0; j < ATTN_DIM; j++) { dQ[j] = dAttn*k[j]/scale; dK[j] = dAttn*q[j]/scale; } for (let i = 0; i < EMBED_DIM; i++) { if (s.query_embedding[i]!==0) { const qi=s.query_embedding[i], row=W_q[i]; for (let j=0;j<ATTN_DIM;j++) row[j]-=lr*dQ[j]*qi; } if (s.memory_embedding[i]!==0) { const mi=s.memory_embedding[i], row=W_k[i]; for (let j=0;j<ATTN_DIM;j++) row[j]-=lr*dK[j]*mi; } } }
lastTrainLoss = totalLoss/nTrain; const valLoss = evalLoss(valIdx);
if (valLoss < bestValLoss) { bestValLoss = valLoss; epochsSinceImprovement = 0; epochsSinceLrDecay = 0; } else { epochsSinceImprovement++; epochsSinceLrDecay++; }
if (epochsSinceLrDecay >= cfg.lrDecayPatience && lr > cfg.lrFloor) { lr = Math.max(lr*0.5, cfg.lrFloor); epochsSinceLrDecay = 0; }
if (epochsSinceImprovement >= cfg.earlyStopPatience) break;
}
parentPort.postMessage({ weights: { W_q, W_k, W_final, bias, version: 1, trainedAt: Date.now(), trainedOnSamples: n }, trainLoss: lastTrainLoss, valLoss: bestValLoss, actualEpochs, finalLr: lr, config: cfg });
`;

  // Encode the source as a data: URL so Worker imports it as a module.
  const worker = new Worker(new URL(`data:text/javascript,${encodeURIComponent(workerCode)}`), {
    workerData: { samples, cfg, warmStart: warmStart ?? null, EMBED_DIM, ATTN_DIM, FEATURE_COUNT },
  });

  // On success: persist weights, then activate learned scoring in-process.
  worker.on("message", (msg: any) => {
    try {
      saveWeights(msg.weights, weightsPath);
      _weights = msg.weights;
      _active = true;
    } catch { /* non-fatal */ }
  });

  worker.on("error", () => { /* training failure is non-fatal */ });
}
|
|
279
|
+
|
|
280
|
+
// ── Startup: auto-train and activate ──

// Retrain when the labeled-data set has grown by 50% since the last run…
const STALENESS_GROWTH_FACTOR = 0.5;
// …or when the weights are older than 7 days.
const STALENESS_MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000;
|
|
284
|
+
|
|
285
|
+
export async function checkACANReadiness(store?: SurrealStore): Promise<void> {
|
|
286
|
+
if (!store) return;
|
|
287
|
+
const weightsPath = join(getKongDir(), WEIGHTS_FILENAME);
|
|
288
|
+
const hasWeights = initACAN();
|
|
289
|
+
const count = await getTrainingDataCount(store);
|
|
290
|
+
|
|
291
|
+
if (hasWeights && _weights) {
|
|
292
|
+
const trainedOn = _weights.trainedOnSamples ?? 0;
|
|
293
|
+
const trainedAt = _weights.trainedAt ?? 0;
|
|
294
|
+
const growthRatio = trainedOn > 0 ? (count - trainedOn) / trainedOn : Infinity;
|
|
295
|
+
const ageMs = Date.now() - trainedAt;
|
|
296
|
+
const isStale = growthRatio >= STALENESS_GROWTH_FACTOR || ageMs >= STALENESS_MAX_AGE_MS;
|
|
297
|
+
if (!isStale) return;
|
|
298
|
+
} else if (count < TRAINING_THRESHOLD) {
|
|
299
|
+
return;
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
try {
|
|
303
|
+
const samples = await fetchTrainingData(store);
|
|
304
|
+
if (samples.length < TRAINING_THRESHOLD) return;
|
|
305
|
+
trainInBackground(samples, weightsPath, hasWeights ? _weights ?? undefined : undefined);
|
|
306
|
+
} catch {
|
|
307
|
+
// training is best-effort
|
|
308
|
+
}
|
|
309
|
+
}
|
package/src/causal.ts
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Causal Memory Graph
|
|
3
|
+
*
|
|
4
|
+
* Activates the dormant caused_by/supports/contradicts edges in the graph.
|
|
5
|
+
* At session end, analyzes the conversation for cause-effect sequences
|
|
6
|
+
* (bug->investigation->fix->outcome) and creates causal chains linking memories.
|
|
7
|
+
* During retrieval, traverses causal edges to pull full chains as context.
|
|
8
|
+
*
|
|
9
|
+
* Ported from kongbrain — takes SurrealStore/EmbeddingService as params.
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import type { EmbeddingService } from "./embeddings.js";
|
|
13
|
+
import type { SurrealStore, VectorSearchResult } from "./surreal.js";
|
|
14
|
+
import { swallow } from "./errors.js";
|
|
15
|
+
|
|
16
|
+
// --- Types ---
|
|
17
|
+
|
|
18
|
+
/**
 * A cause→effect pair extracted from a session (e.g. bug → applied fix),
 * persisted as linked memory nodes plus a causal_chain metadata row.
 */
export interface CausalChain {
  triggerText: string;   // the initiating event
  outcomeText: string;   // what happened as a result
  chainType: "debug" | "refactor" | "feature" | "fix";
  success: boolean;      // decides supports vs contradicts edge polarity
  confidence: number;    // used by queryCausalContext's minConfidence filter
  description: string;   // human-readable summary of the chain
}
|
|
26
|
+
|
|
27
|
+
/**
 * Persist extracted causal chains into the graph: one memory node each for
 * trigger and outcome (plus an optional searchable description node),
 * connected with caused_by and supports/contradicts edges, and a
 * causal_chain metadata row. Every step is best-effort; failures are
 * swallowed per chain so one bad chain cannot block the rest.
 */
export async function linkCausalEdges(
  chains: CausalChain[],
  sessionId: string,
  store: SurrealStore,
  embeddings: EmbeddingService,
): Promise<void> {
  if (chains.length === 0 || !store.isAvailable()) return;

  for (const chain of chains) {
    try {
      // Create trigger memory; the embedding is optional — the memory is
      // still created without one if the embedder fails or is offline.
      let triggerEmb: number[] | null = null;
      if (embeddings.isAvailable()) {
        try { triggerEmb = await embeddings.embed(chain.triggerText); } catch (e) { swallow("causal:ok", e); }
      }
      const triggerId = await store.createMemory(
        chain.triggerText, triggerEmb, 5, `causal_trigger_${chain.chainType}`, sessionId,
      );

      // Create outcome memory.
      // NOTE(review): third arg is presumably importance (outcome 6 vs
      // trigger 5) — confirm against SurrealStore.createMemory's signature.
      let outcomeEmb: number[] | null = null;
      if (embeddings.isAvailable()) {
        try { outcomeEmb = await embeddings.embed(chain.outcomeText); } catch (e) { swallow("causal:ok", e); }
      }
      const outcomeId = await store.createMemory(
        chain.outcomeText, outcomeEmb, 6, `causal_outcome_${chain.chainType}`, sessionId,
      );

      if (!triggerId || !outcomeId) continue;

      // Create causal edges: outcome caused_by trigger, plus an edge whose
      // type encodes success polarity.
      await store.relate(outcomeId, "caused_by", triggerId).catch(e => swallow.warn("causal:relateCausedBy", e));
      if (chain.success) {
        await store.relate(outcomeId, "supports", triggerId).catch(e => swallow.warn("causal:relateSupports", e));
      } else {
        await store.relate(outcomeId, "contradicts", triggerId).catch(e => swallow.warn("causal:relateContradicts", e));
      }

      // Embed the description as a searchable memory node linked to both ends.
      let descriptionId: string | null = null;
      if (chain.description && chain.description.length > 10) {
        const descText = `[${chain.chainType}${chain.success ? "" : " FAILED"}] ${chain.description}`;
        let descEmb: number[] | null = null;
        if (embeddings.isAvailable()) {
          try { descEmb = await embeddings.embed(descText); } catch (e) { swallow("causal:ok", e); }
        }
        descriptionId = await store.createMemory(
          descText, descEmb, 5, `causal_description_${chain.chainType}`, sessionId,
        );
        if (descriptionId) {
          await store.relate(descriptionId, "describes", triggerId).catch(e => swallow.warn("causal:relateDescTrigger", e));
          await store.relate(descriptionId, "describes", outcomeId).catch(e => swallow.warn("causal:relateDescOutcome", e));
        }
      }

      // Store chain metadata for later confidence filtering and metrics.
      await store.queryExec(`CREATE causal_chain CONTENT $data`, {
        data: {
          session_id: sessionId,
          trigger_memory: triggerId,
          outcome_memory: outcomeId,
          description_memory: descriptionId,
          chain_type: chain.chainType,
          success: chain.success,
          confidence: chain.confidence,
          description: chain.description,
        },
      }).catch(e => swallow.warn("causal:storeChain", e));
    } catch (e) {
      swallow("causal:silent", e);
    }
  }
}
|
|
104
|
+
|
|
105
|
+
// --- Causal Context Retrieval ---
|
|
106
|
+
|
|
107
|
+
/**
 * Given seed memory IDs from vector search, traverse causal edges
 * (caused_by, supports, contradicts, describes) in both directions up to
 * `hops` deep. Cosine similarity to `queryVec` is computed server-side so
 * traversed nodes compete fairly with vector-search hits. Results are then
 * restricted to memories participating in a causal_chain whose confidence
 * meets `minConfidence` (filter is skipped if that lookup fails).
 */
export async function queryCausalContext(
  seedIds: string[],
  queryVec: number[],
  hops = 2,
  minConfidence = 0.4,
  store?: SurrealStore,
): Promise<VectorSearchResult[]> {
  if (seedIds.length === 0 || !store?.isAvailable()) return [];

  // IDs are interpolated directly into query strings below, so only strict
  // table:id record identifiers are accepted (guards against injection).
  const RECORD_ID_RE = /^[a-zA-Z_][a-zA-Z0-9_]*:[a-zA-Z0-9_]+$/;
  const validIds = seedIds.filter((id) => RECORD_ID_RE.test(id)).slice(0, 10);
  if (validIds.length === 0) return [];

  const causalEdges = ["caused_by", "supports", "contradicts", "describes"];
  const seen = new Set<string>(validIds); // dedupe across hops
  let frontier = validIds;
  const results: VectorSearchResult[] = [];
  const bindings = { vec: queryVec };

  // Nodes without an embedding score 0 instead of erroring server-side.
  const scoreExpr = `, IF embedding != NONE AND array::len(embedding) > 0
  THEN vector::similarity::cosine(embedding, $vec)
  ELSE 0 END AS score`;

  // Breadth-first traversal: one round of parallel edge queries per hop.
  for (let hop = 0; hop < hops && frontier.length > 0; hop++) {
    // Forward direction: frontier node -> edge -> any table (`?`).
    const queries = frontier.flatMap((id) =>
      causalEdges.map((edge) =>
        store.queryFirst<any>(
          `SELECT id, text, importance, access_count AS accessCount,
          created_at AS timestamp, category, meta::tb(id) AS table${scoreExpr}
          FROM ${id}->${edge}->? LIMIT 3`,
          bindings,
        ).catch(e => { swallow.warn("causal:edge-query", e); return [] as any[]; }),
      ),
    );

    // Reverse direction: any node -> edge -> frontier node.
    const reverseQueries = frontier.flatMap((id) =>
      causalEdges.map((edge) =>
        store.queryFirst<any>(
          `SELECT id, text, importance, access_count AS accessCount,
          created_at AS timestamp, category, meta::tb(id) AS table${scoreExpr}
          FROM ${id}<-${edge}<-? LIMIT 3`,
          bindings,
        ).catch(e => { swallow.warn("causal:edge-query", e); return [] as any[]; }),
      ),
    );

    const allQueryResults = await Promise.all([...queries, ...reverseQueries]);
    const nextFrontier: string[] = [];

    for (const rows of allQueryResults) {
      for (const row of rows) {
        const nodeId = String(row.id);
        if (seen.has(nodeId)) continue;
        seen.add(nodeId);

        const text = row.text ?? "";
        if (text) {
          results.push({
            id: nodeId,
            text,
            score: row.score ?? 0,
            importance: row.importance,
            accessCount: row.accessCount,
            timestamp: row.timestamp,
            table: String(row.table ?? "memory"),
            source: row.category,
          });
          // Only well-formed record ids may seed the next hop — they get
          // interpolated into the next round of query strings.
          if (RECORD_ID_RE.test(nodeId)) {
            nextFrontier.push(nodeId);
          }
        }
      }
    }

    frontier = nextFrontier.slice(0, 5); // cap fan-out per hop
  }

  // Filter by causal_chain confidence
  if (results.length > 0 && minConfidence > 0) {
    const resultIds = results.map(r => r.id);
    try {
      const chains = await store.queryFirst<{ trigger_memory: string; outcome_memory: string; confidence: number }>(
        `SELECT trigger_memory, outcome_memory, confidence FROM causal_chain
        WHERE confidence >= $minConf AND (trigger_memory IN $ids OR outcome_memory IN $ids)`,
        { minConf: minConfidence, ids: resultIds },
      );
      const allowedIds = new Set<string>();
      for (const c of chains) {
        allowedIds.add(String(c.trigger_memory));
        allowedIds.add(String(c.outcome_memory));
      }
      return results.filter(r => allowedIds.has(r.id));
    } catch (e) {
      // If the confidence lookup fails, fall back to unfiltered results.
      swallow.warn("causal:confidence-filter", e);
      return results;
    }
  }

  return results;
}
|
|
212
|
+
|
|
213
|
+
/**
|
|
214
|
+
* Get causal chain metadata for a session (for metrics/display).
|
|
215
|
+
*/
|
|
216
|
+
export async function getSessionCausalChains(
|
|
217
|
+
sessionId: string,
|
|
218
|
+
store: SurrealStore,
|
|
219
|
+
): Promise<{ count: number; successRate: number }> {
|
|
220
|
+
try {
|
|
221
|
+
if (!store.isAvailable()) return { count: 0, successRate: 0 };
|
|
222
|
+
const rows = await store.queryFirst<{ total: number; successes: number }>(
|
|
223
|
+
`SELECT count() AS total, math::sum(IF success THEN 1 ELSE 0 END) AS successes
|
|
224
|
+
FROM causal_chain WHERE session_id = $sid GROUP ALL`,
|
|
225
|
+
{ sid: sessionId },
|
|
226
|
+
);
|
|
227
|
+
const row = rows[0];
|
|
228
|
+
if (!row || !row.total) return { count: 0, successRate: 0 };
|
|
229
|
+
return {
|
|
230
|
+
count: Number(row.total),
|
|
231
|
+
successRate: Number(row.successes) / Number(row.total),
|
|
232
|
+
};
|
|
233
|
+
} catch (e) {
|
|
234
|
+
swallow("causal:metrics", e);
|
|
235
|
+
return { count: 0, successRate: 0 };
|
|
236
|
+
}
|
|
237
|
+
}
|