ctx-mcp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +169 -0
- package/migrations/001_init.sql +52 -0
- package/migrations/002_fts.sql +27 -0
- package/package.json +33 -0
- package/scripts/risk-check.ts +46 -0
- package/scripts/seed-trace.ts +144 -0
- package/scripts/similarity.ts +35 -0
- package/src/explain/explain.ts +125 -0
- package/src/graph/graph.ts +153 -0
- package/src/mcp/resources.ts +234 -0
- package/src/mcp/tools.ts +522 -0
- package/src/schemas.ts +238 -0
- package/src/server.ts +38 -0
- package/src/similarity/similarity.ts +299 -0
- package/src/storage/db.ts +17 -0
- package/src/storage/engine.ts +14 -0
- package/src/storage/migrations.ts +19 -0
- package/src/storage/queries.ts +294 -0
- package/src/storage/sqljsEngine.ts +207 -0
- package/src/util/fs.ts +15 -0
- package/src/util/hashing.ts +5 -0
- package/src/util/ids.ts +8 -0
- package/src/util/redact.ts +26 -0
- package/tests/explain.test.ts +81 -0
- package/tests/graph.test.ts +85 -0
- package/tests/query.test.ts +275 -0
- package/tests/similarity.test.ts +103 -0
- package/tests/storage.test.ts +91 -0
- package/tsconfig.json +16 -0
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
import type { StorageEngine } from "./engine.js";
|
|
2
|
+
|
|
3
|
+
/**
 * Row shape of the `traces` table — one recorded workflow run.
 * `*_json` columns hold JSON-serialized strings; timestamps are numeric
 * (presumably epoch milliseconds — TODO confirm against the seed script).
 */
export type TraceRow = {
  trace_id: string;
  workflow_id: string | null;
  actor: string | null;
  intent: string;
  tags_json: string;
  metadata_json: string;
  started_at: number;
  finished_at: number | null; // null while the trace is still running
  status: string | null;
  outcome_json: string | null;
};

/** Row shape of the `nodes` table — one step/fact recorded inside a trace. */
export type NodeRow = {
  node_id: string;
  trace_id: string;
  type: string; // e.g. "Assumption" | "Decision" | "Outcome" (see tests); not constrained here
  summary: string;
  data_json: string;
  confidence: number | null;
  metadata_json: string;
  created_at: number;
};

/** Row shape of the `edges` table — a typed, directed link between two nodes. */
export type EdgeRow = {
  edge_id: string;
  trace_id: string;
  from_node_id: string;
  to_node_id: string;
  relation_type: string; // e.g. "justified_by", "causes" (see tests)
  data_json: string;
  created_at: number;
};

/** Row shape of the `artifacts` table — content attached to a trace and optionally a node. */
export type ArtifactRow = {
  artifact_id: string;
  trace_id: string;
  node_id: string | null; // null when the artifact belongs to the trace as a whole
  artifact_type: string;
  content: string;
  redaction_level: string;
  metadata_json: string;
  sha256: string; // content hash, computed by the caller
  created_at: number;
};
|
|
48
|
+
|
|
49
|
+
export function insertTrace(engine: StorageEngine, row: TraceRow): void {
|
|
50
|
+
engine.exec(
|
|
51
|
+
`INSERT INTO traces(
|
|
52
|
+
trace_id, workflow_id, actor, intent, tags_json, metadata_json, started_at, finished_at, status, outcome_json
|
|
53
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);`,
|
|
54
|
+
[
|
|
55
|
+
row.trace_id,
|
|
56
|
+
row.workflow_id,
|
|
57
|
+
row.actor,
|
|
58
|
+
row.intent,
|
|
59
|
+
row.tags_json,
|
|
60
|
+
row.metadata_json,
|
|
61
|
+
row.started_at,
|
|
62
|
+
row.finished_at,
|
|
63
|
+
row.status,
|
|
64
|
+
row.outcome_json,
|
|
65
|
+
]
|
|
66
|
+
);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
export function insertNode(engine: StorageEngine, row: NodeRow): void {
|
|
70
|
+
engine.exec(
|
|
71
|
+
`INSERT INTO nodes(
|
|
72
|
+
node_id, trace_id, type, summary, data_json, confidence, metadata_json, created_at
|
|
73
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?);`,
|
|
74
|
+
[
|
|
75
|
+
row.node_id,
|
|
76
|
+
row.trace_id,
|
|
77
|
+
row.type,
|
|
78
|
+
row.summary,
|
|
79
|
+
row.data_json,
|
|
80
|
+
row.confidence,
|
|
81
|
+
row.metadata_json,
|
|
82
|
+
row.created_at,
|
|
83
|
+
]
|
|
84
|
+
);
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
export function insertEdge(engine: StorageEngine, row: EdgeRow): void {
|
|
88
|
+
engine.exec(
|
|
89
|
+
`INSERT INTO edges(
|
|
90
|
+
edge_id, trace_id, from_node_id, to_node_id, relation_type, data_json, created_at
|
|
91
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?);`,
|
|
92
|
+
[
|
|
93
|
+
row.edge_id,
|
|
94
|
+
row.trace_id,
|
|
95
|
+
row.from_node_id,
|
|
96
|
+
row.to_node_id,
|
|
97
|
+
row.relation_type,
|
|
98
|
+
row.data_json,
|
|
99
|
+
row.created_at,
|
|
100
|
+
]
|
|
101
|
+
);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
export function insertArtifact(engine: StorageEngine, row: ArtifactRow): void {
|
|
105
|
+
engine.exec(
|
|
106
|
+
`INSERT INTO artifacts(
|
|
107
|
+
artifact_id, trace_id, node_id, artifact_type, content, redaction_level, metadata_json, sha256, created_at
|
|
108
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);`,
|
|
109
|
+
[
|
|
110
|
+
row.artifact_id,
|
|
111
|
+
row.trace_id,
|
|
112
|
+
row.node_id,
|
|
113
|
+
row.artifact_type,
|
|
114
|
+
row.content,
|
|
115
|
+
row.redaction_level,
|
|
116
|
+
row.metadata_json,
|
|
117
|
+
row.sha256,
|
|
118
|
+
row.created_at,
|
|
119
|
+
]
|
|
120
|
+
);
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
export function finishTrace(
|
|
124
|
+
engine: StorageEngine,
|
|
125
|
+
traceId: string,
|
|
126
|
+
finishedAt: number,
|
|
127
|
+
status: string | null,
|
|
128
|
+
outcomeJson: string | null
|
|
129
|
+
): void {
|
|
130
|
+
engine.exec(
|
|
131
|
+
`UPDATE traces
|
|
132
|
+
SET finished_at = ?, status = ?, outcome_json = ?
|
|
133
|
+
WHERE trace_id = ?;`,
|
|
134
|
+
[finishedAt, status, outcomeJson, traceId]
|
|
135
|
+
);
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
export function getTrace(engine: StorageEngine, traceId: string): TraceRow | null {
|
|
139
|
+
return engine.queryOne<TraceRow>("SELECT * FROM traces WHERE trace_id = ?;", [traceId]);
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
export function getAllTraces(engine: StorageEngine): TraceRow[] {
|
|
143
|
+
return engine.queryAll<TraceRow>("SELECT * FROM traces ORDER BY started_at DESC;");
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
export function getNode(engine: StorageEngine, nodeId: string): NodeRow | null {
|
|
147
|
+
return engine.queryOne<NodeRow>("SELECT * FROM nodes WHERE node_id = ?;", [nodeId]);
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
export function getTraceNodes(engine: StorageEngine, traceId: string): NodeRow[] {
|
|
151
|
+
return engine.queryAll<NodeRow>("SELECT * FROM nodes WHERE trace_id = ? ORDER BY created_at ASC;", [traceId]);
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
export function getTraceEdges(engine: StorageEngine, traceId: string): EdgeRow[] {
|
|
155
|
+
return engine.queryAll<EdgeRow>("SELECT * FROM edges WHERE trace_id = ? ORDER BY created_at ASC;", [traceId]);
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
export function getTraceArtifacts(engine: StorageEngine, traceId: string): ArtifactRow[] {
|
|
159
|
+
return engine.queryAll<ArtifactRow>(
|
|
160
|
+
"SELECT * FROM artifacts WHERE trace_id = ? ORDER BY created_at ASC;",
|
|
161
|
+
[traceId]
|
|
162
|
+
);
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
export function getArtifactsForNodes(engine: StorageEngine, nodeIds: string[]): ArtifactRow[] {
|
|
166
|
+
if (nodeIds.length === 0) return [];
|
|
167
|
+
const placeholders = nodeIds.map(() => "?").join(",");
|
|
168
|
+
return engine.queryAll<ArtifactRow>(
|
|
169
|
+
`SELECT * FROM artifacts WHERE node_id IN (${placeholders}) ORDER BY created_at ASC;`,
|
|
170
|
+
nodeIds
|
|
171
|
+
);
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
export function searchNodes(
|
|
175
|
+
engine: StorageEngine,
|
|
176
|
+
opts: {
|
|
177
|
+
traceId?: string;
|
|
178
|
+
type?: string;
|
|
179
|
+
text?: string;
|
|
180
|
+
limit: number;
|
|
181
|
+
offset: number;
|
|
182
|
+
}
|
|
183
|
+
): NodeRow[] {
|
|
184
|
+
if (opts.text && hasFts(engine)) {
|
|
185
|
+
const where: string[] = [];
|
|
186
|
+
const params: unknown[] = [];
|
|
187
|
+
|
|
188
|
+
if (opts.traceId) {
|
|
189
|
+
where.push("n.trace_id = ?");
|
|
190
|
+
params.push(opts.traceId);
|
|
191
|
+
}
|
|
192
|
+
if (opts.type) {
|
|
193
|
+
where.push("n.type = ?");
|
|
194
|
+
params.push(opts.type);
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
where.push("nodes_fts MATCH ?");
|
|
198
|
+
params.push(opts.text);
|
|
199
|
+
|
|
200
|
+
const clause = where.length ? `WHERE ${where.join(" AND ")}` : "";
|
|
201
|
+
const sql = `SELECT n.* FROM nodes n JOIN nodes_fts ON nodes_fts.rowid = n.rowid ${clause}\n ORDER BY n.created_at DESC LIMIT ? OFFSET ?;`;
|
|
202
|
+
params.push(opts.limit, opts.offset);
|
|
203
|
+
|
|
204
|
+
return engine.queryAll<NodeRow>(sql, params);
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
const where: string[] = [];
|
|
208
|
+
const params: unknown[] = [];
|
|
209
|
+
|
|
210
|
+
if (opts.traceId) {
|
|
211
|
+
where.push("trace_id = ?");
|
|
212
|
+
params.push(opts.traceId);
|
|
213
|
+
}
|
|
214
|
+
if (opts.type) {
|
|
215
|
+
where.push("type = ?");
|
|
216
|
+
params.push(opts.type);
|
|
217
|
+
}
|
|
218
|
+
if (opts.text) {
|
|
219
|
+
where.push("summary LIKE ?");
|
|
220
|
+
params.push(`%${opts.text}%`);
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
const clause = where.length ? `WHERE ${where.join(" AND ")}` : "";
|
|
224
|
+
const sql = `SELECT * FROM nodes ${clause} ORDER BY created_at DESC LIMIT ? OFFSET ?;`;
|
|
225
|
+
params.push(opts.limit, opts.offset);
|
|
226
|
+
|
|
227
|
+
return engine.queryAll<NodeRow>(sql, params);
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
export function searchNodesForTraces(
|
|
231
|
+
engine: StorageEngine,
|
|
232
|
+
opts: {
|
|
233
|
+
traceIds: string[];
|
|
234
|
+
type?: string;
|
|
235
|
+
text?: string;
|
|
236
|
+
limit: number;
|
|
237
|
+
offset: number;
|
|
238
|
+
}
|
|
239
|
+
): NodeRow[] {
|
|
240
|
+
if (opts.traceIds.length === 0) return [];
|
|
241
|
+
|
|
242
|
+
const placeholders = opts.traceIds.map(() => "?").join(",");
|
|
243
|
+
|
|
244
|
+
if (opts.text && hasFts(engine)) {
|
|
245
|
+
const where: string[] = [`n.trace_id IN (${placeholders})`];
|
|
246
|
+
const params: unknown[] = [...opts.traceIds];
|
|
247
|
+
|
|
248
|
+
if (opts.type) {
|
|
249
|
+
where.push("n.type = ?");
|
|
250
|
+
params.push(opts.type);
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
where.push("nodes_fts MATCH ?");
|
|
254
|
+
params.push(opts.text);
|
|
255
|
+
|
|
256
|
+
const clause = `WHERE ${where.join(" AND ")}`;
|
|
257
|
+
const sql = `SELECT n.* FROM nodes n JOIN nodes_fts ON nodes_fts.rowid = n.rowid ${clause}
|
|
258
|
+
ORDER BY n.created_at DESC LIMIT ? OFFSET ?;`;
|
|
259
|
+
params.push(opts.limit, opts.offset);
|
|
260
|
+
|
|
261
|
+
return engine.queryAll<NodeRow>(sql, params);
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
const where: string[] = [`trace_id IN (${placeholders})`];
|
|
265
|
+
const params: unknown[] = [...opts.traceIds];
|
|
266
|
+
|
|
267
|
+
if (opts.type) {
|
|
268
|
+
where.push("type = ?");
|
|
269
|
+
params.push(opts.type);
|
|
270
|
+
}
|
|
271
|
+
if (opts.text) {
|
|
272
|
+
where.push("summary LIKE ?");
|
|
273
|
+
params.push(`%${opts.text}%`);
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
const clause = `WHERE ${where.join(" AND ")}`;
|
|
277
|
+
const sql = `SELECT * FROM nodes ${clause} ORDER BY created_at DESC LIMIT ? OFFSET ?;`;
|
|
278
|
+
params.push(opts.limit, opts.offset);
|
|
279
|
+
|
|
280
|
+
return engine.queryAll<NodeRow>(sql, params);
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
const ftsCache = new WeakMap<StorageEngine, boolean>();
|
|
284
|
+
|
|
285
|
+
function hasFts(engine: StorageEngine): boolean {
|
|
286
|
+
const cached = ftsCache.get(engine);
|
|
287
|
+
if (cached !== undefined) return cached;
|
|
288
|
+
const row = engine.queryOne<{ name: string }>(
|
|
289
|
+
"SELECT name FROM sqlite_master WHERE type='table' AND name='nodes_fts';"
|
|
290
|
+
);
|
|
291
|
+
const enabled = Boolean(row?.name);
|
|
292
|
+
ftsCache.set(engine, enabled);
|
|
293
|
+
return enabled;
|
|
294
|
+
}
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { createRequire } from "node:module";
|
|
5
|
+
|
|
6
|
+
import initSqlJs, { type Database, type SqlJsStatic } from "sql.js";
|
|
7
|
+
|
|
8
|
+
import type { StorageEngine, TxFn } from "./engine.js";
|
|
9
|
+
import { defaultMigrations, type Migration } from "./migrations.js";
|
|
10
|
+
import { atomicWriteFile } from "../util/fs.js";
|
|
11
|
+
|
|
12
|
+
/** Construction options for the sql.js-backed storage engine. */
export type SqlJsEngineOptions = {
  /** On-disk DB file; defaults to $TRACE_DB_PATH or ~/.decision-trace/trace.db. */
  dbPath?: string;
  /** Debounce window (ms) between a mutation and the disk flush; default 1000. */
  flushDebounceMs?: number;
  /** Override for locating sql.js assets (e.g. the wasm binary). */
  locateFile?: (file: string) => string;
  /** Optional log sink; defaults to a no-op. */
  onLog?: (msg: string) => void;
  /** Schema migrations to apply at init; defaults to defaultMigrations. */
  migrations?: Migration[];
};
|
|
19
|
+
|
|
20
|
+
export class SqlJsEngine implements StorageEngine {
|
|
21
|
+
private SQL!: SqlJsStatic;
|
|
22
|
+
private db!: Database;
|
|
23
|
+
private dbPath: string;
|
|
24
|
+
private dirty = false;
|
|
25
|
+
private flushTimer: NodeJS.Timeout | null = null;
|
|
26
|
+
private flushDebounceMs: number;
|
|
27
|
+
private onLog: (msg: string) => void;
|
|
28
|
+
private locateFile?: (file: string) => string;
|
|
29
|
+
private migrations: Migration[];
|
|
30
|
+
|
|
31
|
+
constructor(opts: SqlJsEngineOptions = {}) {
|
|
32
|
+
const defaultDir = path.join(os.homedir(), ".decision-trace");
|
|
33
|
+
const defaultPath = path.join(defaultDir, "trace.db");
|
|
34
|
+
const require = createRequire(import.meta.url);
|
|
35
|
+
const wasmPath = require.resolve("sql.js/dist/sql-wasm.wasm");
|
|
36
|
+
const defaultLocateFile = (file: string) =>
|
|
37
|
+
file.endsWith(".wasm") ? wasmPath : file;
|
|
38
|
+
|
|
39
|
+
this.dbPath = opts.dbPath || process.env.TRACE_DB_PATH || defaultPath;
|
|
40
|
+
this.flushDebounceMs = opts.flushDebounceMs ?? 1000;
|
|
41
|
+
this.onLog = opts.onLog ?? (() => undefined);
|
|
42
|
+
this.locateFile = opts.locateFile ?? defaultLocateFile;
|
|
43
|
+
this.migrations = opts.migrations ?? defaultMigrations;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
async init(): Promise<void> {
|
|
47
|
+
fs.mkdirSync(path.dirname(this.dbPath), { recursive: true });
|
|
48
|
+
|
|
49
|
+
this.SQL = await initSqlJs({ locateFile: this.locateFile });
|
|
50
|
+
|
|
51
|
+
let dbBytes: Uint8Array | undefined;
|
|
52
|
+
if (fs.existsSync(this.dbPath)) {
|
|
53
|
+
const buf = fs.readFileSync(this.dbPath);
|
|
54
|
+
dbBytes = new Uint8Array(buf);
|
|
55
|
+
this.onLog(`Loaded DB from ${this.dbPath} (${buf.length} bytes)`);
|
|
56
|
+
} else {
|
|
57
|
+
this.onLog(`Creating new DB at ${this.dbPath}`);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
this.db = new this.SQL.Database(dbBytes);
|
|
61
|
+
|
|
62
|
+
try {
|
|
63
|
+
this.exec("PRAGMA foreign_keys = ON;");
|
|
64
|
+
this.exec("PRAGMA journal_mode = WAL;");
|
|
65
|
+
this.exec("PRAGMA synchronous = NORMAL;");
|
|
66
|
+
this.exec("PRAGMA temp_store = MEMORY;");
|
|
67
|
+
} catch {
|
|
68
|
+
// Ignore pragma failures.
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
this.applyMigrations();
|
|
72
|
+
this.installShutdownHooks();
|
|
73
|
+
|
|
74
|
+
await this.flushNow();
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
transaction<T>(fn: TxFn<T>): T {
|
|
78
|
+
this.exec("BEGIN;");
|
|
79
|
+
try {
|
|
80
|
+
const out = fn();
|
|
81
|
+
this.exec("COMMIT;");
|
|
82
|
+
this.markDirty();
|
|
83
|
+
return out;
|
|
84
|
+
} catch (err) {
|
|
85
|
+
this.exec("ROLLBACK;");
|
|
86
|
+
throw err;
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
exec(sql: string, params?: unknown[]): void {
|
|
91
|
+
if (!params || params.length === 0) {
|
|
92
|
+
this.db.exec(sql);
|
|
93
|
+
return;
|
|
94
|
+
}
|
|
95
|
+
const stmt = this.db.prepare(sql);
|
|
96
|
+
try {
|
|
97
|
+
stmt.bind(params as any[]);
|
|
98
|
+
while (stmt.step()) {
|
|
99
|
+
// consume
|
|
100
|
+
}
|
|
101
|
+
} finally {
|
|
102
|
+
stmt.free();
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
queryAll<T = unknown>(sql: string, params?: unknown[]): T[] {
|
|
107
|
+
const stmt = this.db.prepare(sql);
|
|
108
|
+
try {
|
|
109
|
+
if (params) stmt.bind(params as any[]);
|
|
110
|
+
const rows: T[] = [];
|
|
111
|
+
while (stmt.step()) {
|
|
112
|
+
rows.push(stmt.getAsObject() as T);
|
|
113
|
+
}
|
|
114
|
+
return rows;
|
|
115
|
+
} finally {
|
|
116
|
+
stmt.free();
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
queryOne<T = unknown>(sql: string, params?: unknown[]): T | null {
|
|
121
|
+
const rows = this.queryAll<T>(sql, params);
|
|
122
|
+
return rows.length ? rows[0] : null;
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
async flushIfDirty(): Promise<void> {
|
|
126
|
+
if (!this.dirty) return;
|
|
127
|
+
await this.flushNow();
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
async flushNow(): Promise<void> {
|
|
131
|
+
const bytes = this.db.export();
|
|
132
|
+
const buf = Buffer.from(bytes);
|
|
133
|
+
|
|
134
|
+
atomicWriteFile(this.dbPath, buf);
|
|
135
|
+
|
|
136
|
+
this.dirty = false;
|
|
137
|
+
this.onLog(`Flushed DB to ${this.dbPath} (${buf.length} bytes)`);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
private markDirty(): void {
|
|
141
|
+
this.dirty = true;
|
|
142
|
+
if (this.flushTimer) return;
|
|
143
|
+
this.flushTimer = setTimeout(() => {
|
|
144
|
+
this.flushTimer = null;
|
|
145
|
+
void this.flushIfDirty();
|
|
146
|
+
}, this.flushDebounceMs);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
private applyMigrations(): void {
|
|
150
|
+
this.exec(`
|
|
151
|
+
CREATE TABLE IF NOT EXISTS schema_version (
|
|
152
|
+
version INTEGER NOT NULL
|
|
153
|
+
);
|
|
154
|
+
`);
|
|
155
|
+
|
|
156
|
+
const row = this.queryOne<{ version: number }>(
|
|
157
|
+
"SELECT version FROM schema_version LIMIT 1;"
|
|
158
|
+
);
|
|
159
|
+
const current = row?.version ?? 0;
|
|
160
|
+
|
|
161
|
+
let version = current;
|
|
162
|
+
for (const migration of this.migrations) {
|
|
163
|
+
if (migration.version <= version) continue;
|
|
164
|
+
this.onLog(`Applying migration v${migration.version}`);
|
|
165
|
+
try {
|
|
166
|
+
this.exec(migration.sql);
|
|
167
|
+
} catch (err) {
|
|
168
|
+
if (migration.optional) {
|
|
169
|
+
this.onLog(
|
|
170
|
+
`Migration v${migration.version} failed (continuing): ${(err as Error).message}`
|
|
171
|
+
);
|
|
172
|
+
continue;
|
|
173
|
+
}
|
|
174
|
+
throw err;
|
|
175
|
+
}
|
|
176
|
+
version = migration.version;
|
|
177
|
+
if (current === 0) {
|
|
178
|
+
this.exec("DELETE FROM schema_version;");
|
|
179
|
+
this.exec("INSERT INTO schema_version(version) VALUES (?);", [version]);
|
|
180
|
+
} else {
|
|
181
|
+
this.exec("UPDATE schema_version SET version=?;", [version]);
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
private installShutdownHooks(): void {
|
|
187
|
+
const flushAndExit = async (signal: string) => {
|
|
188
|
+
this.onLog(`Received ${signal}; flushing DB...`);
|
|
189
|
+
try {
|
|
190
|
+
await this.flushIfDirty();
|
|
191
|
+
} finally {
|
|
192
|
+
process.exit(0);
|
|
193
|
+
}
|
|
194
|
+
};
|
|
195
|
+
|
|
196
|
+
process.once("SIGINT", () => void flushAndExit("SIGINT"));
|
|
197
|
+
process.once("SIGTERM", () => void flushAndExit("SIGTERM"));
|
|
198
|
+
|
|
199
|
+
process.once("beforeExit", () => {
|
|
200
|
+
try {
|
|
201
|
+
void this.flushIfDirty();
|
|
202
|
+
} catch {
|
|
203
|
+
// Ignore shutdown flush errors.
|
|
204
|
+
}
|
|
205
|
+
});
|
|
206
|
+
}
|
|
207
|
+
}
|
package/src/util/fs.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
export function mkdirp(dir: string): void {
|
|
5
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
export function atomicWriteFile(targetPath: string, data: Buffer): void {
|
|
9
|
+
const dir = path.dirname(targetPath);
|
|
10
|
+
mkdirp(dir);
|
|
11
|
+
|
|
12
|
+
const tmpPath = `${targetPath}.tmp`;
|
|
13
|
+
fs.writeFileSync(tmpPath, data);
|
|
14
|
+
fs.renameSync(tmpPath, targetPath);
|
|
15
|
+
}
|
package/src/util/redact.ts
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
const DEFAULT_PATTERNS: RegExp[] = [
|
|
2
|
+
/sk-[A-Za-z0-9_-]{16,}/g,
|
|
3
|
+
/AIza[0-9A-Za-z_-]{35}/g,
|
|
4
|
+
/(?<=Bearer\s)[A-Za-z0-9._-]+/g,
|
|
5
|
+
/(?<=apikey=)[A-Za-z0-9._-]+/gi,
|
|
6
|
+
];
|
|
7
|
+
|
|
8
|
+
export type RedactionResult = {
|
|
9
|
+
redacted: string;
|
|
10
|
+
wasRedacted: boolean;
|
|
11
|
+
};
|
|
12
|
+
|
|
13
|
+
export function redactSecrets(input: string, patterns: RegExp[] = DEFAULT_PATTERNS): RedactionResult {
|
|
14
|
+
let redacted = input;
|
|
15
|
+
let wasRedacted = false;
|
|
16
|
+
|
|
17
|
+
for (const pattern of patterns) {
|
|
18
|
+
const next = redacted.replace(pattern, "[REDACTED]");
|
|
19
|
+
if (next !== redacted) {
|
|
20
|
+
wasRedacted = true;
|
|
21
|
+
redacted = next;
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
return { redacted, wasRedacted };
|
|
26
|
+
}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { describe, expect, it } from "vitest";
|
|
2
|
+
import { explainDecision } from "../src/explain/explain.js";
|
|
3
|
+
import type { ArtifactRow, EdgeRow, NodeRow } from "../src/storage/queries.js";
|
|
4
|
+
|
|
5
|
+
// Fixture: a minimal linear trace "t1" with three nodes,
// assumption-1 -> decision-1 -> outcome-1, created in that order.
const nodes: NodeRow[] = [
  {
    node_id: "assumption-1",
    trace_id: "t1",
    type: "Assumption",
    summary: "Assume X",
    data_json: "{}",
    confidence: null,
    metadata_json: "{}",
    created_at: 1,
  },
  {
    node_id: "decision-1",
    trace_id: "t1",
    type: "Decision",
    summary: "Decide Y",
    data_json: "{}",
    confidence: null,
    metadata_json: "{}",
    created_at: 2,
  },
  {
    node_id: "outcome-1",
    trace_id: "t1",
    type: "Outcome",
    summary: "Outcome Z",
    data_json: "{}",
    confidence: null,
    metadata_json: "{}",
    created_at: 3,
  },
];

// Edges linking the chain. NOTE(review): the assumption->decision edge uses
// relation "justified_by" in the assumption-to-decision direction; confirm
// against explain.ts that upstream sources are treated as justifications.
const edges: EdgeRow[] = [
  {
    edge_id: "e1",
    trace_id: "t1",
    from_node_id: "assumption-1",
    to_node_id: "decision-1",
    relation_type: "justified_by",
    data_json: "{}",
    created_at: 1,
  },
  {
    edge_id: "e2",
    trace_id: "t1",
    from_node_id: "decision-1",
    to_node_id: "outcome-1",
    relation_type: "causes",
    data_json: "{}",
    created_at: 2,
  },
];

// One artifact attached directly to the decision node.
const artifacts: ArtifactRow[] = [
  {
    artifact_id: "artifact-1",
    trace_id: "t1",
    node_id: "decision-1",
    artifact_type: "log",
    content: "decision detail",
    redaction_level: "internal",
    metadata_json: "{}",
    sha256: "abc",
    created_at: 2,
  },
];

describe("explainDecision", () => {
  it("groups upstream nodes and finds outcome", () => {
    // Depth 3 is enough to cover the whole two-hop chain above.
    const explanation = explainDecision("decision-1", nodes, edges, artifacts, 3);

    // The single upstream assumption, the downstream outcome, and the
    // decision's attached artifact should all be surfaced.
    expect(explanation.assumptions).toHaveLength(1);
    expect(explanation.outcome?.node_id).toBe("outcome-1");
    expect(explanation.artifacts).toHaveLength(1);
  });
});
|