kotadb 2.0.1-next.20260203000100 → 2.0.1-next.20260203164934
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/api/openapi/builder.ts +1 -1
- package/src/db/migrations/004_memory_layer.sql +183 -0
- package/src/db/sqlite-schema.sql +207 -0
- package/src/instrument.ts +1 -4
- package/src/logging/logger.ts +3 -10
- package/src/mcp/server.ts +71 -0
- package/src/mcp/tools.ts +758 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "kotadb",
|
|
3
|
-
"version": "2.0.1-next.
|
|
3
|
+
"version": "2.0.1-next.20260203164934",
|
|
4
4
|
"description": "Local-only code intelligence tool for CLI agents. SQLite-backed repository indexing and code search via MCP.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"module": "src/index.ts",
|
|
@@ -184,7 +184,7 @@ Repository indexing and advanced code intelligence operations are available via
|
|
|
184
184
|
const duration = Date.now() - startTime;
|
|
185
185
|
const pathCount = Object.keys(spec.paths || {}).length;
|
|
186
186
|
|
|
187
|
-
process.
|
|
187
|
+
process.stderr.write(JSON.stringify({
|
|
188
188
|
level: 'info',
|
|
189
189
|
module: 'openapi-builder',
|
|
190
190
|
message: 'OpenAPI spec generated',
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
-- SQLite Migration: Memory Layer Schema Extensions
|
|
2
|
+
--
|
|
3
|
+
-- Migration: 004_memory_layer
|
|
4
|
+
-- Issue: Memory Layer for Agent Intelligence
|
|
5
|
+
-- Author: Claude Code
|
|
6
|
+
-- Date: 2026-02-03
|
|
7
|
+
--
|
|
8
|
+
-- This migration extends the memory layer tables with additional schema:
|
|
9
|
+
-- - decisions: Add status column (active, superseded, deprecated)
|
|
10
|
+
-- - failed_approaches: Alternative to failures with clearer naming
|
|
11
|
+
-- - pattern_annotations: Enhanced patterns with evidence/confidence scoring
|
|
12
|
+
-- - agent_sessions: Track agent work sessions
|
|
13
|
+
-- - session_insights: Insights linked to sessions with file references
|
|
14
|
+
--
|
|
15
|
+
-- Note: The base sqlite-schema.sql already has decisions, failures, patterns,
|
|
16
|
+
-- and insights tables. This migration adds enhanced versions and the missing
|
|
17
|
+
-- agent_sessions table for complete session tracking.
|
|
18
|
+
|
|
19
|
+
-- ============================================================================
|
|
20
|
+
-- 1. Extend Decisions Table - Add status column
|
|
21
|
+
-- ============================================================================
|
|
22
|
+
-- Add status column to track decision lifecycle
|
|
23
|
+
|
|
24
|
+
ALTER TABLE decisions ADD COLUMN status TEXT DEFAULT 'active';
|
|
25
|
+
|
|
26
|
+
-- Add index for active decisions (most common query)
|
|
27
|
+
CREATE INDEX IF NOT EXISTS idx_decisions_status ON decisions(status) WHERE status = 'active';
|
|
28
|
+
|
|
29
|
+
-- ============================================================================
|
|
30
|
+
-- 2. Failed Approaches Table (alternative to failures with clearer naming)
|
|
31
|
+
-- ============================================================================
|
|
32
|
+
-- Tracks what didn't work to prevent repeating mistakes
|
|
33
|
+
|
|
34
|
+
CREATE TABLE IF NOT EXISTS failed_approaches (
|
|
35
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
36
|
+
repository_id TEXT NOT NULL, -- Foreign key to repositories
|
|
37
|
+
title TEXT NOT NULL, -- Short description
|
|
38
|
+
problem TEXT NOT NULL, -- What problem was being solved
|
|
39
|
+
approach TEXT NOT NULL, -- What was tried
|
|
40
|
+
failure_reason TEXT NOT NULL, -- Why it failed
|
|
41
|
+
related_files TEXT, -- JSON array of related file paths
|
|
42
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
43
|
+
|
|
44
|
+
FOREIGN KEY (repository_id) REFERENCES repositories(id) ON DELETE CASCADE
|
|
45
|
+
);
|
|
46
|
+
|
|
47
|
+
-- Indexes for common queries
|
|
48
|
+
CREATE INDEX IF NOT EXISTS idx_failed_approaches_repository_id ON failed_approaches(repository_id);
|
|
49
|
+
CREATE INDEX IF NOT EXISTS idx_failed_approaches_created_at ON failed_approaches(created_at DESC);
|
|
50
|
+
|
|
51
|
+
-- ============================================================================
|
|
52
|
+
-- 3. Failed Approaches FTS5 Virtual Table
|
|
53
|
+
-- ============================================================================
|
|
54
|
+
-- External content FTS5 for searching failed approaches
|
|
55
|
+
|
|
56
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS failed_approaches_fts USING fts5(
|
|
57
|
+
title,
|
|
58
|
+
problem,
|
|
59
|
+
approach,
|
|
60
|
+
failure_reason,
|
|
61
|
+
content='failed_approaches',
|
|
62
|
+
content_rowid='rowid'
|
|
63
|
+
);
|
|
64
|
+
|
|
65
|
+
-- ============================================================================
|
|
66
|
+
-- 4. Failed Approaches FTS5 Sync Triggers
|
|
67
|
+
-- ============================================================================
|
|
68
|
+
|
|
69
|
+
-- After INSERT: Add new failed approach to FTS index
|
|
70
|
+
CREATE TRIGGER IF NOT EXISTS failed_approaches_fts_ai
|
|
71
|
+
AFTER INSERT ON failed_approaches
|
|
72
|
+
BEGIN
|
|
73
|
+
INSERT INTO failed_approaches_fts(rowid, title, problem, approach, failure_reason)
|
|
74
|
+
VALUES (new.rowid, new.title, new.problem, new.approach, new.failure_reason);
|
|
75
|
+
END;
|
|
76
|
+
|
|
77
|
+
-- After DELETE: Remove failed approach from FTS index
|
|
78
|
+
CREATE TRIGGER IF NOT EXISTS failed_approaches_fts_ad
|
|
79
|
+
AFTER DELETE ON failed_approaches
|
|
80
|
+
BEGIN
|
|
81
|
+
INSERT INTO failed_approaches_fts(failed_approaches_fts, rowid, title, problem, approach, failure_reason)
|
|
82
|
+
VALUES ('delete', old.rowid, old.title, old.problem, old.approach, old.failure_reason);
|
|
83
|
+
END;
|
|
84
|
+
|
|
85
|
+
-- After UPDATE: Update failed approach in FTS index (delete old, insert new)
|
|
86
|
+
CREATE TRIGGER IF NOT EXISTS failed_approaches_fts_au
|
|
87
|
+
AFTER UPDATE ON failed_approaches
|
|
88
|
+
BEGIN
|
|
89
|
+
INSERT INTO failed_approaches_fts(failed_approaches_fts, rowid, title, problem, approach, failure_reason)
|
|
90
|
+
VALUES ('delete', old.rowid, old.title, old.problem, old.approach, old.failure_reason);
|
|
91
|
+
INSERT INTO failed_approaches_fts(rowid, title, problem, approach, failure_reason)
|
|
92
|
+
VALUES (new.rowid, new.title, new.problem, new.approach, new.failure_reason);
|
|
93
|
+
END;
|
|
94
|
+
|
|
95
|
+
-- ============================================================================
|
|
96
|
+
-- 5. Pattern Annotations Table
|
|
97
|
+
-- ============================================================================
|
|
98
|
+
-- Enhanced pattern detection with evidence counting and confidence scoring
|
|
99
|
+
|
|
100
|
+
CREATE TABLE IF NOT EXISTS pattern_annotations (
|
|
101
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
102
|
+
repository_id TEXT NOT NULL, -- Foreign key to repositories
|
|
103
|
+
pattern_type TEXT NOT NULL, -- Pattern category (logging, error-handling, testing, etc.)
|
|
104
|
+
pattern_name TEXT NOT NULL, -- Pattern identifier
|
|
105
|
+
description TEXT NOT NULL, -- Human-readable description
|
|
106
|
+
example_code TEXT, -- Code example (optional)
|
|
107
|
+
evidence_count INTEGER NOT NULL DEFAULT 1, -- Number of occurrences found
|
|
108
|
+
confidence REAL NOT NULL DEFAULT 1.0, -- Confidence score (0.0-1.0)
|
|
109
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
110
|
+
|
|
111
|
+
FOREIGN KEY (repository_id) REFERENCES repositories(id) ON DELETE CASCADE,
|
|
112
|
+
|
|
113
|
+
CHECK (confidence >= 0.0 AND confidence <= 1.0),
|
|
114
|
+
CHECK (evidence_count >= 1)
|
|
115
|
+
);
|
|
116
|
+
|
|
117
|
+
-- Indexes for common queries
|
|
118
|
+
CREATE INDEX IF NOT EXISTS idx_pattern_annotations_repository_id ON pattern_annotations(repository_id);
|
|
119
|
+
CREATE INDEX IF NOT EXISTS idx_pattern_annotations_pattern_type ON pattern_annotations(pattern_type);
|
|
120
|
+
CREATE INDEX IF NOT EXISTS idx_pattern_annotations_confidence ON pattern_annotations(confidence DESC);
|
|
121
|
+
-- Composite index for high-confidence patterns by type
|
|
122
|
+
CREATE INDEX IF NOT EXISTS idx_pattern_annotations_type_confidence
|
|
123
|
+
ON pattern_annotations(repository_id, pattern_type, confidence DESC);
|
|
124
|
+
|
|
125
|
+
-- ============================================================================
|
|
126
|
+
-- 6. Agent Sessions Table
|
|
127
|
+
-- ============================================================================
|
|
128
|
+
-- Tracks agent work sessions for learning and analysis
|
|
129
|
+
|
|
130
|
+
CREATE TABLE IF NOT EXISTS agent_sessions (
|
|
131
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
132
|
+
repository_id TEXT NOT NULL, -- Foreign key to repositories
|
|
133
|
+
agent_type TEXT, -- Type of agent (plan, build, improve, etc.)
|
|
134
|
+
task_summary TEXT, -- What the agent was working on
|
|
135
|
+
outcome TEXT, -- Session outcome
|
|
136
|
+
files_modified TEXT, -- JSON array of modified file paths
|
|
137
|
+
started_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
138
|
+
ended_at TEXT, -- NULL if session is ongoing
|
|
139
|
+
|
|
140
|
+
FOREIGN KEY (repository_id) REFERENCES repositories(id) ON DELETE CASCADE,
|
|
141
|
+
|
|
142
|
+
CHECK (outcome IS NULL OR outcome IN ('success', 'failure', 'partial'))
|
|
143
|
+
);
|
|
144
|
+
|
|
145
|
+
-- Indexes for common queries
|
|
146
|
+
CREATE INDEX IF NOT EXISTS idx_agent_sessions_repository_id ON agent_sessions(repository_id);
|
|
147
|
+
CREATE INDEX IF NOT EXISTS idx_agent_sessions_agent_type ON agent_sessions(agent_type) WHERE agent_type IS NOT NULL;
|
|
148
|
+
CREATE INDEX IF NOT EXISTS idx_agent_sessions_outcome ON agent_sessions(outcome) WHERE outcome IS NOT NULL;
|
|
149
|
+
CREATE INDEX IF NOT EXISTS idx_agent_sessions_started_at ON agent_sessions(started_at DESC);
|
|
150
|
+
-- Partial index for ongoing sessions
|
|
151
|
+
CREATE INDEX IF NOT EXISTS idx_agent_sessions_ongoing ON agent_sessions(repository_id) WHERE ended_at IS NULL;
|
|
152
|
+
|
|
153
|
+
-- ============================================================================
|
|
154
|
+
-- 7. Session Insights Table
|
|
155
|
+
-- ============================================================================
|
|
156
|
+
-- Insights discovered during agent sessions with proper foreign keys
|
|
157
|
+
|
|
158
|
+
CREATE TABLE IF NOT EXISTS session_insights (
|
|
159
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
160
|
+
session_id TEXT NOT NULL, -- Foreign key to agent_sessions
|
|
161
|
+
insight_type TEXT NOT NULL, -- Type of insight
|
|
162
|
+
content TEXT NOT NULL, -- The insight content
|
|
163
|
+
related_file_id TEXT, -- Optional reference to indexed_files
|
|
164
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
165
|
+
|
|
166
|
+
FOREIGN KEY (session_id) REFERENCES agent_sessions(id) ON DELETE CASCADE,
|
|
167
|
+
FOREIGN KEY (related_file_id) REFERENCES indexed_files(id) ON DELETE SET NULL,
|
|
168
|
+
|
|
169
|
+
CHECK (insight_type IN ('discovery', 'failure', 'workaround'))
|
|
170
|
+
);
|
|
171
|
+
|
|
172
|
+
-- Indexes for common queries
|
|
173
|
+
CREATE INDEX IF NOT EXISTS idx_session_insights_session_id ON session_insights(session_id);
|
|
174
|
+
CREATE INDEX IF NOT EXISTS idx_session_insights_insight_type ON session_insights(insight_type);
|
|
175
|
+
CREATE INDEX IF NOT EXISTS idx_session_insights_related_file ON session_insights(related_file_id)
|
|
176
|
+
WHERE related_file_id IS NOT NULL;
|
|
177
|
+
CREATE INDEX IF NOT EXISTS idx_session_insights_created_at ON session_insights(created_at DESC);
|
|
178
|
+
|
|
179
|
+
-- ============================================================================
|
|
180
|
+
-- 8. Record Migration
|
|
181
|
+
-- ============================================================================
|
|
182
|
+
|
|
183
|
+
INSERT OR IGNORE INTO schema_migrations (name) VALUES ('004_memory_layer');
|
package/src/db/sqlite-schema.sql
CHANGED
|
@@ -258,6 +258,213 @@ CREATE TABLE IF NOT EXISTS schema_migrations (
|
|
|
258
258
|
|
|
259
259
|
CREATE INDEX IF NOT EXISTS idx_schema_migrations_applied ON schema_migrations(applied_at DESC);
|
|
260
260
|
|
|
261
|
+
|
|
262
|
+
-- ============================================================================
|
|
263
|
+
-- 10. Memory Layer Tables (Agent Learning & Knowledge Persistence)
|
|
264
|
+
-- ============================================================================
|
|
265
|
+
-- These tables support the memory layer for cross-session knowledge persistence.
|
|
266
|
+
-- They enable agents to record decisions, track failures, and share insights.
|
|
267
|
+
--
|
|
268
|
+
-- Issue: Memory Layer Implementation
|
|
269
|
+
-- Author: Claude Code
|
|
270
|
+
-- Date: 2026-02-03
|
|
271
|
+
|
|
272
|
+
-- ============================================================================
|
|
273
|
+
-- 10.1 Decisions Table - Architectural and design decisions
|
|
274
|
+
-- ============================================================================
|
|
275
|
+
|
|
276
|
+
CREATE TABLE IF NOT EXISTS decisions (
|
|
277
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
278
|
+
repository_id TEXT, -- Foreign key to repositories (nullable)
|
|
279
|
+
title TEXT NOT NULL, -- Decision title/summary
|
|
280
|
+
context TEXT NOT NULL, -- Context/background for the decision
|
|
281
|
+
decision TEXT NOT NULL, -- The actual decision made
|
|
282
|
+
scope TEXT NOT NULL, -- Decision scope
|
|
283
|
+
rationale TEXT, -- Why this decision was made
|
|
284
|
+
alternatives TEXT DEFAULT '[]', -- JSON array of considered alternatives
|
|
285
|
+
related_files TEXT DEFAULT '[]', -- JSON array of related file paths
|
|
286
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
287
|
+
updated_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
288
|
+
metadata TEXT DEFAULT '{}', -- Additional metadata as JSON
|
|
289
|
+
|
|
290
|
+
FOREIGN KEY (repository_id) REFERENCES repositories(id) ON DELETE CASCADE,
|
|
291
|
+
CHECK (scope IN ('architecture', 'pattern', 'convention', 'workaround'))
|
|
292
|
+
);
|
|
293
|
+
|
|
294
|
+
-- Indexes for decisions
|
|
295
|
+
CREATE INDEX IF NOT EXISTS idx_decisions_repository_id ON decisions(repository_id);
|
|
296
|
+
CREATE INDEX IF NOT EXISTS idx_decisions_scope ON decisions(scope);
|
|
297
|
+
CREATE INDEX IF NOT EXISTS idx_decisions_created_at ON decisions(created_at DESC);
|
|
298
|
+
|
|
299
|
+
-- FTS5 virtual table for decision search
|
|
300
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS decisions_fts USING fts5(
|
|
301
|
+
title,
|
|
302
|
+
context,
|
|
303
|
+
decision,
|
|
304
|
+
rationale,
|
|
305
|
+
content='decisions',
|
|
306
|
+
content_rowid='rowid'
|
|
307
|
+
);
|
|
308
|
+
|
|
309
|
+
-- FTS5 sync triggers for decisions
|
|
310
|
+
CREATE TRIGGER IF NOT EXISTS decisions_fts_ai
|
|
311
|
+
AFTER INSERT ON decisions
|
|
312
|
+
BEGIN
|
|
313
|
+
INSERT INTO decisions_fts(rowid, title, context, decision, rationale)
|
|
314
|
+
VALUES (new.rowid, new.title, new.context, new.decision, new.rationale);
|
|
315
|
+
END;
|
|
316
|
+
|
|
317
|
+
CREATE TRIGGER IF NOT EXISTS decisions_fts_ad
|
|
318
|
+
AFTER DELETE ON decisions
|
|
319
|
+
BEGIN
|
|
320
|
+
INSERT INTO decisions_fts(decisions_fts, rowid, title, context, decision, rationale)
|
|
321
|
+
VALUES ('delete', old.rowid, old.title, old.context, old.decision, old.rationale);
|
|
322
|
+
END;
|
|
323
|
+
|
|
324
|
+
CREATE TRIGGER IF NOT EXISTS decisions_fts_au
|
|
325
|
+
AFTER UPDATE ON decisions
|
|
326
|
+
BEGIN
|
|
327
|
+
INSERT INTO decisions_fts(decisions_fts, rowid, title, context, decision, rationale)
|
|
328
|
+
VALUES ('delete', old.rowid, old.title, old.context, old.decision, old.rationale);
|
|
329
|
+
INSERT INTO decisions_fts(rowid, title, context, decision, rationale)
|
|
330
|
+
VALUES (new.rowid, new.title, new.context, new.decision, new.rationale);
|
|
331
|
+
END;
|
|
332
|
+
|
|
333
|
+
-- ============================================================================
|
|
334
|
+
-- 10.2 Failures Table - Failed approaches to avoid repeating mistakes
|
|
335
|
+
-- ============================================================================
|
|
336
|
+
|
|
337
|
+
CREATE TABLE IF NOT EXISTS failures (
|
|
338
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
339
|
+
repository_id TEXT, -- Foreign key to repositories (nullable)
|
|
340
|
+
title TEXT NOT NULL, -- Failure title/summary
|
|
341
|
+
problem TEXT NOT NULL, -- The problem being solved
|
|
342
|
+
approach TEXT NOT NULL, -- The approach that was tried
|
|
343
|
+
failure_reason TEXT NOT NULL, -- Why it failed
|
|
344
|
+
related_files TEXT DEFAULT '[]', -- JSON array of related file paths
|
|
345
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
346
|
+
metadata TEXT DEFAULT '{}', -- Additional metadata as JSON
|
|
347
|
+
|
|
348
|
+
FOREIGN KEY (repository_id) REFERENCES repositories(id) ON DELETE CASCADE
|
|
349
|
+
);
|
|
350
|
+
|
|
351
|
+
-- Indexes for failures
|
|
352
|
+
CREATE INDEX IF NOT EXISTS idx_failures_repository_id ON failures(repository_id);
|
|
353
|
+
CREATE INDEX IF NOT EXISTS idx_failures_created_at ON failures(created_at DESC);
|
|
354
|
+
|
|
355
|
+
-- FTS5 virtual table for failure search
|
|
356
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS failures_fts USING fts5(
|
|
357
|
+
title,
|
|
358
|
+
problem,
|
|
359
|
+
approach,
|
|
360
|
+
failure_reason,
|
|
361
|
+
content='failures',
|
|
362
|
+
content_rowid='rowid'
|
|
363
|
+
);
|
|
364
|
+
|
|
365
|
+
-- FTS5 sync triggers for failures
|
|
366
|
+
CREATE TRIGGER IF NOT EXISTS failures_fts_ai
|
|
367
|
+
AFTER INSERT ON failures
|
|
368
|
+
BEGIN
|
|
369
|
+
INSERT INTO failures_fts(rowid, title, problem, approach, failure_reason)
|
|
370
|
+
VALUES (new.rowid, new.title, new.problem, new.approach, new.failure_reason);
|
|
371
|
+
END;
|
|
372
|
+
|
|
373
|
+
CREATE TRIGGER IF NOT EXISTS failures_fts_ad
|
|
374
|
+
AFTER DELETE ON failures
|
|
375
|
+
BEGIN
|
|
376
|
+
INSERT INTO failures_fts(failures_fts, rowid, title, problem, approach, failure_reason)
|
|
377
|
+
VALUES ('delete', old.rowid, old.title, old.problem, old.approach, old.failure_reason);
|
|
378
|
+
END;
|
|
379
|
+
|
|
380
|
+
CREATE TRIGGER IF NOT EXISTS failures_fts_au
|
|
381
|
+
AFTER UPDATE ON failures
|
|
382
|
+
BEGIN
|
|
383
|
+
INSERT INTO failures_fts(failures_fts, rowid, title, problem, approach, failure_reason)
|
|
384
|
+
VALUES ('delete', old.rowid, old.title, old.problem, old.approach, old.failure_reason);
|
|
385
|
+
INSERT INTO failures_fts(rowid, title, problem, approach, failure_reason)
|
|
386
|
+
VALUES (new.rowid, new.title, new.problem, new.approach, new.failure_reason);
|
|
387
|
+
END;
|
|
388
|
+
|
|
389
|
+
-- ============================================================================
|
|
390
|
+
-- 10.3 Patterns Table - Discovered codebase patterns
|
|
391
|
+
-- ============================================================================
|
|
392
|
+
|
|
393
|
+
CREATE TABLE IF NOT EXISTS patterns (
|
|
394
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
395
|
+
repository_id TEXT, -- Foreign key to repositories (nullable)
|
|
396
|
+
pattern_type TEXT NOT NULL, -- Type of pattern (e.g., 'error-handling', 'api-call')
|
|
397
|
+
file_path TEXT, -- File where pattern was observed
|
|
398
|
+
description TEXT NOT NULL, -- Description of the pattern
|
|
399
|
+
example TEXT, -- Code example of the pattern
|
|
400
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
401
|
+
metadata TEXT DEFAULT '{}', -- Additional metadata as JSON
|
|
402
|
+
|
|
403
|
+
FOREIGN KEY (repository_id) REFERENCES repositories(id) ON DELETE CASCADE
|
|
404
|
+
);
|
|
405
|
+
|
|
406
|
+
-- Indexes for patterns
|
|
407
|
+
CREATE INDEX IF NOT EXISTS idx_patterns_repository_id ON patterns(repository_id);
|
|
408
|
+
CREATE INDEX IF NOT EXISTS idx_patterns_pattern_type ON patterns(pattern_type);
|
|
409
|
+
CREATE INDEX IF NOT EXISTS idx_patterns_file_path ON patterns(file_path);
|
|
410
|
+
CREATE INDEX IF NOT EXISTS idx_patterns_created_at ON patterns(created_at DESC);
|
|
411
|
+
|
|
412
|
+
-- ============================================================================
|
|
413
|
+
-- 10.4 Insights Table - Session insights for future agents
|
|
414
|
+
-- ============================================================================
|
|
415
|
+
|
|
416
|
+
CREATE TABLE IF NOT EXISTS insights (
|
|
417
|
+
id TEXT PRIMARY KEY, -- uuid → TEXT
|
|
418
|
+
session_id TEXT, -- Session identifier (optional)
|
|
419
|
+
content TEXT NOT NULL, -- The insight content
|
|
420
|
+
insight_type TEXT NOT NULL, -- Type of insight
|
|
421
|
+
related_file TEXT, -- Related file path (optional)
|
|
422
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
423
|
+
metadata TEXT DEFAULT '{}', -- Additional metadata as JSON
|
|
424
|
+
|
|
425
|
+
CHECK (insight_type IN ('discovery', 'failure', 'workaround'))
|
|
426
|
+
);
|
|
427
|
+
|
|
428
|
+
-- Indexes for insights
|
|
429
|
+
CREATE INDEX IF NOT EXISTS idx_insights_session_id ON insights(session_id);
|
|
430
|
+
CREATE INDEX IF NOT EXISTS idx_insights_insight_type ON insights(insight_type);
|
|
431
|
+
CREATE INDEX IF NOT EXISTS idx_insights_related_file ON insights(related_file);
|
|
432
|
+
CREATE INDEX IF NOT EXISTS idx_insights_created_at ON insights(created_at DESC);
|
|
433
|
+
|
|
434
|
+
-- FTS5 virtual table for insight search
|
|
435
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS insights_fts USING fts5(
|
|
436
|
+
content,
|
|
437
|
+
content='insights',
|
|
438
|
+
content_rowid='rowid'
|
|
439
|
+
);
|
|
440
|
+
|
|
441
|
+
-- FTS5 sync triggers for insights
|
|
442
|
+
CREATE TRIGGER IF NOT EXISTS insights_fts_ai
|
|
443
|
+
AFTER INSERT ON insights
|
|
444
|
+
BEGIN
|
|
445
|
+
INSERT INTO insights_fts(rowid, content)
|
|
446
|
+
VALUES (new.rowid, new.content);
|
|
447
|
+
END;
|
|
448
|
+
|
|
449
|
+
CREATE TRIGGER IF NOT EXISTS insights_fts_ad
|
|
450
|
+
AFTER DELETE ON insights
|
|
451
|
+
BEGIN
|
|
452
|
+
INSERT INTO insights_fts(insights_fts, rowid, content)
|
|
453
|
+
VALUES ('delete', old.rowid, old.content);
|
|
454
|
+
END;
|
|
455
|
+
|
|
456
|
+
CREATE TRIGGER IF NOT EXISTS insights_fts_au
|
|
457
|
+
AFTER UPDATE ON insights
|
|
458
|
+
BEGIN
|
|
459
|
+
INSERT INTO insights_fts(insights_fts, rowid, content)
|
|
460
|
+
VALUES ('delete', old.rowid, old.content);
|
|
461
|
+
INSERT INTO insights_fts(rowid, content)
|
|
462
|
+
VALUES (new.rowid, new.content);
|
|
463
|
+
END;
|
|
464
|
+
|
|
465
|
+
-- Record memory layer migration
|
|
466
|
+
INSERT OR IGNORE INTO schema_migrations (name) VALUES ('002_memory_layer_tables');
|
|
467
|
+
|
|
261
468
|
-- Record this migration
|
|
262
469
|
INSERT OR IGNORE INTO schema_migrations (name) VALUES ('001_initial_sqlite_schema');
|
|
263
470
|
|
package/src/instrument.ts
CHANGED
|
@@ -26,9 +26,6 @@ if (process.env.NODE_ENV !== "test") {
|
|
|
26
26
|
// Privacy compliance: don't send IP addresses or user agents automatically
|
|
27
27
|
sendDefaultPii: false,
|
|
28
28
|
|
|
29
|
-
// Enable debug mode in development
|
|
30
|
-
debug: isDevelopment,
|
|
31
|
-
|
|
32
29
|
// Scrub sensitive headers before sending to Sentry
|
|
33
30
|
beforeSend(event, hint) {
|
|
34
31
|
// Remove sensitive headers
|
|
@@ -61,7 +58,7 @@ if (process.env.NODE_ENV !== "test") {
|
|
|
61
58
|
});
|
|
62
59
|
|
|
63
60
|
if (isDevelopment && process.env.SENTRY_DSN) {
|
|
64
|
-
process.
|
|
61
|
+
process.stderr.write(
|
|
65
62
|
JSON.stringify({
|
|
66
63
|
timestamp: new Date().toISOString(),
|
|
67
64
|
level: "info",
|
package/src/logging/logger.ts
CHANGED
|
@@ -100,19 +100,12 @@ function maskSensitiveData(context: LogContext): LogContext {
|
|
|
100
100
|
}
|
|
101
101
|
|
|
102
102
|
/**
|
|
103
|
-
* Format and write log entry to
|
|
103
|
+
* Format and write log entry to stderr (all logs go to stderr to keep stdout clean for JSON output)
|
|
104
104
|
*/
|
|
105
|
-
function writeLog(entry: LogEntry,
|
|
105
|
+
function writeLog(entry: LogEntry, _forceStderr = false): void {
|
|
106
106
|
const json = JSON.stringify(entry);
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
if (forceStderr || entry.level === "error") {
|
|
110
|
-
process.stderr.write(output);
|
|
111
|
-
} else {
|
|
112
|
-
process.stdout.write(output);
|
|
113
|
-
}
|
|
107
|
+
process.stderr.write(`${json}\n`);
|
|
114
108
|
}
|
|
115
|
-
|
|
116
109
|
/**
|
|
117
110
|
* Create a logger instance with optional correlation context
|
|
118
111
|
*/
|
package/src/mcp/server.ts
CHANGED
|
@@ -23,6 +23,14 @@ import {
|
|
|
23
23
|
SYNC_EXPORT_TOOL,
|
|
24
24
|
SYNC_IMPORT_TOOL,
|
|
25
25
|
VALIDATE_IMPLEMENTATION_SPEC_TOOL,
|
|
26
|
+
// Memory Layer tools
|
|
27
|
+
SEARCH_DECISIONS_TOOL,
|
|
28
|
+
RECORD_DECISION_TOOL,
|
|
29
|
+
SEARCH_FAILURES_TOOL,
|
|
30
|
+
RECORD_FAILURE_TOOL,
|
|
31
|
+
SEARCH_PATTERNS_TOOL,
|
|
32
|
+
RECORD_INSIGHT_TOOL,
|
|
33
|
+
// Execute functions
|
|
26
34
|
executeAnalyzeChangeImpact,
|
|
27
35
|
executeGenerateTaskContext,
|
|
28
36
|
executeIndexRepository,
|
|
@@ -32,6 +40,13 @@ import {
|
|
|
32
40
|
executeSyncExport,
|
|
33
41
|
executeSyncImport,
|
|
34
42
|
executeValidateImplementationSpec,
|
|
43
|
+
// Memory Layer execute functions
|
|
44
|
+
executeSearchDecisions,
|
|
45
|
+
executeRecordDecision,
|
|
46
|
+
executeSearchFailures,
|
|
47
|
+
executeRecordFailure,
|
|
48
|
+
executeSearchPatterns,
|
|
49
|
+
executeRecordInsight,
|
|
35
50
|
} from "./tools";
|
|
36
51
|
|
|
37
52
|
const logger = createLogger({ module: "mcp-server" });
|
|
@@ -58,6 +73,12 @@ export interface McpServerContext {
|
|
|
58
73
|
* - kota_sync_export: Export SQLite to JSONL
|
|
59
74
|
* - kota_sync_import: Import JSONL to SQLite
|
|
60
75
|
* - generate_task_context: Generate context for hook-based seeding
|
|
76
|
+
* - search_decisions: Search past architectural decisions
|
|
77
|
+
* - record_decision: Record a new architectural decision
|
|
78
|
+
* - search_failures: Search failed approaches
|
|
79
|
+
* - record_failure: Record a failed approach
|
|
80
|
+
* - search_patterns: Find codebase patterns
|
|
81
|
+
* - record_insight: Store a session insight
|
|
61
82
|
*/
|
|
62
83
|
export function createMcpServer(context: McpServerContext): Server {
|
|
63
84
|
const server = new Server(
|
|
@@ -85,6 +106,13 @@ export function createMcpServer(context: McpServerContext): Server {
|
|
|
85
106
|
SYNC_EXPORT_TOOL,
|
|
86
107
|
SYNC_IMPORT_TOOL,
|
|
87
108
|
GENERATE_TASK_CONTEXT_TOOL,
|
|
109
|
+
// Memory Layer tools
|
|
110
|
+
SEARCH_DECISIONS_TOOL,
|
|
111
|
+
RECORD_DECISION_TOOL,
|
|
112
|
+
SEARCH_FAILURES_TOOL,
|
|
113
|
+
RECORD_FAILURE_TOOL,
|
|
114
|
+
SEARCH_PATTERNS_TOOL,
|
|
115
|
+
RECORD_INSIGHT_TOOL,
|
|
88
116
|
],
|
|
89
117
|
};
|
|
90
118
|
});
|
|
@@ -154,6 +182,49 @@ export function createMcpServer(context: McpServerContext): Server {
|
|
|
154
182
|
context.userId,
|
|
155
183
|
);
|
|
156
184
|
break;
|
|
185
|
+
// Memory Layer tools
|
|
186
|
+
case "search_decisions":
|
|
187
|
+
result = await executeSearchDecisions(
|
|
188
|
+
toolArgs,
|
|
189
|
+
"", // requestId not used
|
|
190
|
+
context.userId,
|
|
191
|
+
);
|
|
192
|
+
break;
|
|
193
|
+
case "record_decision":
|
|
194
|
+
result = await executeRecordDecision(
|
|
195
|
+
toolArgs,
|
|
196
|
+
"", // requestId not used
|
|
197
|
+
context.userId,
|
|
198
|
+
);
|
|
199
|
+
break;
|
|
200
|
+
case "search_failures":
|
|
201
|
+
result = await executeSearchFailures(
|
|
202
|
+
toolArgs,
|
|
203
|
+
"", // requestId not used
|
|
204
|
+
context.userId,
|
|
205
|
+
);
|
|
206
|
+
break;
|
|
207
|
+
case "record_failure":
|
|
208
|
+
result = await executeRecordFailure(
|
|
209
|
+
toolArgs,
|
|
210
|
+
"", // requestId not used
|
|
211
|
+
context.userId,
|
|
212
|
+
);
|
|
213
|
+
break;
|
|
214
|
+
case "search_patterns":
|
|
215
|
+
result = await executeSearchPatterns(
|
|
216
|
+
toolArgs,
|
|
217
|
+
"", // requestId not used
|
|
218
|
+
context.userId,
|
|
219
|
+
);
|
|
220
|
+
break;
|
|
221
|
+
case "record_insight":
|
|
222
|
+
result = await executeRecordInsight(
|
|
223
|
+
toolArgs,
|
|
224
|
+
"", // requestId not used
|
|
225
|
+
context.userId,
|
|
226
|
+
);
|
|
227
|
+
break;
|
|
157
228
|
default:
|
|
158
229
|
const error = new Error(`Unknown tool: ${name}`);
|
|
159
230
|
logger.error("Unknown MCP tool requested", error, {
|
package/src/mcp/tools.ts
CHANGED
|
@@ -372,6 +372,230 @@ export const GENERATE_TASK_CONTEXT_TOOL: ToolDefinition = {
|
|
|
372
372
|
},
|
|
373
373
|
};
|
|
374
374
|
|
|
375
|
+
// ============================================================================
|
|
376
|
+
// Memory Layer Tool Definitions
|
|
377
|
+
// ============================================================================
|
|
378
|
+
|
|
379
|
+
/**
|
|
380
|
+
* Tool: search_decisions
|
|
381
|
+
*/
|
|
382
|
+
export const SEARCH_DECISIONS_TOOL: ToolDefinition = {
|
|
383
|
+
name: "search_decisions",
|
|
384
|
+
description:
|
|
385
|
+
"Search past architectural decisions using FTS5. Returns decisions with relevance scores.",
|
|
386
|
+
inputSchema: {
|
|
387
|
+
type: "object",
|
|
388
|
+
properties: {
|
|
389
|
+
query: {
|
|
390
|
+
type: "string",
|
|
391
|
+
description: "Search query for decisions",
|
|
392
|
+
},
|
|
393
|
+
scope: {
|
|
394
|
+
type: "string",
|
|
395
|
+
enum: ["architecture", "pattern", "convention", "workaround"],
|
|
396
|
+
description: "Optional: Filter by decision scope",
|
|
397
|
+
},
|
|
398
|
+
repository: {
|
|
399
|
+
type: "string",
|
|
400
|
+
description: "Optional: Filter to a specific repository ID or full_name",
|
|
401
|
+
},
|
|
402
|
+
limit: {
|
|
403
|
+
type: "number",
|
|
404
|
+
description: "Optional: Max results (default: 20)",
|
|
405
|
+
},
|
|
406
|
+
},
|
|
407
|
+
required: ["query"],
|
|
408
|
+
},
|
|
409
|
+
};
|
|
410
|
+
|
|
411
|
+
/**
|
|
412
|
+
* Tool: record_decision
|
|
413
|
+
*/
|
|
414
|
+
export const RECORD_DECISION_TOOL: ToolDefinition = {
|
|
415
|
+
name: "record_decision",
|
|
416
|
+
description:
|
|
417
|
+
"Record a new architectural decision for future reference. Decisions are searchable via search_decisions.",
|
|
418
|
+
inputSchema: {
|
|
419
|
+
type: "object",
|
|
420
|
+
properties: {
|
|
421
|
+
title: {
|
|
422
|
+
type: "string",
|
|
423
|
+
description: "Decision title/summary",
|
|
424
|
+
},
|
|
425
|
+
context: {
|
|
426
|
+
type: "string",
|
|
427
|
+
description: "Context and background for the decision",
|
|
428
|
+
},
|
|
429
|
+
decision: {
|
|
430
|
+
type: "string",
|
|
431
|
+
description: "The actual decision made",
|
|
432
|
+
},
|
|
433
|
+
scope: {
|
|
434
|
+
type: "string",
|
|
435
|
+
enum: ["architecture", "pattern", "convention", "workaround"],
|
|
436
|
+
description: "Decision scope/category (default: pattern)",
|
|
437
|
+
},
|
|
438
|
+
rationale: {
|
|
439
|
+
type: "string",
|
|
440
|
+
description: "Optional: Why this decision was made",
|
|
441
|
+
},
|
|
442
|
+
alternatives: {
|
|
443
|
+
type: "array",
|
|
444
|
+
items: { type: "string" },
|
|
445
|
+
description: "Optional: Alternatives that were considered",
|
|
446
|
+
},
|
|
447
|
+
related_files: {
|
|
448
|
+
type: "array",
|
|
449
|
+
items: { type: "string" },
|
|
450
|
+
description: "Optional: Related file paths",
|
|
451
|
+
},
|
|
452
|
+
repository: {
|
|
453
|
+
type: "string",
|
|
454
|
+
description: "Optional: Repository ID or full_name",
|
|
455
|
+
},
|
|
456
|
+
},
|
|
457
|
+
required: ["title", "context", "decision"],
|
|
458
|
+
},
|
|
459
|
+
};
|
|
460
|
+
|
|
461
|
+
/**
 * Tool: search_failures
 *
 * MCP tool definition for full-text search over recorded failed approaches
 * (see executeSearchFailures). Only `query` is required; `repository` and
 * `limit` narrow/cap the result set.
 */
export const SEARCH_FAILURES_TOOL: ToolDefinition = {
  name: "search_failures",
  description:
    "Search failed approaches to avoid repeating mistakes. Returns failures with relevance scores.",
  inputSchema: {
    type: "object",
    properties: {
      query: {
        type: "string",
        description: "Search query for failures",
      },
      repository: {
        type: "string",
        description: "Optional: Filter to a specific repository ID or full_name",
      },
      limit: {
        type: "number",
        description: "Optional: Max results (default: 20)",
      },
    },
    required: ["query"],
  },
};
|
|
487
|
+
|
|
488
|
+
/**
 * Tool: record_failure
 *
 * MCP tool definition for persisting a failed approach (handled by
 * executeRecordFailure). Records become searchable via search_failures.
 */
export const RECORD_FAILURE_TOOL: ToolDefinition = {
  name: "record_failure",
  description:
    "Record a failed approach for future reference. Helps agents avoid repeating mistakes.",
  inputSchema: {
    type: "object",
    properties: {
      title: {
        type: "string",
        description: "Failure title/summary",
      },
      problem: {
        type: "string",
        description: "The problem being solved",
      },
      approach: {
        type: "string",
        description: "The approach that was tried",
      },
      failure_reason: {
        type: "string",
        description: "Why the approach failed",
      },
      related_files: {
        type: "array",
        items: { type: "string" },
        description: "Optional: Related file paths",
      },
      repository: {
        type: "string",
        description: "Optional: Repository ID or full_name",
      },
    },
    required: ["title", "problem", "approach", "failure_reason"],
  },
};
|
|
527
|
+
|
|
528
|
+
/**
 * Tool: search_patterns
 *
 * MCP tool definition for listing discovered codebase patterns (handled by
 * executeSearchPatterns). All parameters are optional filters — note the
 * absence of a `required` array below.
 */
export const SEARCH_PATTERNS_TOOL: ToolDefinition = {
  name: "search_patterns",
  description:
    "Find codebase patterns by type or file. Returns discovered patterns for consistency.",
  inputSchema: {
    type: "object",
    properties: {
      query: {
        type: "string",
        description: "Optional: Search query for pattern name/description",
      },
      pattern_type: {
        type: "string",
        description: "Optional: Filter by pattern type (e.g., error-handling, api-call)",
      },
      file: {
        type: "string",
        description: "Optional: Filter by file path",
      },
      repository: {
        type: "string",
        description: "Optional: Filter to a specific repository ID or full_name",
      },
      limit: {
        type: "number",
        description: "Optional: Max results (default: 20)",
      },
    },
  },
};
|
|
561
|
+
|
|
562
|
+
/**
 * Tool: record_insight
 *
 * MCP tool definition for storing a session insight (handled by
 * executeRecordInsight). An insight is a free-form note tagged as a
 * discovery, failure, or workaround, optionally grouped by session.
 */
export const RECORD_INSIGHT_TOOL: ToolDefinition = {
  name: "record_insight",
  description:
    "Store a session insight for future agents. Insights are discoveries, failures, or workarounds.",
  inputSchema: {
    type: "object",
    properties: {
      content: {
        type: "string",
        description: "The insight content",
      },
      insight_type: {
        type: "string",
        enum: ["discovery", "failure", "workaround"],
        description: "Type of insight",
      },
      session_id: {
        type: "string",
        description: "Optional: Session identifier for grouping",
      },
      related_file: {
        type: "string",
        description: "Optional: Related file path",
      },
      repository: {
        // NOTE(review): the executor validates this field but does not
        // persist it — confirm whether insights should carry a repository id.
        type: "string",
        description: "Optional: Repository ID or full_name",
      },
    },
    required: ["content", "insight_type"],
  },
};
|
|
597
|
+
|
|
598
|
+
|
|
375
599
|
/**
|
|
376
600
|
* Get all available tool definitions
|
|
377
601
|
*/
|
|
@@ -386,6 +610,13 @@ export function getToolDefinitions(): ToolDefinition[] {
|
|
|
386
610
|
SYNC_EXPORT_TOOL,
|
|
387
611
|
SYNC_IMPORT_TOOL,
|
|
388
612
|
GENERATE_TASK_CONTEXT_TOOL,
|
|
613
|
+
// Memory Layer tools
|
|
614
|
+
SEARCH_DECISIONS_TOOL,
|
|
615
|
+
RECORD_DECISION_TOOL,
|
|
616
|
+
SEARCH_FAILURES_TOOL,
|
|
617
|
+
RECORD_FAILURE_TOOL,
|
|
618
|
+
SEARCH_PATTERNS_TOOL,
|
|
619
|
+
RECORD_INSIGHT_TOOL,
|
|
389
620
|
];
|
|
390
621
|
}
|
|
391
622
|
|
|
@@ -1131,6 +1362,520 @@ function generateTestFilePatterns(sourcePath: string): string[] {
|
|
|
1131
1362
|
return patterns;
|
|
1132
1363
|
}
|
|
1133
1364
|
|
|
1365
|
+
|
|
1366
|
+
|
|
1367
|
+
// ============================================================================
|
|
1368
|
+
// Memory Layer Tool Executors
|
|
1369
|
+
// ============================================================================
|
|
1370
|
+
|
|
1371
|
+
/**
|
|
1372
|
+
* Escape a term for FTS5 MATCH clause
|
|
1373
|
+
*/
|
|
1374
|
+
function escapeFts5Term(term: string): string {
|
|
1375
|
+
const escaped = term.replace(/"/g, '""');
|
|
1376
|
+
return `"${escaped}"`;
|
|
1377
|
+
}
|
|
1378
|
+
|
|
1379
|
+
/**
|
|
1380
|
+
* Execute search_decisions tool
|
|
1381
|
+
*/
|
|
1382
|
+
export async function executeSearchDecisions(
|
|
1383
|
+
params: unknown,
|
|
1384
|
+
_requestId: string | number,
|
|
1385
|
+
_userId: string,
|
|
1386
|
+
): Promise<unknown> {
|
|
1387
|
+
if (typeof params !== "object" || params === null) {
|
|
1388
|
+
throw new Error("Parameters must be an object");
|
|
1389
|
+
}
|
|
1390
|
+
|
|
1391
|
+
const p = params as Record<string, unknown>;
|
|
1392
|
+
|
|
1393
|
+
if (p.query === undefined) {
|
|
1394
|
+
throw new Error("Missing required parameter: query");
|
|
1395
|
+
}
|
|
1396
|
+
if (typeof p.query !== "string") {
|
|
1397
|
+
throw new Error("Parameter 'query' must be a string");
|
|
1398
|
+
}
|
|
1399
|
+
|
|
1400
|
+
if (p.scope !== undefined && typeof p.scope !== "string") {
|
|
1401
|
+
throw new Error("Parameter 'scope' must be a string");
|
|
1402
|
+
}
|
|
1403
|
+
if (p.scope !== undefined && !["architecture", "pattern", "convention", "workaround"].includes(p.scope as string)) {
|
|
1404
|
+
throw new Error("Parameter 'scope' must be one of: architecture, pattern, convention, workaround");
|
|
1405
|
+
}
|
|
1406
|
+
if (p.repository !== undefined && typeof p.repository !== "string") {
|
|
1407
|
+
throw new Error("Parameter 'repository' must be a string");
|
|
1408
|
+
}
|
|
1409
|
+
if (p.limit !== undefined && typeof p.limit !== "number") {
|
|
1410
|
+
throw new Error("Parameter 'limit' must be a number");
|
|
1411
|
+
}
|
|
1412
|
+
|
|
1413
|
+
const db = getGlobalDatabase();
|
|
1414
|
+
const escapedQuery = escapeFts5Term(p.query as string);
|
|
1415
|
+
const limit = Math.min(Math.max((p.limit as number) || 20, 1), 100);
|
|
1416
|
+
|
|
1417
|
+
let sql = `
|
|
1418
|
+
SELECT
|
|
1419
|
+
d.id,
|
|
1420
|
+
d.title,
|
|
1421
|
+
d.context,
|
|
1422
|
+
d.decision,
|
|
1423
|
+
d.scope,
|
|
1424
|
+
d.rationale,
|
|
1425
|
+
d.alternatives,
|
|
1426
|
+
d.related_files,
|
|
1427
|
+
d.repository_id,
|
|
1428
|
+
d.created_at,
|
|
1429
|
+
bm25(decisions_fts) as relevance
|
|
1430
|
+
FROM decisions_fts
|
|
1431
|
+
JOIN decisions d ON decisions_fts.rowid = d.rowid
|
|
1432
|
+
WHERE decisions_fts MATCH ?
|
|
1433
|
+
`;
|
|
1434
|
+
const queryParams: (string | number)[] = [escapedQuery];
|
|
1435
|
+
|
|
1436
|
+
if (p.scope) {
|
|
1437
|
+
sql += " AND d.scope = ?";
|
|
1438
|
+
queryParams.push(p.scope as string);
|
|
1439
|
+
}
|
|
1440
|
+
|
|
1441
|
+
if (p.repository) {
|
|
1442
|
+
const repoResult = resolveRepositoryIdentifierWithError(p.repository as string);
|
|
1443
|
+
if (!("error" in repoResult)) {
|
|
1444
|
+
sql += " AND d.repository_id = ?";
|
|
1445
|
+
queryParams.push(repoResult.id);
|
|
1446
|
+
}
|
|
1447
|
+
}
|
|
1448
|
+
|
|
1449
|
+
sql += " ORDER BY relevance LIMIT ?";
|
|
1450
|
+
queryParams.push(limit);
|
|
1451
|
+
|
|
1452
|
+
const rows = db.query<{
|
|
1453
|
+
id: string;
|
|
1454
|
+
title: string;
|
|
1455
|
+
context: string;
|
|
1456
|
+
decision: string;
|
|
1457
|
+
scope: string;
|
|
1458
|
+
rationale: string | null;
|
|
1459
|
+
alternatives: string;
|
|
1460
|
+
related_files: string;
|
|
1461
|
+
repository_id: string | null;
|
|
1462
|
+
created_at: string;
|
|
1463
|
+
relevance: number;
|
|
1464
|
+
}>(sql, queryParams);
|
|
1465
|
+
|
|
1466
|
+
return {
|
|
1467
|
+
results: rows.map((row) => ({
|
|
1468
|
+
id: row.id,
|
|
1469
|
+
title: row.title,
|
|
1470
|
+
context: row.context,
|
|
1471
|
+
decision: row.decision,
|
|
1472
|
+
scope: row.scope,
|
|
1473
|
+
rationale: row.rationale,
|
|
1474
|
+
alternatives: JSON.parse(row.alternatives || "[]"),
|
|
1475
|
+
related_files: JSON.parse(row.related_files || "[]"),
|
|
1476
|
+
repository_id: row.repository_id,
|
|
1477
|
+
created_at: row.created_at,
|
|
1478
|
+
relevance: Math.abs(row.relevance),
|
|
1479
|
+
})),
|
|
1480
|
+
count: rows.length,
|
|
1481
|
+
};
|
|
1482
|
+
}
|
|
1483
|
+
|
|
1484
|
+
/**
 * Execute record_decision tool.
 *
 * Validates tool arguments and inserts a new row into the decisions table,
 * returning a success envelope with the generated UUID. The optional
 * repository identifier is resolved best-effort; if resolution fails, the
 * decision is stored without a repository association.
 *
 * @param params - Raw tool arguments: { title, context, decision, scope?,
 *   rationale?, alternatives?, related_files?, repository? }.
 * @param _requestId - JSON-RPC request id (unused; kept for dispatcher symmetry).
 * @param _userId - Calling user id (unused; kept for dispatcher symmetry).
 * @returns { success, id, message } on successful insert.
 * @throws Error when parameter validation fails.
 */
export async function executeRecordDecision(
  params: unknown,
  _requestId: string | number,
  _userId: string,
): Promise<unknown> {
  if (typeof params !== "object" || params === null) {
    throw new Error("Parameters must be an object");
  }

  const p = params as Record<string, unknown>;

  // Required string fields.
  if (p.title === undefined || typeof p.title !== "string") {
    throw new Error("Missing or invalid required parameter: title");
  }
  if (p.context === undefined || typeof p.context !== "string") {
    throw new Error("Missing or invalid required parameter: context");
  }
  if (p.decision === undefined || typeof p.decision !== "string") {
    throw new Error("Missing or invalid required parameter: decision");
  }

  // Omitted (or falsy) scope defaults to "pattern"; any other value must be
  // a member of the enum, otherwise we reject it below.
  const scope = (p.scope as string) || "pattern";
  if (!["architecture", "pattern", "convention", "workaround"].includes(scope)) {
    throw new Error("Parameter 'scope' must be one of: architecture, pattern, convention, workaround");
  }

  // Optional fields: validate type only when present.
  if (p.rationale !== undefined && typeof p.rationale !== "string") {
    throw new Error("Parameter 'rationale' must be a string");
  }
  if (p.alternatives !== undefined && !Array.isArray(p.alternatives)) {
    throw new Error("Parameter 'alternatives' must be an array");
  }
  if (p.related_files !== undefined && !Array.isArray(p.related_files)) {
    throw new Error("Parameter 'related_files' must be an array");
  }
  if (p.repository !== undefined && typeof p.repository !== "string") {
    throw new Error("Parameter 'repository' must be a string");
  }

  const db = getGlobalDatabase();
  const { randomUUID } = await import("node:crypto");
  const id = randomUUID();

  // Best-effort repository association.
  // NOTE(review): an unresolvable repository identifier is silently dropped
  // (decision stored with repository_id = NULL) — confirm this is intended.
  let repositoryId: string | null = null;
  if (p.repository) {
    const repoResult = resolveRepositoryIdentifierWithError(p.repository as string);
    if (!("error" in repoResult)) {
      repositoryId = repoResult.id;
    }
  }

  const sql = `
    INSERT INTO decisions (
      id, repository_id, title, context, decision, scope,
      rationale, alternatives, related_files, created_at, updated_at
    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))
  `;

  db.run(sql, [
    id,
    repositoryId,
    p.title as string,
    p.context as string,
    p.decision as string,
    scope,
    // An empty-string rationale is normalized to NULL by the || fallback.
    (p.rationale as string) || null,
    // Array columns are stored as JSON text.
    JSON.stringify((p.alternatives as string[]) || []),
    JSON.stringify((p.related_files as string[]) || []),
  ]);

  logger.info("Decision recorded", { id, title: p.title, scope });

  return {
    success: true,
    id,
    message: "Decision recorded successfully",
  };
}
|
|
1565
|
+
|
|
1566
|
+
/**
|
|
1567
|
+
* Execute search_failures tool
|
|
1568
|
+
*/
|
|
1569
|
+
export async function executeSearchFailures(
|
|
1570
|
+
params: unknown,
|
|
1571
|
+
_requestId: string | number,
|
|
1572
|
+
_userId: string,
|
|
1573
|
+
): Promise<unknown> {
|
|
1574
|
+
if (typeof params !== "object" || params === null) {
|
|
1575
|
+
throw new Error("Parameters must be an object");
|
|
1576
|
+
}
|
|
1577
|
+
|
|
1578
|
+
const p = params as Record<string, unknown>;
|
|
1579
|
+
|
|
1580
|
+
if (p.query === undefined) {
|
|
1581
|
+
throw new Error("Missing required parameter: query");
|
|
1582
|
+
}
|
|
1583
|
+
if (typeof p.query !== "string") {
|
|
1584
|
+
throw new Error("Parameter 'query' must be a string");
|
|
1585
|
+
}
|
|
1586
|
+
|
|
1587
|
+
if (p.repository !== undefined && typeof p.repository !== "string") {
|
|
1588
|
+
throw new Error("Parameter 'repository' must be a string");
|
|
1589
|
+
}
|
|
1590
|
+
if (p.limit !== undefined && typeof p.limit !== "number") {
|
|
1591
|
+
throw new Error("Parameter 'limit' must be a number");
|
|
1592
|
+
}
|
|
1593
|
+
|
|
1594
|
+
const db = getGlobalDatabase();
|
|
1595
|
+
const escapedQuery = escapeFts5Term(p.query as string);
|
|
1596
|
+
const limit = Math.min(Math.max((p.limit as number) || 20, 1), 100);
|
|
1597
|
+
|
|
1598
|
+
let sql = `
|
|
1599
|
+
SELECT
|
|
1600
|
+
f.id,
|
|
1601
|
+
f.title,
|
|
1602
|
+
f.problem,
|
|
1603
|
+
f.approach,
|
|
1604
|
+
f.failure_reason,
|
|
1605
|
+
f.related_files,
|
|
1606
|
+
f.repository_id,
|
|
1607
|
+
f.created_at,
|
|
1608
|
+
bm25(failures_fts) as relevance
|
|
1609
|
+
FROM failures_fts
|
|
1610
|
+
JOIN failures f ON failures_fts.rowid = f.rowid
|
|
1611
|
+
WHERE failures_fts MATCH ?
|
|
1612
|
+
`;
|
|
1613
|
+
const queryParams: (string | number)[] = [escapedQuery];
|
|
1614
|
+
|
|
1615
|
+
if (p.repository) {
|
|
1616
|
+
const repoResult = resolveRepositoryIdentifierWithError(p.repository as string);
|
|
1617
|
+
if (!("error" in repoResult)) {
|
|
1618
|
+
sql += " AND f.repository_id = ?";
|
|
1619
|
+
queryParams.push(repoResult.id);
|
|
1620
|
+
}
|
|
1621
|
+
}
|
|
1622
|
+
|
|
1623
|
+
sql += " ORDER BY relevance LIMIT ?";
|
|
1624
|
+
queryParams.push(limit);
|
|
1625
|
+
|
|
1626
|
+
const rows = db.query<{
|
|
1627
|
+
id: string;
|
|
1628
|
+
title: string;
|
|
1629
|
+
problem: string;
|
|
1630
|
+
approach: string;
|
|
1631
|
+
failure_reason: string;
|
|
1632
|
+
related_files: string;
|
|
1633
|
+
repository_id: string | null;
|
|
1634
|
+
created_at: string;
|
|
1635
|
+
relevance: number;
|
|
1636
|
+
}>(sql, queryParams);
|
|
1637
|
+
|
|
1638
|
+
return {
|
|
1639
|
+
results: rows.map((row) => ({
|
|
1640
|
+
id: row.id,
|
|
1641
|
+
title: row.title,
|
|
1642
|
+
problem: row.problem,
|
|
1643
|
+
approach: row.approach,
|
|
1644
|
+
failure_reason: row.failure_reason,
|
|
1645
|
+
related_files: JSON.parse(row.related_files || "[]"),
|
|
1646
|
+
repository_id: row.repository_id,
|
|
1647
|
+
created_at: row.created_at,
|
|
1648
|
+
relevance: Math.abs(row.relevance),
|
|
1649
|
+
})),
|
|
1650
|
+
count: rows.length,
|
|
1651
|
+
};
|
|
1652
|
+
}
|
|
1653
|
+
|
|
1654
|
+
/**
|
|
1655
|
+
* Execute record_failure tool
|
|
1656
|
+
*/
|
|
1657
|
+
export async function executeRecordFailure(
|
|
1658
|
+
params: unknown,
|
|
1659
|
+
_requestId: string | number,
|
|
1660
|
+
_userId: string,
|
|
1661
|
+
): Promise<unknown> {
|
|
1662
|
+
if (typeof params !== "object" || params === null) {
|
|
1663
|
+
throw new Error("Parameters must be an object");
|
|
1664
|
+
}
|
|
1665
|
+
|
|
1666
|
+
const p = params as Record<string, unknown>;
|
|
1667
|
+
|
|
1668
|
+
if (p.title === undefined || typeof p.title !== "string") {
|
|
1669
|
+
throw new Error("Missing or invalid required parameter: title");
|
|
1670
|
+
}
|
|
1671
|
+
if (p.problem === undefined || typeof p.problem !== "string") {
|
|
1672
|
+
throw new Error("Missing or invalid required parameter: problem");
|
|
1673
|
+
}
|
|
1674
|
+
if (p.approach === undefined || typeof p.approach !== "string") {
|
|
1675
|
+
throw new Error("Missing or invalid required parameter: approach");
|
|
1676
|
+
}
|
|
1677
|
+
if (p.failure_reason === undefined || typeof p.failure_reason !== "string") {
|
|
1678
|
+
throw new Error("Missing or invalid required parameter: failure_reason");
|
|
1679
|
+
}
|
|
1680
|
+
|
|
1681
|
+
if (p.related_files !== undefined && !Array.isArray(p.related_files)) {
|
|
1682
|
+
throw new Error("Parameter 'related_files' must be an array");
|
|
1683
|
+
}
|
|
1684
|
+
if (p.repository !== undefined && typeof p.repository !== "string") {
|
|
1685
|
+
throw new Error("Parameter 'repository' must be a string");
|
|
1686
|
+
}
|
|
1687
|
+
|
|
1688
|
+
const db = getGlobalDatabase();
|
|
1689
|
+
const { randomUUID } = await import("node:crypto");
|
|
1690
|
+
const id = randomUUID();
|
|
1691
|
+
|
|
1692
|
+
let repositoryId: string | null = null;
|
|
1693
|
+
if (p.repository) {
|
|
1694
|
+
const repoResult = resolveRepositoryIdentifierWithError(p.repository as string);
|
|
1695
|
+
if (!("error" in repoResult)) {
|
|
1696
|
+
repositoryId = repoResult.id;
|
|
1697
|
+
}
|
|
1698
|
+
}
|
|
1699
|
+
|
|
1700
|
+
const sql = `
|
|
1701
|
+
INSERT INTO failures (
|
|
1702
|
+
id, repository_id, title, problem, approach, failure_reason,
|
|
1703
|
+
related_files, created_at
|
|
1704
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))
|
|
1705
|
+
`;
|
|
1706
|
+
|
|
1707
|
+
db.run(sql, [
|
|
1708
|
+
id,
|
|
1709
|
+
repositoryId,
|
|
1710
|
+
p.title as string,
|
|
1711
|
+
p.problem as string,
|
|
1712
|
+
p.approach as string,
|
|
1713
|
+
p.failure_reason as string,
|
|
1714
|
+
JSON.stringify((p.related_files as string[]) || []),
|
|
1715
|
+
]);
|
|
1716
|
+
|
|
1717
|
+
logger.info("Failure recorded", { id, title: p.title });
|
|
1718
|
+
|
|
1719
|
+
return {
|
|
1720
|
+
success: true,
|
|
1721
|
+
id,
|
|
1722
|
+
message: "Failure recorded successfully",
|
|
1723
|
+
};
|
|
1724
|
+
}
|
|
1725
|
+
|
|
1726
|
+
/**
|
|
1727
|
+
* Execute search_patterns tool
|
|
1728
|
+
*/
|
|
1729
|
+
export async function executeSearchPatterns(
|
|
1730
|
+
params: unknown,
|
|
1731
|
+
_requestId: string | number,
|
|
1732
|
+
_userId: string,
|
|
1733
|
+
): Promise<unknown> {
|
|
1734
|
+
if (params !== undefined && (typeof params !== "object" || params === null)) {
|
|
1735
|
+
throw new Error("Parameters must be an object");
|
|
1736
|
+
}
|
|
1737
|
+
|
|
1738
|
+
const p = (params as Record<string, unknown>) || {};
|
|
1739
|
+
|
|
1740
|
+
if (p.query !== undefined && typeof p.query !== "string") {
|
|
1741
|
+
throw new Error("Parameter 'query' must be a string");
|
|
1742
|
+
}
|
|
1743
|
+
if (p.pattern_type !== undefined && typeof p.pattern_type !== "string") {
|
|
1744
|
+
throw new Error("Parameter 'pattern_type' must be a string");
|
|
1745
|
+
}
|
|
1746
|
+
if (p.file !== undefined && typeof p.file !== "string") {
|
|
1747
|
+
throw new Error("Parameter 'file' must be a string");
|
|
1748
|
+
}
|
|
1749
|
+
if (p.repository !== undefined && typeof p.repository !== "string") {
|
|
1750
|
+
throw new Error("Parameter 'repository' must be a string");
|
|
1751
|
+
}
|
|
1752
|
+
if (p.limit !== undefined && typeof p.limit !== "number") {
|
|
1753
|
+
throw new Error("Parameter 'limit' must be a number");
|
|
1754
|
+
}
|
|
1755
|
+
|
|
1756
|
+
const db = getGlobalDatabase();
|
|
1757
|
+
const limit = Math.min(Math.max((p.limit as number) || 20, 1), 100);
|
|
1758
|
+
|
|
1759
|
+
let sql = `
|
|
1760
|
+
SELECT
|
|
1761
|
+
id,
|
|
1762
|
+
repository_id,
|
|
1763
|
+
pattern_type,
|
|
1764
|
+
file_path,
|
|
1765
|
+
description,
|
|
1766
|
+
example,
|
|
1767
|
+
created_at
|
|
1768
|
+
FROM patterns
|
|
1769
|
+
WHERE 1=1
|
|
1770
|
+
`;
|
|
1771
|
+
const queryParams: (string | number)[] = [];
|
|
1772
|
+
|
|
1773
|
+
if (p.pattern_type) {
|
|
1774
|
+
sql += " AND pattern_type = ?";
|
|
1775
|
+
queryParams.push(p.pattern_type as string);
|
|
1776
|
+
}
|
|
1777
|
+
|
|
1778
|
+
if (p.file) {
|
|
1779
|
+
sql += " AND file_path = ?";
|
|
1780
|
+
queryParams.push(p.file as string);
|
|
1781
|
+
}
|
|
1782
|
+
|
|
1783
|
+
if (p.repository) {
|
|
1784
|
+
const repoResult = resolveRepositoryIdentifierWithError(p.repository as string);
|
|
1785
|
+
if (!("error" in repoResult)) {
|
|
1786
|
+
sql += " AND repository_id = ?";
|
|
1787
|
+
queryParams.push(repoResult.id);
|
|
1788
|
+
}
|
|
1789
|
+
}
|
|
1790
|
+
|
|
1791
|
+
sql += " ORDER BY created_at DESC LIMIT ?";
|
|
1792
|
+
queryParams.push(limit);
|
|
1793
|
+
|
|
1794
|
+
const rows = db.query<{
|
|
1795
|
+
id: string;
|
|
1796
|
+
repository_id: string | null;
|
|
1797
|
+
pattern_type: string;
|
|
1798
|
+
file_path: string | null;
|
|
1799
|
+
description: string;
|
|
1800
|
+
example: string | null;
|
|
1801
|
+
created_at: string;
|
|
1802
|
+
}>(sql, queryParams);
|
|
1803
|
+
|
|
1804
|
+
return {
|
|
1805
|
+
results: rows.map((row) => ({
|
|
1806
|
+
id: row.id,
|
|
1807
|
+
repository_id: row.repository_id,
|
|
1808
|
+
pattern_type: row.pattern_type,
|
|
1809
|
+
file_path: row.file_path,
|
|
1810
|
+
description: row.description,
|
|
1811
|
+
example: row.example,
|
|
1812
|
+
created_at: row.created_at,
|
|
1813
|
+
})),
|
|
1814
|
+
count: rows.length,
|
|
1815
|
+
};
|
|
1816
|
+
}
|
|
1817
|
+
|
|
1818
|
+
/**
 * Execute record_insight tool.
 *
 * Validates tool arguments and inserts a new row into the insights table,
 * returning a success envelope with the generated UUID.
 *
 * NOTE(review): `repository` is accepted and type-checked below but never
 * persisted — the INSERT writes no repository column. Confirm whether
 * insights are meant to carry a repository association.
 *
 * @param params - Raw tool arguments: { content, insight_type, session_id?,
 *   related_file?, repository? }.
 * @param _requestId - JSON-RPC request id (unused; kept for dispatcher symmetry).
 * @param _userId - Calling user id (unused; kept for dispatcher symmetry).
 * @returns { success, id, message } on successful insert.
 * @throws Error when parameter validation fails.
 */
export async function executeRecordInsight(
  params: unknown,
  _requestId: string | number,
  _userId: string,
): Promise<unknown> {
  if (typeof params !== "object" || params === null) {
    throw new Error("Parameters must be an object");
  }

  const p = params as Record<string, unknown>;

  // Required fields: content plus an insight_type restricted to the enum.
  if (p.content === undefined || typeof p.content !== "string") {
    throw new Error("Missing or invalid required parameter: content");
  }
  if (p.insight_type === undefined || typeof p.insight_type !== "string") {
    throw new Error("Missing or invalid required parameter: insight_type");
  }
  if (!["discovery", "failure", "workaround"].includes(p.insight_type as string)) {
    throw new Error("Parameter 'insight_type' must be one of: discovery, failure, workaround");
  }

  // Optional fields: validate type only when present.
  if (p.session_id !== undefined && typeof p.session_id !== "string") {
    throw new Error("Parameter 'session_id' must be a string");
  }
  if (p.related_file !== undefined && typeof p.related_file !== "string") {
    throw new Error("Parameter 'related_file' must be a string");
  }
  if (p.repository !== undefined && typeof p.repository !== "string") {
    throw new Error("Parameter 'repository' must be a string");
  }

  const db = getGlobalDatabase();
  const { randomUUID } = await import("node:crypto");
  const id = randomUUID();

  const sql = `
    INSERT INTO insights (
      id, session_id, content, insight_type, related_file, created_at
    ) VALUES (?, ?, ?, ?, ?, datetime('now'))
  `;

  db.run(sql, [
    id,
    // Empty-string values are normalized to NULL by the || fallbacks.
    (p.session_id as string) || null,
    p.content as string,
    p.insight_type as string,
    (p.related_file as string) || null,
  ]);

  logger.info("Insight recorded", { id, insight_type: p.insight_type });

  return {
    success: true,
    id,
    message: "Insight recorded successfully",
  };
}
|
|
1878
|
+
|
|
1134
1879
|
/**
|
|
1135
1880
|
* Main tool call dispatcher
|
|
1136
1881
|
*/
|
|
@@ -1159,6 +1904,19 @@ export async function handleToolCall(
|
|
|
1159
1904
|
return await executeSyncImport(params, requestId);
|
|
1160
1905
|
case "generate_task_context":
|
|
1161
1906
|
return await executeGenerateTaskContext(params, requestId, userId);
|
|
1907
|
+
// Memory Layer tools
|
|
1908
|
+
case "search_decisions":
|
|
1909
|
+
return await executeSearchDecisions(params, requestId, userId);
|
|
1910
|
+
case "record_decision":
|
|
1911
|
+
return await executeRecordDecision(params, requestId, userId);
|
|
1912
|
+
case "search_failures":
|
|
1913
|
+
return await executeSearchFailures(params, requestId, userId);
|
|
1914
|
+
case "record_failure":
|
|
1915
|
+
return await executeRecordFailure(params, requestId, userId);
|
|
1916
|
+
case "search_patterns":
|
|
1917
|
+
return await executeSearchPatterns(params, requestId, userId);
|
|
1918
|
+
case "record_insight":
|
|
1919
|
+
return await executeRecordInsight(params, requestId, userId);
|
|
1162
1920
|
default:
|
|
1163
1921
|
throw invalidParams(requestId, "Unknown tool: " + toolName);
|
|
1164
1922
|
}
|