dev-mcp-server 0.0.3 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/.env.example +23 -55
  2. package/README.md +609 -219
  3. package/cli.js +486 -160
  4. package/package.json +2 -2
  5. package/src/agents/BaseAgent.js +113 -0
  6. package/src/agents/dreamer.js +165 -0
  7. package/src/agents/improver.js +175 -0
  8. package/src/agents/specialists.js +202 -0
  9. package/src/agents/taskDecomposer.js +176 -0
  10. package/src/agents/teamCoordinator.js +153 -0
  11. package/src/api/routes/agents.js +172 -0
  12. package/src/api/routes/extras.js +115 -0
  13. package/src/api/routes/git.js +72 -0
  14. package/src/api/routes/ingest.js +60 -40
  15. package/src/api/routes/knowledge.js +59 -41
  16. package/src/api/routes/memory.js +41 -0
  17. package/src/api/routes/newRoutes.js +168 -0
  18. package/src/api/routes/pipelines.js +41 -0
  19. package/src/api/routes/planner.js +54 -0
  20. package/src/api/routes/query.js +24 -0
  21. package/src/api/routes/sessions.js +54 -0
  22. package/src/api/routes/tasks.js +67 -0
  23. package/src/api/routes/tools.js +85 -0
  24. package/src/api/routes/v5routes.js +196 -0
  25. package/src/api/server.js +134 -6
  26. package/src/context/compactor.js +151 -0
  27. package/src/context/contextEngineer.js +181 -0
  28. package/src/context/contextVisualizer.js +140 -0
  29. package/src/core/conversationEngine.js +231 -0
  30. package/src/core/indexer.js +169 -143
  31. package/src/core/ingester.js +141 -126
  32. package/src/core/queryEngine.js +286 -236
  33. package/src/cron/cronScheduler.js +260 -0
  34. package/src/dashboard/index.html +1181 -0
  35. package/src/lsp/symbolNavigator.js +220 -0
  36. package/src/memory/memoryManager.js +186 -0
  37. package/src/memory/teamMemory.js +111 -0
  38. package/src/messaging/messageBus.js +177 -0
  39. package/src/monitor/proactiveMonitor.js +337 -0
  40. package/src/pipelines/pipelineEngine.js +230 -0
  41. package/src/planner/plannerEngine.js +202 -0
  42. package/src/plugins/builtin/stats-plugin.js +29 -0
  43. package/src/plugins/pluginManager.js +144 -0
  44. package/src/prompts/promptEngineer.js +289 -0
  45. package/src/sessions/sessionManager.js +166 -0
  46. package/src/skills/skillsManager.js +263 -0
  47. package/src/storage/store.js +127 -105
  48. package/src/tasks/taskManager.js +151 -0
  49. package/src/tools/BashTool.js +154 -0
  50. package/src/tools/FileEditTool.js +280 -0
  51. package/src/tools/GitTool.js +212 -0
  52. package/src/tools/GrepTool.js +199 -0
  53. package/src/tools/registry.js +1380 -0
  54. package/src/utils/costTracker.js +69 -0
  55. package/src/utils/fileParser.js +176 -153
  56. package/src/utils/llmClient.js +355 -206
  57. package/src/watcher/fileWatcher.js +137 -0
  58. package/src/worktrees/worktreeManager.js +176 -0
/**
 * Multi-turn conversational Q&A — stateful, context-aware, memory-injecting.
 *
 * Unlike QueryEngine (single-shot), ConversationEngine maintains a rolling
 * conversation window. Each turn injects:
 *   - Prior conversation (compressed if too long)
 *   - Fresh retrieved context
 *   - Relevant memories
 *   - Session-specific facts learned so far
 */

const llm = require('../utils/llmClient');
const indexer = require('./indexer');
const contextEngineer = require('../context/contextEngineer');
const { MemoryManager } = require('../memory/memoryManager');
const costTracker = require('../utils/costTracker');
const plannerEngine = require('../planner/plannerEngine');
const logger = require('../utils/logger');

// Max messages to keep in raw history before compacting.
// (The original also declared a HISTORY_BUDGET constant that was never read
// anywhere in the module; it has been removed as dead code.)
const MAX_HISTORY = 12;

class ConversationEngine {
  constructor() {
    // convId → { messages, compactedSummary, sessionFacts, turnCount }
    this._conversations = new Map();
  }

  /**
   * Send a message in a conversation. Creates the conversation if it doesn't
   * exist yet.
   *
   * @param {string} message - The user's message for this turn.
   * @param {string} [convId='default'] - Conversation identifier.
   * @param {object} [opts] - Options.
   * @param {string} [opts.sessionId=convId] - Session id for cost/memory attribution.
   * @param {number} [opts.topK=6] - How many context documents to retrieve.
   * @returns {Promise<object>} Answer text plus turn/context/usage metadata.
   */
  async chat(message, convId = 'default', opts = {}) {
    const { sessionId = convId, topK = 6 } = opts;

    if (!this._conversations.has(convId)) {
      this._conversations.set(convId, {
        messages: [],
        compactedSummary: null,
        sessionFacts: [],
        turnCount: 0,
      });
    }

    const conv = this._conversations.get(convId);
    conv.turnCount++;

    logger.info(`[Conv:${convId}] Turn ${conv.turnCount}: "${message.slice(0, 60)}"`);

    // 1. Retrieve fresh context for this message.
    const docs = indexer.search(message, topK);
    const engineered = contextEngineer.engineer(docs, message, 'general');

    // 2. Retrieve relevant memories.
    const memories = MemoryManager.getRelevant(message, 4);
    const memContext = MemoryManager.formatAsContext(memories);

    // 3. Build system prompt with session knowledge.
    const systemPrompt = this._buildSystem(conv, memContext);

    // 4. Build messages array (history + new message).
    const historyMessages = this._buildHistory(conv);
    const userContent = this._buildUserContent(message, engineered.chunks);

    const allMessages = [
      ...historyMessages,
      { role: 'user', content: userContent },
    ];

    // 5. Call the LLM. Response shape is Anthropic-style (content[0].text,
    // usage.input_tokens/output_tokens) — provided by the llmClient wrapper.
    const response = await llm.chat({
      model: llm.model('smart'),
      max_tokens: 2000,
      system: systemPrompt,
      messages: allMessages,
    });

    const answer = response.content[0].text;

    // 6. Track cost.
    const cost = costTracker.record({
      model: llm.model('smart'),
      inputTokens: response.usage.input_tokens,
      outputTokens: response.usage.output_tokens,
      sessionId,
      queryType: 'conversation',
    });

    // 7. Update conversation history.
    conv.messages.push({
      role: 'user',
      content: message,
      timestamp: new Date().toISOString(),
    });
    conv.messages.push({
      role: 'assistant',
      content: answer,
      timestamp: new Date().toISOString(),
      sources: engineered.chunks.map(c => c.filename),
    });

    // 8. Auto-compact if history is too long.
    if (conv.messages.length >= MAX_HISTORY) {
      await this._compact(conv, convId);
    }

    // 9. Extract session-specific facts (quick, lightweight heuristics).
    this._extractSessionFacts(conv, answer);

    // 10. Background memory extraction — fire-and-forget so it cannot block
    // or break the turn, but failures are logged rather than swallowed.
    MemoryManager.extractFromExchange(message, answer, sessionId).catch((err) =>
      logger.warn(`[Conv:${convId}] Background memory extraction failed: ${err.message}`)
    );

    return {
      answer,
      convId,
      turn: conv.turnCount,
      contextChunks: engineered.chunks.length,
      memoriesUsed: memories.length,
      isCompacted: !!conv.compactedSummary,
      usage: {
        inputTokens: response.usage.input_tokens,
        outputTokens: response.usage.output_tokens,
        costUsd: cost.costUsd,
      },
    };
  }

  /**
   * Ask a follow-up question referencing previous context.
   * Falls back to a plain chat() if the conversation has no history yet.
   */
  async followUp(message, convId = 'default', opts = {}) {
    const conv = this._conversations.get(convId);
    if (!conv || conv.messages.length === 0) {
      return this.chat(message, convId, opts);
    }
    // Add a hint that this is a follow-up.
    return this.chat(`[Follow-up on our conversation] ${message}`, convId, opts);
  }

  /**
   * Get conversation history (empty array if the conversation is unknown).
   */
  getHistory(convId = 'default') {
    const conv = this._conversations.get(convId);
    if (!conv) return [];
    return conv.messages;
  }

  /**
   * Reset (delete) a conversation.
   */
  reset(convId = 'default') {
    this._conversations.delete(convId);
    logger.info(`[Conv:${convId}] Reset`);
  }

  /**
   * List active conversations with summary stats.
   */
  list() {
    return [...this._conversations.entries()].map(([id, conv]) => ({
      id,
      turns: conv.turnCount,
      messages: conv.messages.length,
      isCompacted: !!conv.compactedSummary,
      lastMessage: conv.messages.at(-1)?.timestamp,
    }));
  }

  // ── Private helpers ──────────────────────────────────────────────────────

  /**
   * Assemble the system prompt: base persona + compacted summary (if any) +
   * session facts (if any) + formatted memory context (if any).
   */
  _buildSystem(conv, memContext) {
    const parts = [
      `You are an expert developer assistant with deep knowledge of the codebase.
Answer questions based on provided context. Be conversational but precise.
If you refer to something from earlier in the conversation, say so explicitly.`,
    ];

    if (conv.compactedSummary) {
      parts.push(`## Earlier Conversation Summary\n${conv.compactedSummary}`);
    }

    if (conv.sessionFacts.length > 0) {
      parts.push(`## Facts established this session\n${conv.sessionFacts.map(f => `- ${f}`).join('\n')}`);
    }

    if (memContext) parts.push(memContext);

    return parts.join('\n\n');
  }

  /**
   * Build the raw-message window to send: fewer messages when a compacted
   * summary already covers the older turns.
   */
  _buildHistory(conv) {
    // Only pass the recent raw messages (before the compaction point).
    const recent = conv.messages.slice(conv.compactedSummary ? -6 : -10);
    return recent.map(m => ({ role: m.role, content: m.content }));
  }

  /**
   * Append retrieved context chunks (truncated to 600 chars each) beneath
   * the user's message.
   */
  _buildUserContent(message, contextChunks) {
    if (contextChunks.length === 0) return message;
    const ctxStr = contextChunks
      .map((c, i) => `[${i + 1}] ${c.filename}:\n\`\`\`\n${c.content.slice(0, 600)}\n\`\`\``)
      .join('\n\n');
    return `${message}\n\n## Relevant codebase context:\n${ctxStr}`;
  }

  /**
   * Compress the conversation via plannerEngine; on success, keep the summary
   * plus only the last 4 raw messages. Failures are logged and tolerated —
   * the conversation keeps its full raw history in that case.
   */
  async _compact(conv, convId) {
    logger.info(`[Conv:${convId}] Compacting ${conv.messages.length} messages`);
    try {
      const result = await plannerEngine.compact(conv.messages, convId);
      if (result.compacted) {
        conv.compactedSummary = result.summary;
        // Keep only the last 4 messages raw.
        conv.messages = conv.messages.slice(-4);
        logger.info(`[Conv:${convId}] Compacted to summary + 4 recent messages`);
      }
    } catch (err) {
      logger.warn(`[Conv:${convId}] Compact failed: ${err.message}`);
    }
  }

  /**
   * Quick heuristic extraction of facts from the assistant's answer, to
   * re-inject into the system prompt next turn. Caps at 8 unique facts.
   */
  _extractSessionFacts(conv, answer) {
    // Fresh regex literals each call, so /g lastIndex state cannot leak.
    const factPatterns = [
      /the (\w+) (?:function|method|class|module|file) (?:is|does|handles|returns) ([^.]{10,60})\./gi,
      /(?:causes|caused by|because of) ([^.]{10,60})\./gi,
    ];

    for (const pattern of factPatterns) {
      const matches = answer.matchAll(pattern);
      for (const m of matches) {
        const fact = m[0].replace(/["`]/g, '').trim().slice(0, 120);
        if (!conv.sessionFacts.includes(fact) && conv.sessionFacts.length < 8) {
          conv.sessionFacts.push(fact);
        }
      }
    }
  }
}

module.exports = new ConversationEngine();
const TfIdf = natural.TfIdf;

class Indexer {
  constructor() {
    this.tfidf = new TfIdf();
    this._docMap = []; // maps tfidf document index → store doc id
    this._built = false;
  }

  /**
   * (Re)build the TF-IDF index from the store.
   * @returns {number} Number of documents indexed.
   */
  build() {
    this.tfidf = new TfIdf();
    this._docMap = [];

    const docs = store.getAll();
    for (const doc of docs) {
      const text = this._docToText(doc);
      this.tfidf.addDocument(text);
      this._docMap.push(doc.id);
    }

    this._built = true;
    logger.info(`Index built: ${docs.length} documents`);
    return docs.length;
  }

  /**
   * Search for the top-k most relevant documents for a query.
   *
   * Scoring: TF-IDF sum over query tokens, plus +0.5 per exact content
   * keyword hit and +1.0 per metadata keyword hit (tokens longer than 3
   * chars only). Results are deduplicated to one (best) chunk per file.
   *
   * @param {string} query - Free-text query.
   * @param {number} [topK=8] - Maximum results to return.
   * @param {object} [filter] - Optional filters: { kind, filename }.
   * @returns {Array<object>} Docs annotated with a `relevanceScore` field.
   */
  search(query, topK = 8, filter = {}) {
    // Lazily (re)build if the index was never built or was invalidated empty.
    if (!this._built || this._docMap.length === 0) {
      this.build();
    }

    const queryTokens = tokenizer.tokenize(query.toLowerCase());
    const scores = new Array(this._docMap.length).fill(0);

    // Score each document using TF-IDF.
    for (const token of queryTokens) {
      this.tfidf.tfidfs(token, (i, measure) => {
        if (i < scores.length) {
          scores[i] += measure;
        }
      });
    }

    // Boost exact keyword matches in content and metadata.
    // NOTE(review): this assumes store.getAll() returns documents in the
    // same order as when build() ran, so index i lines up with _docMap[i];
    // a store mutation without invalidate() would misalign scores with
    // documents — confirm the store's ordering guarantee.
    const allDocs = store.getAll();
    for (let i = 0; i < scores.length; i++) {
      const doc = allDocs[i];
      if (!doc) continue;
      const textLower = doc.content.toLowerCase();
      for (const token of queryTokens) {
        if (token.length > 3 && textLower.includes(token)) {
          scores[i] += 0.5;
        }
      }
      // Metadata matches (function names, class names, etc.) count double.
      if (doc.metadata) {
        const metaText = JSON.stringify(doc.metadata).toLowerCase();
        for (const token of queryTokens) {
          if (token.length > 3 && metaText.includes(token)) {
            scores[i] += 1.0;
          }
        }
      }
    }

    // Build scored results, dropping zero-score and missing docs.
    let results = scores
      .map((score, i) => ({ score, doc: allDocs[i] }))
      .filter(r => r.doc && r.score > 0);

    // Apply filters.
    if (filter.kind) {
      results = results.filter(r => r.doc.kind === filter.kind);
    }
    if (filter.filename) {
      const needle = filter.filename.toLowerCase();
      results = results.filter(r => r.doc.filename.toLowerCase().includes(needle));
    }

    // Sort by score descending.
    results.sort((a, b) => b.score - a.score);

    // Deduplicate by file, keeping the best chunk per file. Because results
    // are already sorted by descending score, the first chunk seen for each
    // file is its best one. (The original carried a "replace with better
    // chunk" branch requiring a strictly higher score later in the
    // descending-sorted list — unreachable, removed as dead code.)
    const seenFiles = new Set();
    const deduped = [];
    for (const r of results) {
      if (!seenFiles.has(r.doc.filePath)) {
        seenFiles.add(r.doc.filePath);
        deduped.push(r);
        if (deduped.length >= topK) break;
      }
    }

    return deduped.slice(0, topK).map(r => ({
      ...r.doc,
      relevanceScore: parseFloat(r.score.toFixed(4)),
    }));
  }

  /**
   * Search specifically for error-related content.
   * Boosts log documents (×1.5) and bug-fix documents (×1.3).
   */
  searchForErrors(errorType, topK = 6) {
    const results = this.search(errorType, topK * 2);
    return results
      .map(doc => ({
        ...doc,
        relevanceScore: doc.kind === 'log'
          ? doc.relevanceScore * 1.5
          : doc.metadata?.isBugFix
            ? doc.relevanceScore * 1.3
            : doc.relevanceScore,
      }))
      .sort((a, b) => b.relevanceScore - a.relevanceScore)
      .slice(0, topK);
  }

  /**
   * Find all usages of a function/class/symbol.
   * Extra boost for files that actually contain, import, or define it.
   */
  searchForUsages(symbol, topK = 8) {
    const query = `${symbol} usage import call reference`;
    const results = this.search(query, topK * 2);

    return results
      .map(doc => {
        let boost = 1;
        if (doc.content.includes(symbol)) boost = 2;
        if (doc.metadata?.imports?.some(i => i.includes(symbol))) boost = 2.5;
        if (doc.metadata?.functions?.includes(symbol)) boost = 3;
        return { ...doc, relevanceScore: doc.relevanceScore * boost };
      })
      .sort((a, b) => b.relevanceScore - a.relevanceScore)
      .slice(0, topK);
  }

  /**
   * Find what a file/module connects to (impact analysis).
   * Boosts documents that mention (×2) or import (×3) the target.
   */
  searchForImpact(target, topK = 8) {
    const query = `${target} depends import module connection`;
    const results = this.search(query, topK * 2);

    return results
      .map(doc => {
        let boost = 1;
        if (doc.content.includes(target)) boost = 2;
        if (doc.metadata?.imports?.some(i => i.includes(target))) boost = 3;
        return { ...doc, relevanceScore: doc.relevanceScore * boost };
      })
      .sort((a, b) => b.relevanceScore - a.relevanceScore)
      .slice(0, topK);
  }

  /**
   * Flatten a store document (filename, kind, content, and known metadata
   * arrays) into a single searchable text blob for TF-IDF indexing.
   */
  _docToText(doc) {
    const parts = [
      doc.filename,
      doc.kind,
      doc.content,
      doc.metadata?.functions?.join(' ') || '',
      doc.metadata?.classes?.join(' ') || '',
      doc.metadata?.imports?.join(' ') || '',
      doc.metadata?.exports?.join(' ') || '',
      doc.metadata?.errors?.join(' ') || '',
      doc.metadata?.patterns?.join(' ') || '',
      doc.metadata?.tables?.join(' ') || '',
    ];
    return parts.join(' ');
  }

  /**
   * Mark the index stale; the next search() will rebuild it.
   */
  invalidate() {
    this._built = false;
  }
}

// Singleton
const indexer = new Indexer();
module.exports = indexer;