dev-mcp-server 0.0.2 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +23 -55
- package/README.md +609 -219
- package/cli.js +486 -160
- package/package.json +2 -2
- package/src/agents/BaseAgent.js +113 -0
- package/src/agents/dreamer.js +165 -0
- package/src/agents/improver.js +175 -0
- package/src/agents/specialists.js +202 -0
- package/src/agents/taskDecomposer.js +176 -0
- package/src/agents/teamCoordinator.js +153 -0
- package/src/api/routes/agents.js +172 -0
- package/src/api/routes/extras.js +115 -0
- package/src/api/routes/git.js +72 -0
- package/src/api/routes/ingest.js +60 -40
- package/src/api/routes/knowledge.js +59 -41
- package/src/api/routes/memory.js +41 -0
- package/src/api/routes/newRoutes.js +168 -0
- package/src/api/routes/pipelines.js +41 -0
- package/src/api/routes/planner.js +54 -0
- package/src/api/routes/query.js +24 -0
- package/src/api/routes/sessions.js +54 -0
- package/src/api/routes/tasks.js +67 -0
- package/src/api/routes/tools.js +85 -0
- package/src/api/routes/v5routes.js +196 -0
- package/src/api/server.js +133 -5
- package/src/context/compactor.js +151 -0
- package/src/context/contextEngineer.js +181 -0
- package/src/context/contextVisualizer.js +140 -0
- package/src/core/conversationEngine.js +231 -0
- package/src/core/indexer.js +169 -143
- package/src/core/ingester.js +141 -126
- package/src/core/queryEngine.js +286 -236
- package/src/cron/cronScheduler.js +260 -0
- package/src/dashboard/index.html +1181 -0
- package/src/lsp/symbolNavigator.js +220 -0
- package/src/memory/memoryManager.js +186 -0
- package/src/memory/teamMemory.js +111 -0
- package/src/messaging/messageBus.js +177 -0
- package/src/monitor/proactiveMonitor.js +337 -0
- package/src/pipelines/pipelineEngine.js +230 -0
- package/src/planner/plannerEngine.js +202 -0
- package/src/plugins/builtin/stats-plugin.js +29 -0
- package/src/plugins/pluginManager.js +144 -0
- package/src/prompts/promptEngineer.js +289 -0
- package/src/sessions/sessionManager.js +166 -0
- package/src/skills/skillsManager.js +263 -0
- package/src/storage/store.js +127 -105
- package/src/tasks/taskManager.js +151 -0
- package/src/tools/BashTool.js +154 -0
- package/src/tools/FileEditTool.js +280 -0
- package/src/tools/GitTool.js +212 -0
- package/src/tools/GrepTool.js +199 -0
- package/src/tools/registry.js +1380 -0
- package/src/utils/costTracker.js +69 -0
- package/src/utils/fileParser.js +176 -153
- package/src/utils/llmClient.js +355 -206
- package/src/watcher/fileWatcher.js +137 -0
- package/src/worktrees/worktreeManager.js +176 -0
package/src/core/ingester.js
CHANGED
|
@@ -7,149 +7,164 @@ const indexer = require('./indexer');
|
|
|
7
7
|
const logger = require('../utils/logger');
|
|
8
8
|
|
|
9
9
|
// NOTE(review): this class relies on file-level bindings (`fs`, `path`, `store`,
// `parseFile`, `shouldSkip`, `FILE_TYPE_MAP`, `glob`, plus the visible `indexer`
// and `logger` requires) that are declared above the diff hunk shown here —
// confirm against the full file.
class Ingester {
  /**
   * Ingest a single file into the document store.
   *
   * Removes any previously stored chunks for the file, re-parses it, stores
   * the fresh chunks, and invalidates the search index so the next query
   * rebuilds it.
   *
   * @param {string} filePath - Relative or absolute path to the file.
   * @returns {Promise<object>} `{ skipped: true, reason? }` when the file is
   *   filtered out (ignored path, unsupported extension, > 500KB, or empty),
   *   otherwise `{ success: true, file, chunks, ...storeResult }`.
   * @throws {Error} If the file does not exist, or if parsing/storing fails
   *   (the error is logged and rethrown).
   */
  async ingestFile(filePath) {
    const absPath = path.resolve(filePath);

    if (!fs.existsSync(absPath)) {
      throw new Error(`File not found: ${absPath}`);
    }

    // Project-level ignore rules (generated/vendored files, etc.).
    if (shouldSkip(absPath)) {
      logger.warn(`Skipping: ${absPath}`);
      return { skipped: true };
    }

    const ext = path.extname(absPath).toLowerCase();
    if (!FILE_TYPE_MAP[ext]) {
      logger.warn(`Unsupported file type: ${ext}`);
      return { skipped: true, reason: 'unsupported type' };
    }

    const stat = fs.statSync(absPath);
    if (stat.size > 500 * 1024) { // skip files > 500KB
      logger.warn(`File too large (${(stat.size / 1024).toFixed(0)}KB): ${absPath}`);
      return { skipped: true, reason: 'file too large' };
    }

    try {
      // Remove old chunks for this file so re-ingestion never duplicates docs.
      store.removeByPath(absPath);

      // Parse into chunks
      const chunks = parseFile(absPath);
      if (chunks.length === 0) {
        return { skipped: true, reason: 'empty file' };
      }

      // Store
      const result = store.upsertDocs(chunks);

      // Invalidate index so next search will rebuild
      indexer.invalidate();

      logger.info(`Ingested: ${path.basename(absPath)} (${chunks.length} chunk(s))`);
      return { success: true, file: absPath, chunks: chunks.length, ...result };
    } catch (err) {
      logger.error(`Failed to ingest ${absPath}: ${err.message}`);
      throw err;
    }
  }

  /**
   * Ingest all supported files in a directory (recursive glob).
   *
   * Files are processed sequentially via {@link Ingester#ingestFile}; per-file
   * failures are collected rather than aborting the batch. The search index is
   * rebuilt once at the end instead of after every file.
   *
   * @param {string} dirPath - Directory to scan.
   * @param {object} [options] - Reserved for future use (currently unused —
   *   kept for interface compatibility).
   * @returns {Promise<object>} Summary: `{ total, ingested, skipped, failed,
   *   totalChunks, errors: [{ file, error }] }`.
   * @throws {Error} If the directory does not exist.
   */
  async ingestDirectory(dirPath, options = {}) {
    const absDir = path.resolve(dirPath);

    if (!fs.existsSync(absDir)) {
      throw new Error(`Directory not found: ${absDir}`);
    }

    // Build one glob pattern from the supported extensions, e.g. "**/*.{js,md}".
    const extensions = Object.keys(FILE_TYPE_MAP).map(e => e.slice(1));
    const pattern = `**/*.{${extensions.join(',')}}`;

    logger.info(`Scanning: ${absDir}`);
    const files = await glob(pattern, {
      cwd: absDir,
      absolute: true,
      ignore: [
        '**/node_modules/**',
        '**/.git/**',
        '**/dist/**',
        '**/build/**',
        '**/coverage/**',
        '**/*.min.js',
        '**/package-lock.json',
        '**/yarn.lock',
      ],
    });

    logger.info(`Found ${files.length} files to process`);

    const results = {
      total: files.length,
      ingested: 0,
      skipped: 0,
      failed: 0,
      totalChunks: 0,
      errors: [],
    };

    for (const file of files) {
      try {
        const result = await this.ingestFile(file);
        if (result.skipped) {
          results.skipped++;
        } else {
          results.ingested++;
          results.totalChunks += result.chunks || 0;
        }
      } catch (err) {
        results.failed++;
        results.errors.push({ file, error: err.message });
      }

      // Progress logging every 50 files.
      // FIX: count failed files too and require at least one processed file.
      // The previous check `(ingested + skipped) % 50 === 0` logged
      // "Progress: 0/N" when the first file failed, and re-logged the same
      // stalled count on every consecutive failure.
      const processed = results.ingested + results.skipped + results.failed;
      if (processed > 0 && processed % 50 === 0) {
        logger.info(`Progress: ${processed}/${files.length}`);
      }
    }

    // Rebuild the index after batch ingest
    const docCount = indexer.build();
    logger.info(`Index rebuilt with ${docCount} documents`);

    return results;
  }

  /**
   * Ingest raw text (e.g. error logs pasted directly, API response, bug description)
   *
   * @param {string} content - Raw text to store; must be non-empty after trim.
   * @param {object} [options]
   * @param {string} [options.kind='documentation'] - Document kind.
   * @param {string} [options.label='manual-entry'] - Human-readable label; also
   *   embedded in the generated doc id and synthetic `raw://` path.
   * @param {string[]} [options.tags=[]] - Free-form tags stored in metadata.
   * @returns {Promise<object>} `{ success: true, id, kind, label }`.
   * @throws {Error} If `content` is empty or whitespace-only.
   */
  async ingestRawText(content, options = {}) {
    const {
      kind = 'documentation',
      label = 'manual-entry',
      tags = [],
    } = options;

    if (!content || content.trim().length === 0) {
      throw new Error('Content cannot be empty');
    }

    // Date.now() suffix keeps repeated ingests under the same label distinct.
    const id = `raw::${label}::${Date.now()}`;
    const doc = {
      id,
      filePath: `raw://${label}`,
      filename: label,
      ext: '.txt',
      kind,
      chunkIndex: 0,
      totalChunks: 1,
      content: content.trim(),
      // FIX: count lines of the trimmed text actually stored; previously the
      // untrimmed input was counted, inflating the number when the input had
      // leading/trailing blank lines.
      lines: content.trim().split('\n').length,
      ingestedAt: new Date().toISOString(),
      metadata: {
        isRaw: true,
        tags,
        label,
      },
    };

    store.upsertDocs([doc]);
    indexer.invalidate();

    logger.info(`Ingested raw text: "${label}" (${content.length} chars)`);
    return { success: true, id, kind, label };
  }
}
|
|
154
169
|
|
|
155
170
|
module.exports = new Ingester();
|