dev-mcp-server 0.0.2 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/.env.example +23 -55
  2. package/README.md +609 -219
  3. package/cli.js +486 -160
  4. package/package.json +2 -2
  5. package/src/agents/BaseAgent.js +113 -0
  6. package/src/agents/dreamer.js +165 -0
  7. package/src/agents/improver.js +175 -0
  8. package/src/agents/specialists.js +202 -0
  9. package/src/agents/taskDecomposer.js +176 -0
  10. package/src/agents/teamCoordinator.js +153 -0
  11. package/src/api/routes/agents.js +172 -0
  12. package/src/api/routes/extras.js +115 -0
  13. package/src/api/routes/git.js +72 -0
  14. package/src/api/routes/ingest.js +60 -40
  15. package/src/api/routes/knowledge.js +59 -41
  16. package/src/api/routes/memory.js +41 -0
  17. package/src/api/routes/newRoutes.js +168 -0
  18. package/src/api/routes/pipelines.js +41 -0
  19. package/src/api/routes/planner.js +54 -0
  20. package/src/api/routes/query.js +24 -0
  21. package/src/api/routes/sessions.js +54 -0
  22. package/src/api/routes/tasks.js +67 -0
  23. package/src/api/routes/tools.js +85 -0
  24. package/src/api/routes/v5routes.js +196 -0
  25. package/src/api/server.js +133 -5
  26. package/src/context/compactor.js +151 -0
  27. package/src/context/contextEngineer.js +181 -0
  28. package/src/context/contextVisualizer.js +140 -0
  29. package/src/core/conversationEngine.js +231 -0
  30. package/src/core/indexer.js +169 -143
  31. package/src/core/ingester.js +141 -126
  32. package/src/core/queryEngine.js +286 -236
  33. package/src/cron/cronScheduler.js +260 -0
  34. package/src/dashboard/index.html +1181 -0
  35. package/src/lsp/symbolNavigator.js +220 -0
  36. package/src/memory/memoryManager.js +186 -0
  37. package/src/memory/teamMemory.js +111 -0
  38. package/src/messaging/messageBus.js +177 -0
  39. package/src/monitor/proactiveMonitor.js +337 -0
  40. package/src/pipelines/pipelineEngine.js +230 -0
  41. package/src/planner/plannerEngine.js +202 -0
  42. package/src/plugins/builtin/stats-plugin.js +29 -0
  43. package/src/plugins/pluginManager.js +144 -0
  44. package/src/prompts/promptEngineer.js +289 -0
  45. package/src/sessions/sessionManager.js +166 -0
  46. package/src/skills/skillsManager.js +263 -0
  47. package/src/storage/store.js +127 -105
  48. package/src/tasks/taskManager.js +151 -0
  49. package/src/tools/BashTool.js +154 -0
  50. package/src/tools/FileEditTool.js +280 -0
  51. package/src/tools/GitTool.js +212 -0
  52. package/src/tools/GrepTool.js +199 -0
  53. package/src/tools/registry.js +1380 -0
  54. package/src/utils/costTracker.js +69 -0
  55. package/src/utils/fileParser.js +176 -153
  56. package/src/utils/llmClient.js +355 -206
  57. package/src/watcher/fileWatcher.js +137 -0
  58. package/src/worktrees/worktreeManager.js +176 -0
@@ -7,149 +7,164 @@ const indexer = require('./indexer');
7
7
  const logger = require('../utils/logger');
8
8
 
9
9
  class Ingester {
10
- async ingestFile(filePath) {
11
- const absPath = path.resolve(filePath);
12
-
13
- if (!fs.existsSync(absPath)) {
14
- throw new Error(`File not found: ${absPath}`);
15
- }
10
+ /**
11
+ * Ingest a single file
12
+ */
13
+ async ingestFile(filePath) {
14
+ const absPath = path.resolve(filePath);
15
+
16
+ if (!fs.existsSync(absPath)) {
17
+ throw new Error(`File not found: ${absPath}`);
18
+ }
16
19
 
17
- if (shouldSkip(absPath)) {
18
- logger.warn(`Skipping: ${absPath}`);
19
- return { skipped: true };
20
- }
20
+ if (shouldSkip(absPath)) {
21
+ logger.warn(`Skipping: ${absPath}`);
22
+ return { skipped: true };
23
+ }
21
24
 
22
- const ext = path.extname(absPath).toLowerCase();
23
- if (!FILE_TYPE_MAP[ext]) {
24
- logger.warn(`Unsupported file type: ${ext}`);
25
- return { skipped: true, reason: 'unsupported type' };
26
- }
25
+ const ext = path.extname(absPath).toLowerCase();
26
+ if (!FILE_TYPE_MAP[ext]) {
27
+ logger.warn(`Unsupported file type: ${ext}`);
28
+ return { skipped: true, reason: 'unsupported type' };
29
+ }
27
30
 
28
- const stat = fs.statSync(absPath);
29
- if (stat.size > 500 * 1024) { // skip files > 500KB
30
- logger.warn(`File too large (${(stat.size / 1024).toFixed(0)}KB): ${absPath}`);
31
- return { skipped: true, reason: 'file too large' };
32
- }
31
+ const stat = fs.statSync(absPath);
32
+ if (stat.size > 500 * 1024) { // skip files > 500KB
33
+ logger.warn(`File too large (${(stat.size / 1024).toFixed(0)}KB): ${absPath}`);
34
+ return { skipped: true, reason: 'file too large' };
35
+ }
33
36
 
34
- try {
35
- store.removeByPath(absPath);
37
+ try {
38
+ // Remove old chunks for this file
39
+ store.removeByPath(absPath);
36
40
 
37
- const chunks = parseFile(absPath);
38
- if (chunks.length === 0) {
39
- return { skipped: true, reason: 'empty file' };
40
- }
41
+ // Parse into chunks
42
+ const chunks = parseFile(absPath);
43
+ if (chunks.length === 0) {
44
+ return { skipped: true, reason: 'empty file' };
45
+ }
41
46
 
42
- const result = store.upsertDocs(chunks);
47
+ // Store
48
+ const result = store.upsertDocs(chunks);
43
49
 
44
- indexer.invalidate();
50
+ // Invalidate index so next search will rebuild
51
+ indexer.invalidate();
45
52
 
46
- logger.info(`Ingested: ${path.basename(absPath)} (${chunks.length} chunk(s))`);
47
- return { success: true, file: absPath, chunks: chunks.length, ...result };
53
+ logger.info(`Ingested: ${path.basename(absPath)} (${chunks.length} chunk(s))`);
54
+ return { success: true, file: absPath, chunks: chunks.length, ...result };
48
55
 
49
- } catch (err) {
50
- logger.error(`Failed to ingest ${absPath}: ${err.message}`);
51
- throw err;
52
- }
56
+ } catch (err) {
57
+ logger.error(`Failed to ingest ${absPath}: ${err.message}`);
58
+ throw err;
53
59
  }
60
+ }
54
61
 
55
- async ingestDirectory(dirPath, options = {}) {
56
- const absDir = path.resolve(dirPath);
57
-
58
- if (!fs.existsSync(absDir)) {
59
- throw new Error(`Directory not found: ${absDir}`);
60
- }
61
-
62
- const extensions = Object.keys(FILE_TYPE_MAP).map(e => e.slice(1));
63
- const pattern = `**/*.{${extensions.join(',')}}`;
64
-
65
- logger.info(`Scanning: ${absDir}`);
66
- const files = await glob(pattern, {
67
- cwd: absDir,
68
- absolute: true,
69
- ignore: [
70
- '**/node_modules/**',
71
- '**/.git/**',
72
- '**/dist/**',
73
- '**/build/**',
74
- '**/coverage/**',
75
- '**/*.min.js',
76
- '**/package-lock.json',
77
- '**/yarn.lock',
78
- ],
79
- });
80
-
81
- logger.info(`Found ${files.length} files to process`);
82
-
83
- const results = {
84
- total: files.length,
85
- ingested: 0,
86
- skipped: 0,
87
- failed: 0,
88
- totalChunks: 0,
89
- errors: [],
90
- };
91
-
92
- for (const file of files) {
93
- try {
94
- const result = await this.ingestFile(file);
95
- if (result.skipped) {
96
- results.skipped++;
97
- } else {
98
- results.ingested++;
99
- results.totalChunks += result.chunks || 0;
100
- }
101
- } catch (err) {
102
- results.failed++;
103
- results.errors.push({ file, error: err.message });
104
- }
105
-
106
- if ((results.ingested + results.skipped) % 50 === 0) {
107
- logger.info(`Progress: ${results.ingested + results.skipped}/${files.length}`);
108
- }
109
- }
62
+ /**
63
+ * Ingest all supported files in a directory
64
+ */
65
+ async ingestDirectory(dirPath, options = {}) {
66
+ const absDir = path.resolve(dirPath);
110
67
 
111
- const docCount = indexer.build();
112
- logger.info(`Index rebuilt with ${docCount} documents`);
113
-
114
- return results;
68
+ if (!fs.existsSync(absDir)) {
69
+ throw new Error(`Directory not found: ${absDir}`);
115
70
  }
116
71
 
117
- async ingestRawText(content, options = {}) {
118
- const {
119
- kind = 'documentation',
120
- label = 'manual-entry',
121
- tags = [],
122
- } = options;
123
-
124
- if (!content || content.trim().length === 0) {
125
- throw new Error('Content cannot be empty');
72
+ const extensions = Object.keys(FILE_TYPE_MAP).map(e => e.slice(1));
73
+ const pattern = `**/*.{${extensions.join(',')}}`;
74
+
75
+ logger.info(`Scanning: ${absDir}`);
76
+ const files = await glob(pattern, {
77
+ cwd: absDir,
78
+ absolute: true,
79
+ ignore: [
80
+ '**/node_modules/**',
81
+ '**/.git/**',
82
+ '**/dist/**',
83
+ '**/build/**',
84
+ '**/coverage/**',
85
+ '**/*.min.js',
86
+ '**/package-lock.json',
87
+ '**/yarn.lock',
88
+ ],
89
+ });
90
+
91
+ logger.info(`Found ${files.length} files to process`);
92
+
93
+ const results = {
94
+ total: files.length,
95
+ ingested: 0,
96
+ skipped: 0,
97
+ failed: 0,
98
+ totalChunks: 0,
99
+ errors: [],
100
+ };
101
+
102
+ for (const file of files) {
103
+ try {
104
+ const result = await this.ingestFile(file);
105
+ if (result.skipped) {
106
+ results.skipped++;
107
+ } else {
108
+ results.ingested++;
109
+ results.totalChunks += result.chunks || 0;
126
110
  }
111
+ } catch (err) {
112
+ results.failed++;
113
+ results.errors.push({ file, error: err.message });
114
+ }
115
+
116
+ // Progress logging every 50 files
117
+ if ((results.ingested + results.skipped) % 50 === 0) {
118
+ logger.info(`Progress: ${results.ingested + results.skipped}/${files.length}`);
119
+ }
120
+ }
127
121
 
128
- const id = `raw::${label}::${Date.now()}`;
129
- const doc = {
130
- id,
131
- filePath: `raw://${label}`,
132
- filename: label,
133
- ext: '.txt',
134
- kind,
135
- chunkIndex: 0,
136
- totalChunks: 1,
137
- content: content.trim(),
138
- lines: content.split('\n').length,
139
- ingestedAt: new Date().toISOString(),
140
- metadata: {
141
- isRaw: true,
142
- tags,
143
- label,
144
- },
145
- };
146
-
147
- store.upsertDocs([doc]);
148
- indexer.invalidate();
149
-
150
- logger.info(`Ingested raw text: "${label}" (${content.length} chars)`);
151
- return { success: true, id, kind, label };
122
+ // Rebuild the index after batch ingest
123
+ const docCount = indexer.build();
124
+ logger.info(`Index rebuilt with ${docCount} documents`);
125
+
126
+ return results;
127
+ }
128
+
129
+ /**
130
+ * Ingest raw text (e.g. error logs pasted directly, API response, bug description)
131
+ */
132
+ async ingestRawText(content, options = {}) {
133
+ const {
134
+ kind = 'documentation',
135
+ label = 'manual-entry',
136
+ tags = [],
137
+ } = options;
138
+
139
+ if (!content || content.trim().length === 0) {
140
+ throw new Error('Content cannot be empty');
152
141
  }
142
+
143
+ const id = `raw::${label}::${Date.now()}`;
144
+ const doc = {
145
+ id,
146
+ filePath: `raw://${label}`,
147
+ filename: label,
148
+ ext: '.txt',
149
+ kind,
150
+ chunkIndex: 0,
151
+ totalChunks: 1,
152
+ content: content.trim(),
153
+ lines: content.split('\n').length,
154
+ ingestedAt: new Date().toISOString(),
155
+ metadata: {
156
+ isRaw: true,
157
+ tags,
158
+ label,
159
+ },
160
+ };
161
+
162
+ store.upsertDocs([doc]);
163
+ indexer.invalidate();
164
+
165
+ logger.info(`Ingested raw text: "${label}" (${content.length} chars)`);
166
+ return { success: true, id, kind, label };
167
+ }
153
168
  }
154
169
 
155
170
  module.exports = new Ingester();