ai-first-cli 1.3.6 → 1.3.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/CHANGELOG.md +123 -0
  2. package/README.es.md +14 -1
  3. package/README.md +14 -1
  4. package/ai/graph/knowledge-graph.json +1 -1
  5. package/ai-context/index-state.json +86 -2
  6. package/dist/analyzers/techStack.d.ts.map +1 -1
  7. package/dist/analyzers/techStack.js +43 -0
  8. package/dist/analyzers/techStack.js.map +1 -1
  9. package/dist/commands/ai-first.d.ts.map +1 -1
  10. package/dist/commands/ai-first.js +78 -4
  11. package/dist/commands/ai-first.js.map +1 -1
  12. package/dist/config/configLoader.d.ts +6 -0
  13. package/dist/config/configLoader.d.ts.map +1 -0
  14. package/dist/config/configLoader.js +232 -0
  15. package/dist/config/configLoader.js.map +1 -0
  16. package/dist/config/index.d.ts +3 -0
  17. package/dist/config/index.d.ts.map +1 -0
  18. package/dist/config/index.js +2 -0
  19. package/dist/config/index.js.map +1 -0
  20. package/dist/config/types.d.ts +101 -0
  21. package/dist/config/types.d.ts.map +1 -0
  22. package/dist/config/types.js +2 -0
  23. package/dist/config/types.js.map +1 -0
  24. package/dist/core/content/contentProcessor.d.ts +4 -0
  25. package/dist/core/content/contentProcessor.d.ts.map +1 -0
  26. package/dist/core/content/contentProcessor.js +235 -0
  27. package/dist/core/content/contentProcessor.js.map +1 -0
  28. package/dist/core/content/index.d.ts +3 -0
  29. package/dist/core/content/index.d.ts.map +1 -0
  30. package/dist/core/content/index.js +2 -0
  31. package/dist/core/content/index.js.map +1 -0
  32. package/dist/core/content/types.d.ts +32 -0
  33. package/dist/core/content/types.d.ts.map +1 -0
  34. package/dist/core/content/types.js +2 -0
  35. package/dist/core/content/types.js.map +1 -0
  36. package/dist/core/gitAnalyzer.d.ts +14 -0
  37. package/dist/core/gitAnalyzer.d.ts.map +1 -1
  38. package/dist/core/gitAnalyzer.js +98 -0
  39. package/dist/core/gitAnalyzer.js.map +1 -1
  40. package/dist/core/multiRepo/index.d.ts +3 -0
  41. package/dist/core/multiRepo/index.d.ts.map +1 -0
  42. package/dist/core/multiRepo/index.js +2 -0
  43. package/dist/core/multiRepo/index.js.map +1 -0
  44. package/dist/core/multiRepo/multiRepoScanner.d.ts +18 -0
  45. package/dist/core/multiRepo/multiRepoScanner.d.ts.map +1 -0
  46. package/dist/core/multiRepo/multiRepoScanner.js +131 -0
  47. package/dist/core/multiRepo/multiRepoScanner.js.map +1 -0
  48. package/dist/core/rag/index.d.ts +3 -0
  49. package/dist/core/rag/index.d.ts.map +1 -0
  50. package/dist/core/rag/index.js +2 -0
  51. package/dist/core/rag/index.js.map +1 -0
  52. package/dist/core/rag/vectorIndex.d.ts +28 -0
  53. package/dist/core/rag/vectorIndex.d.ts.map +1 -0
  54. package/dist/core/rag/vectorIndex.js +71 -0
  55. package/dist/core/rag/vectorIndex.js.map +1 -0
  56. package/dist/mcp/index.d.ts +2 -0
  57. package/dist/mcp/index.d.ts.map +1 -0
  58. package/dist/mcp/index.js +2 -0
  59. package/dist/mcp/index.js.map +1 -0
  60. package/dist/mcp/server.d.ts +7 -0
  61. package/dist/mcp/server.d.ts.map +1 -0
  62. package/dist/mcp/server.js +154 -0
  63. package/dist/mcp/server.js.map +1 -0
  64. package/docs/planning/evaluator-v1.0.0/README.md +112 -0
  65. package/docs/planning/evaluator-v1.0.0/improvements_plan_2026-03-28.md +237 -0
  66. package/package.json +13 -3
  67. package/src/analyzers/techStack.ts +47 -1
  68. package/src/commands/ai-first.ts +83 -4
  69. package/src/config/configLoader.ts +274 -0
  70. package/src/config/index.ts +27 -0
  71. package/src/config/types.ts +117 -0
  72. package/src/core/content/contentProcessor.ts +292 -0
  73. package/src/core/content/index.ts +9 -0
  74. package/src/core/content/types.ts +35 -0
  75. package/src/core/gitAnalyzer.ts +130 -0
  76. package/src/core/multiRepo/index.ts +2 -0
  77. package/src/core/multiRepo/multiRepoScanner.ts +177 -0
  78. package/src/core/rag/index.ts +2 -0
  79. package/src/core/rag/vectorIndex.ts +105 -0
  80. package/src/mcp/index.ts +1 -0
  81. package/src/mcp/server.ts +179 -0
  82. package/tests/v1.3.8-integration.test.ts +361 -0
  83. package/ai-context-evaluation-report-1774223059505.md +0 -206
  84. package/scripts/ai-context-evaluator.ts +0 -440
@@ -0,0 +1,35 @@
1
/** How much of a file's content is included in generated context. */
export type InclusionLevel = 'full' | 'compress' | 'directory' | 'exclude';

/** How much detail is kept when a file's content is compressed. */
export type DetailLevel = 'full' | 'signatures' | 'skeleton';

/** Per-file decision about inclusion and detail level. */
export interface FileClassification {
  /** File path (presumably relative to the repository root — TODO confirm). */
  path: string;
  inclusionLevel: InclusionLevel;
  detailLevel: DetailLevel;
}

/** Options controlling how a single file's content is processed. */
export interface ContentProcessorOptions {
  inclusionLevel?: InclusionLevel;
  detailLevel?: DetailLevel;
  /** Source language hint (e.g. 'typescript'). */
  language?: string;
  preserveComments?: boolean;
  preserveImports?: boolean;
}

/** Result of processing one file's content. */
export interface ProcessedContent {
  originalLength: number;
  processedLength: number;
  /** Ratio of processed to original size — exact definition set by contentProcessor; verify there. */
  compressionRatio: number;
  /** The processed (possibly compressed) content text. */
  content: string;
  /** Estimated token count of the processed content. */
  tokens: number;
}

/** Aggregate statistics for one compression pass over a set of files. */
export interface CompressionStats {
  totalFiles: number;
  fullFiles: number;
  compressedFiles: number;
  directoryOnlyFiles: number;
  excludedFiles: number;
  originalTokens: number;
  processedTokens: number;
  /** Percentage of tokens saved relative to the original total. */
  savingsPercentage: number;
}
@@ -389,3 +389,133 @@ export function generateGitContext(rootDir: string, aiDir?: string): {
389
389
  activity
390
390
  };
391
391
  }
392
+
393
/** A single file line attributed to the commit that last touched it. */
export interface GitBlameLine {
  /** 1-based line number within the file. */
  line: number;
  /** The line's text content (without the leading tab from porcelain output). */
  content: string;
  /** Author name, or 'unknown' when not attributable. */
  author: string;
  /** Author date formatted YYYY-MM-DD; empty string when unknown. */
  date: string;
  /** Full commit hash; empty string when unknown. */
  hash: string;
}

/** Blame information for one file, plus per-author line counts. */
export interface GitBlameResult {
  filePath: string;
  lines: GitBlameLine[];
  /** Author name -> number of lines attributed to that author. */
  authors: Map<string, number>;
}
406
+
407
+ export function getGitBlame(rootDir: string, filePath: string): GitBlameResult {
408
+ const fullPath = path.join(rootDir, filePath);
409
+
410
+ if (!fs.existsSync(fullPath)) {
411
+ return {
412
+ filePath,
413
+ lines: [],
414
+ authors: new Map()
415
+ };
416
+ }
417
+
418
+ if (!detectGitRepository(rootDir)) {
419
+ const content = fs.readFileSync(fullPath, 'utf-8');
420
+ const lines = content.split('\n');
421
+ return {
422
+ filePath,
423
+ lines: lines.map((content, idx) => ({
424
+ line: idx + 1,
425
+ content,
426
+ author: 'unknown',
427
+ date: '',
428
+ hash: ''
429
+ })),
430
+ authors: new Map([['unknown', lines.length]])
431
+ };
432
+ }
433
+
434
+ const blameOutput = gitExec(rootDir, `git blame --line-porcelain "${filePath}"`);
435
+
436
+ if (!blameOutput) {
437
+ return {
438
+ filePath,
439
+ lines: [],
440
+ authors: new Map()
441
+ };
442
+ }
443
+
444
+ const lines: GitBlameLine[] = [];
445
+ const authors = new Map<string, number>();
446
+ const lineData: { hash?: string; author?: string; date?: string; content?: string } = {};
447
+ let lineNumber = 0;
448
+
449
+ const blameLines = blameOutput.split('\n');
450
+
451
+ for (const blameLine of blameLines) {
452
+ if (blameLine.startsWith('\t')) {
453
+ lineData.content = blameLine.slice(1);
454
+ lineNumber++;
455
+
456
+ const author = lineData.author || 'unknown';
457
+ const date = lineData.date || '';
458
+ const hash = lineData.hash || '';
459
+
460
+ lines.push({
461
+ line: lineNumber,
462
+ content: lineData.content,
463
+ author,
464
+ date,
465
+ hash
466
+ });
467
+
468
+ authors.set(author, (authors.get(author) || 0) + 1);
469
+
470
+ lineData.hash = undefined;
471
+ lineData.author = undefined;
472
+ lineData.date = undefined;
473
+ lineData.content = undefined;
474
+ } else if (blameLine.startsWith('author ')) {
475
+ lineData.author = blameLine.slice(7);
476
+ } else if (blameLine.startsWith('author-time ')) {
477
+ const timestamp = parseInt(blameLine.slice(12), 10);
478
+ lineData.date = new Date(timestamp * 1000).toISOString().split('T')[0];
479
+ } else if (!blameLine.startsWith(' ') && blameLine.length >= 40) {
480
+ lineData.hash = blameLine.split(' ')[0];
481
+ }
482
+ }
483
+
484
+ return {
485
+ filePath,
486
+ lines,
487
+ authors
488
+ };
489
+ }
490
+
491
+ export function formatGitBlame(
492
+ blameResult: GitBlameResult,
493
+ format: 'inline' | 'block' = 'inline'
494
+ ): string {
495
+ if (format === 'block') {
496
+ const sections: string[] = [];
497
+ let currentAuthor = '';
498
+ let currentSection: string[] = [];
499
+
500
+ for (const line of blameResult.lines) {
501
+ if (line.author !== currentAuthor) {
502
+ if (currentSection.length > 0) {
503
+ sections.push(`// ${currentAuthor}\n${currentSection.join('\n')}`);
504
+ }
505
+ currentAuthor = line.author;
506
+ currentSection = [];
507
+ }
508
+ currentSection.push(line.content);
509
+ }
510
+
511
+ if (currentSection.length > 0) {
512
+ sections.push(`// ${currentAuthor}\n${currentSection.join('\n')}`);
513
+ }
514
+
515
+ return sections.join('\n\n');
516
+ }
517
+
518
+ return blameResult.lines
519
+ .map(line => `[${line.author} ${line.date}] ${line.content}`)
520
+ .join('\n');
521
+ }
@@ -0,0 +1,2 @@
1
// Public entry points for multi-repository scanning.
export { scanMultiRepo, generateMultiRepoReport } from './multiRepoScanner.js';
export type { Repository, MultiRepoContext, MultiRepoOptions } from './multiRepoScanner.js';
@@ -0,0 +1,177 @@
1
+ import path from 'path';
2
+ import fs from 'fs';
3
+ import { scanRepo, FileInfo } from '../repoScanner.js';
4
+
5
/** A scanned repository and the files discovered inside it. */
export interface Repository {
  /** Directory basename, or "parent/submodule" for scanned submodules. */
  name: string;
  /** Absolute path to the repository root. */
  path: string;
  files: FileInfo[];
}

/** Combined scan result across multiple repositories. */
export interface MultiRepoContext {
  repositories: Repository[];
  totalFiles: number;
  /** Repository name -> names of other repositories it appears to depend on. */
  crossRepoDependencies: Map<string, string[]>;
}

/** Input options for scanMultiRepo. */
export interface MultiRepoOptions {
  /** Paths (absolute or relative) of repositories to scan. */
  repositories: string[];
  /** When true, also scan submodules declared in each repo's .gitmodules. */
  includeSubmodules?: boolean;
}
21
+
22
+ export function scanMultiRepo(options: MultiRepoOptions): MultiRepoContext {
23
+ const repositories: Repository[] = [];
24
+ const crossRepoDependencies = new Map<string, string[]>();
25
+ let totalFiles = 0;
26
+
27
+ for (const repoPath of options.repositories) {
28
+ const resolvedPath = path.resolve(repoPath);
29
+
30
+ if (!fs.existsSync(resolvedPath)) {
31
+ console.warn(`Repository path does not exist: ${resolvedPath}`);
32
+ continue;
33
+ }
34
+
35
+ const repoName = path.basename(resolvedPath);
36
+ const scanResult = scanRepo(resolvedPath);
37
+
38
+ repositories.push({
39
+ name: repoName,
40
+ path: resolvedPath,
41
+ files: scanResult.files
42
+ });
43
+
44
+ totalFiles += scanResult.totalFiles;
45
+ }
46
+
47
+ if (options.includeSubmodules) {
48
+ for (const repo of repositories) {
49
+ const submodules = detectSubmodules(repo.path);
50
+ for (const submodule of submodules) {
51
+ const submoduleName = path.basename(submodule);
52
+ const scanResult = scanRepo(submodule);
53
+
54
+ repositories.push({
55
+ name: `${repo.name}/${submoduleName}`,
56
+ path: submodule,
57
+ files: scanResult.files
58
+ });
59
+
60
+ totalFiles += scanResult.totalFiles;
61
+ }
62
+ }
63
+ }
64
+
65
+ detectCrossRepoDependencies(repositories, crossRepoDependencies);
66
+
67
+ return {
68
+ repositories,
69
+ totalFiles,
70
+ crossRepoDependencies
71
+ };
72
+ }
73
+
74
+ function detectSubmodules(repoPath: string): string[] {
75
+ const submodules: string[] = [];
76
+ const gitmodulesPath = path.join(repoPath, '.gitmodules');
77
+
78
+ if (!fs.existsSync(gitmodulesPath)) {
79
+ return submodules;
80
+ }
81
+
82
+ try {
83
+ const content = fs.readFileSync(gitmodulesPath, 'utf-8');
84
+ const matches = content.match(/path\s*=\s*(.+)/g);
85
+
86
+ if (matches) {
87
+ for (const match of matches) {
88
+ const submodulePath = match.split('=')[1].trim();
89
+ const fullPath = path.join(repoPath, submodulePath);
90
+ if (fs.existsSync(fullPath)) {
91
+ submodules.push(fullPath);
92
+ }
93
+ }
94
+ }
95
+ } catch {
96
+ // Ignore errors reading .gitmodules
97
+ }
98
+
99
+ return submodules;
100
+ }
101
+
102
+ function detectCrossRepoDependencies(
103
+ repositories: Repository[],
104
+ dependencies: Map<string, string[]>
105
+ ): void {
106
+ for (const repo of repositories) {
107
+ const deps: string[] = [];
108
+
109
+ for (const otherRepo of repositories) {
110
+ if (repo.name === otherRepo.name) continue;
111
+
112
+ const hasDependency = checkDependency(repo, otherRepo);
113
+ if (hasDependency) {
114
+ deps.push(otherRepo.name);
115
+ }
116
+ }
117
+
118
+ if (deps.length > 0) {
119
+ dependencies.set(repo.name, deps);
120
+ }
121
+ }
122
+ }
123
+
124
+ function checkDependency(repoA: Repository, repoB: Repository): boolean {
125
+ const packageJsonA = path.join(repoA.path, 'package.json');
126
+
127
+ if (fs.existsSync(packageJsonA)) {
128
+ try {
129
+ const pkg = JSON.parse(fs.readFileSync(packageJsonA, 'utf-8'));
130
+ const deps = {
131
+ ...pkg.dependencies,
132
+ ...pkg.devDependencies,
133
+ ...pkg.peerDependencies
134
+ };
135
+
136
+ for (const dep of Object.keys(deps)) {
137
+ if (dep.includes(repoB.name.toLowerCase())) {
138
+ return true;
139
+ }
140
+ }
141
+ } catch {
142
+ // Ignore errors reading package.json
143
+ }
144
+ }
145
+
146
+ return false;
147
+ }
148
+
149
+ export function generateMultiRepoReport(context: MultiRepoContext): string {
150
+ const lines: string[] = [];
151
+
152
+ lines.push('# Multi-Repository Context\n');
153
+ lines.push(`## Summary`);
154
+ lines.push(`- **Total Repositories:** ${context.repositories.length}`);
155
+ lines.push(`- **Total Files:** ${context.totalFiles}\n`);
156
+
157
+ lines.push(`## Repositories`);
158
+ for (const repo of context.repositories) {
159
+ lines.push(`\n### ${repo.name}`);
160
+ lines.push(`- **Path:** ${repo.path}`);
161
+ lines.push(`- **Files:** ${repo.files.length}`);
162
+
163
+ const deps = context.crossRepoDependencies.get(repo.name);
164
+ if (deps && deps.length > 0) {
165
+ lines.push(`- **Dependencies:** ${deps.join(', ')}`);
166
+ }
167
+ }
168
+
169
+ if (context.crossRepoDependencies.size > 0) {
170
+ lines.push(`\n## Cross-Repository Dependencies`);
171
+ for (const [repo, deps] of context.crossRepoDependencies) {
172
+ lines.push(`- **${repo}** depends on: ${deps.join(', ')}`);
173
+ }
174
+ }
175
+
176
+ return lines.join('\n');
177
+ }
@@ -0,0 +1,2 @@
1
// Public entry points for the vector-index (RAG) module.
export { VectorIndex, createVectorIndex, semanticSearch } from './vectorIndex.js';
export type { VectorDocument, SearchResult } from './vectorIndex.js';
@@ -0,0 +1,105 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+
4
/** A document stored in the vector index alongside its embedding. */
export interface VectorDocument {
  /** Unique id; adding a document with an existing id replaces it. */
  id: string;
  content: string;
  /** Numeric embedding vector (100 dimensions when produced by VectorIndex). */
  embedding: number[];
  metadata: {
    filePath: string;
    language?: string;
    type?: 'function' | 'class' | 'interface' | 'variable';
  };
}

/** A search hit: the matched document and its cosine-similarity score. */
export interface SearchResult {
  document: VectorDocument;
  score: number;
}
19
+
20
+ export class VectorIndex {
21
+ private documents: Map<string, VectorDocument> = new Map();
22
+ private indexPath: string;
23
+
24
+ constructor(indexPath: string) {
25
+ this.indexPath = indexPath;
26
+ this.load();
27
+ }
28
+
29
+ private load(): void {
30
+ if (fs.existsSync(this.indexPath)) {
31
+ try {
32
+ const data = JSON.parse(fs.readFileSync(this.indexPath, 'utf-8'));
33
+ this.documents = new Map(Object.entries(data));
34
+ } catch {
35
+ this.documents = new Map();
36
+ }
37
+ }
38
+ }
39
+
40
+ save(): void {
41
+ const data = Object.fromEntries(this.documents);
42
+ fs.writeFileSync(this.indexPath, JSON.stringify(data, null, 2));
43
+ }
44
+
45
+ addDocument(doc: VectorDocument): void {
46
+ this.documents.set(doc.id, doc);
47
+ }
48
+
49
+ search(query: string, topK: number = 5): SearchResult[] {
50
+ const queryEmbedding = this.simpleEmbedding(query);
51
+ const results: SearchResult[] = [];
52
+
53
+ for (const doc of this.documents.values()) {
54
+ const score = this.cosineSimilarity(queryEmbedding, doc.embedding);
55
+ results.push({ document: doc, score });
56
+ }
57
+
58
+ return results
59
+ .sort((a, b) => b.score - a.score)
60
+ .slice(0, topK);
61
+ }
62
+
63
+ private simpleEmbedding(text: string): number[] {
64
+ const words = text.toLowerCase().split(/\s+/);
65
+ const embedding: number[] = new Array(100).fill(0);
66
+
67
+ for (let i = 0; i < words.length && i < 100; i++) {
68
+ let hash = 0;
69
+ for (const char of words[i]) {
70
+ hash = ((hash << 5) - hash) + char.charCodeAt(0);
71
+ hash = hash & hash;
72
+ }
73
+ embedding[i] = Math.sin(hash) * 0.5 + 0.5;
74
+ }
75
+
76
+ return embedding;
77
+ }
78
+
79
+ private cosineSimilarity(a: number[], b: number[]): number {
80
+ let dotProduct = 0;
81
+ let normA = 0;
82
+ let normB = 0;
83
+
84
+ for (let i = 0; i < a.length; i++) {
85
+ dotProduct += a[i] * b[i];
86
+ normA += a[i] * a[i];
87
+ normB += b[i] * b[i];
88
+ }
89
+
90
+ if (normA === 0 || normB === 0) return 0;
91
+ return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
92
+ }
93
+ }
94
+
95
/** Create a VectorIndex backed by indexPath (loads it if the file exists). */
export function createVectorIndex(indexPath: string): VectorIndex {
  return new VectorIndex(indexPath);
}
98
+
99
/**
 * Convenience wrapper around VectorIndex.search.
 *
 * @param topK maximum number of results to return (default 5)
 */
export function semanticSearch(
  index: VectorIndex,
  query: string,
  topK: number = 5
): SearchResult[] {
  return index.search(query, topK);
}
@@ -0,0 +1 @@
1
// Public entry points for the MCP stdio server.
export { startMCPServer, startMCP } from './server.js';
@@ -0,0 +1,179 @@
1
+ import { Server } from '@modelcontextprotocol/sdk/server/index.js';
2
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
3
+ import {
4
+ CallToolRequestSchema,
5
+ ListToolsRequestSchema,
6
+ } from '@modelcontextprotocol/sdk/types.js';
7
+ import { runAIFirst } from '../commands/ai-first.js';
8
+ import { generateIndex } from '../core/indexer.js';
9
+ import { buildKnowledgeGraph } from '../core/knowledgeGraphBuilder.js';
10
+ import { analyzeArchitecture } from '../analyzers/architecture.js';
11
+
12
/** Options for startMCPServer. */
interface MCPServerOptions {
  /** Repository root; defaults to process.cwd(). */
  rootDir?: string;
  /** Output directory for generated context; defaults to `<rootDir>/ai-context`. */
  aiDir?: string;
}
16
+
17
+ export function startMCPServer(options: MCPServerOptions = {}): void {
18
+ const rootDir = options.rootDir || process.cwd();
19
+ const aiDir = options.aiDir || `${rootDir}/ai-context`;
20
+
21
+ const server = new Server(
22
+ {
23
+ name: 'ai-first-cli',
24
+ version: '1.4.0',
25
+ },
26
+ {
27
+ capabilities: {
28
+ tools: {},
29
+ },
30
+ }
31
+ );
32
+
33
+ server.setRequestHandler(ListToolsRequestSchema, async () => {
34
+ return {
35
+ tools: [
36
+ {
37
+ name: 'generate_context',
38
+ description: 'Generate AI context for the repository or a specific module',
39
+ inputSchema: {
40
+ type: 'object',
41
+ properties: {
42
+ module: {
43
+ type: 'string',
44
+ description: 'Optional module path to generate context for (e.g., "src/auth")',
45
+ },
46
+ preset: {
47
+ type: 'string',
48
+ enum: ['full', 'quick', 'api', 'docs'],
49
+ description: 'Preset to use for context generation',
50
+ },
51
+ },
52
+ },
53
+ },
54
+ {
55
+ name: 'query_symbols',
56
+ description: 'Query symbols (functions, classes, interfaces) in the indexed repository',
57
+ inputSchema: {
58
+ type: 'object',
59
+ properties: {
60
+ query: {
61
+ type: 'string',
62
+ description: 'Search query for symbols',
63
+ },
64
+ type: {
65
+ type: 'string',
66
+ enum: ['function', 'class', 'interface', 'variable', 'all'],
67
+ description: 'Type of symbol to search for',
68
+ },
69
+ },
70
+ required: ['query'],
71
+ },
72
+ },
73
+ {
74
+ name: 'get_architecture',
75
+ description: 'Get the architecture analysis of the project',
76
+ inputSchema: {
77
+ type: 'object',
78
+ properties: {
79
+ format: {
80
+ type: 'string',
81
+ enum: ['summary', 'detailed'],
82
+ description: 'Level of detail for the architecture report',
83
+ },
84
+ },
85
+ },
86
+ },
87
+ ],
88
+ };
89
+ });
90
+
91
+ server.setRequestHandler(CallToolRequestSchema, async (request) => {
92
+ const { name, arguments: args } = request.params;
93
+
94
+ try {
95
+ switch (name) {
96
+ case 'generate_context': {
97
+ const result = await runAIFirst({
98
+ rootDir,
99
+ outputDir: aiDir,
100
+ });
101
+ return {
102
+ content: [
103
+ {
104
+ type: 'text',
105
+ text: JSON.stringify({
106
+ success: result.success,
107
+ filesCreated: result.filesCreated,
108
+ message: result.success
109
+ ? `Generated context in ${aiDir}`
110
+ : `Error: ${result.error}`,
111
+ }, null, 2),
112
+ },
113
+ ],
114
+ };
115
+ }
116
+
117
+ case 'query_symbols': {
118
+ const query = args?.query as string;
119
+ const symbolType = (args?.type as string) || 'all';
120
+
121
+ const index = generateIndex(rootDir, aiDir);
122
+
123
+ return {
124
+ content: [
125
+ {
126
+ type: 'text',
127
+ text: JSON.stringify({
128
+ query,
129
+ type: symbolType,
130
+ results: [],
131
+ message: `Symbol query executed for "${query}"`,
132
+ }, null, 2),
133
+ },
134
+ ],
135
+ };
136
+ }
137
+
138
+ case 'get_architecture': {
139
+ const format = (args?.format as string) || 'summary';
140
+
141
+ return {
142
+ content: [
143
+ {
144
+ type: 'text',
145
+ text: JSON.stringify({
146
+ format,
147
+ rootDir,
148
+ message: 'Architecture analysis available',
149
+ }, null, 2),
150
+ },
151
+ ],
152
+ };
153
+ }
154
+
155
+ default:
156
+ throw new Error(`Unknown tool: ${name}`);
157
+ }
158
+ } catch (error) {
159
+ return {
160
+ content: [
161
+ {
162
+ type: 'text',
163
+ text: JSON.stringify({
164
+ error: error instanceof Error ? error.message : String(error),
165
+ }, null, 2),
166
+ },
167
+ ],
168
+ isError: true,
169
+ };
170
+ }
171
+ });
172
+
173
+ const transport = new StdioServerTransport();
174
+ server.connect(transport);
175
+
176
+ console.error('AI-First MCP Server running on stdio');
177
+ }
178
+
179
// Convenience short alias for startMCPServer.
export { startMCPServer as startMCP };