project-graph-mcp 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,177 @@
1
+ /**
2
+ * Framework Reference System
3
+ * Loads framework-specific AI references from GitHub (with caching) or local files
4
+ */
5
+
6
+ import { readFileSync, readdirSync, existsSync, writeFileSync } from 'fs';
7
+ import { join, basename, dirname } from 'path';
8
+ import { fileURLToPath } from 'url';
9
+ import { detectProjectRuleSets } from './custom-rules.js';
10
+
11
+ const __dirname = dirname(fileURLToPath(import.meta.url));
12
+ const REFERENCES_DIR = join(__dirname, '..', 'references');
13
+
14
/**
 * Remote sources for framework references
 * Maps reference name to raw GitHub URL
 * Names not listed here can still resolve via local files in REFERENCES_DIR.
 */
const REMOTE_SOURCES = {
  'symbiote-3x': 'https://raw.githubusercontent.com/symbiotejs/symbiote.js/main/AI_REFERENCE.md',
};

/**
 * In-memory, per-process cache of fetched reference contents.
 * @type {Map<string, {content: string, fetchedAt: number}>}
 */
const cache = new Map();

/** Cache TTL: 1 hour (milliseconds); entries older than this are re-fetched */
const CACHE_TTL = 60 * 60 * 1000;
27
+
28
/**
 * Fetch reference from GitHub with caching
 * Falls back to local file if fetch fails
 * @param {string} name - Reference name
 * @returns {Promise<{content: string, source: string}>} source is one of
 *   'cache', 'github (<url>)', 'local', or 'not_found' (with empty content)
 */
async function fetchReference(name) {
  const localPath = join(REFERENCES_DIR, `${name}.md`);

  // Serve from the in-memory cache while the entry is still fresh
  const entry = cache.get(name);
  if (entry && Date.now() - entry.fetchedAt < CACHE_TTL) {
    return { content: entry.content, source: 'cache' };
  }

  // Prefer the remote copy when a GitHub URL is registered for this name
  const url = REMOTE_SOURCES[name];
  if (url) {
    try {
      const response = await fetch(url, { signal: AbortSignal.timeout(5000) });
      if (response.ok) {
        const content = await response.text();
        cache.set(name, { content, fetchedAt: Date.now() });

        try {
          // Keep a local copy so the fallback path stays current
          writeFileSync(localPath, content, 'utf-8');
        } catch (e) {
          // Best-effort backup; a failed write is non-critical
        }

        return { content, source: `github (${url})` };
      }
    } catch (e) {
      // Network error or timeout — continue to the local fallback
    }
  }

  // Local file fallback (also used when no remote URL is registered)
  if (existsSync(localPath)) {
    const content = readFileSync(localPath, 'utf-8');
    cache.set(name, { content, fetchedAt: Date.now() });
    return { content, source: 'local' };
  }

  // Nothing remote, nothing local
  return { content: '', source: 'not_found' };
}
75
+
76
/**
 * Map ruleset names to reference names
 * Several rulesets may share one reference document: symbiote 2.x projects
 * are served the 3.x reference since no separate 2.x document exists.
 */
const RULESET_TO_REFERENCE = {
  'symbiote-3x': 'symbiote-3x',
  'symbiote-2x': 'symbiote-3x',
};
83
+
84
/**
 * List available framework references (local + remote)
 * Union of the registered remote names and any .md files on disk,
 * deduplicated; order is remote names first, then local-only ones.
 * @returns {string[]}
 */
function listAvailable() {
  const names = new Set(Object.keys(REMOTE_SOURCES));

  if (existsSync(REFERENCES_DIR)) {
    const markdownFiles = readdirSync(REFERENCES_DIR).filter((f) => f.endsWith('.md'));
    for (const file of markdownFiles) {
      names.add(basename(file, '.md'));
    }
  }

  return [...names];
}
101
+
102
/**
 * Get framework reference content
 * Resolution order: explicit `framework` name, then auto-detection from
 * `path`, otherwise an error listing what is available.
 * @param {Object} options
 * @param {string} [options.framework] - Explicit framework name
 * @param {string} [options.path] - Project path for auto-detection
 * @returns {Promise<Object>} reference payload, or `{ error, ... }` on failure
 */
export async function getFrameworkReference(options = {}) {
  const available = listAvailable();
  const { framework, path } = options;

  // Case 1: caller named a framework explicitly
  if (framework) {
    if (!available.includes(framework)) {
      return {
        error: `Framework reference '${framework}' not found`,
        available,
      };
    }

    const { content, source } = await fetchReference(framework);
    if (!content) {
      return { error: `Failed to load reference '${framework}'`, available };
    }

    return {
      framework,
      source,
      lines: content.split('\n').length,
      content,
    };
  }

  // Case 2: auto-detect frameworks from the project at the given path
  if (path) {
    const { detected, reasons } = detectProjectRuleSets(path);

    // Map detected rulesets onto loadable references, deduplicated
    const matchedRefs = [];
    for (const ruleset of detected) {
      const refName = RULESET_TO_REFERENCE[ruleset];
      const usable = refName && available.includes(refName);
      if (usable && !matchedRefs.includes(refName)) {
        matchedRefs.push(refName);
      }
    }

    if (matchedRefs.length === 0) {
      return {
        error: 'No framework references found for this project',
        detected,
        reasons,
        available,
      };
    }

    // Load all matched references concurrently and join non-empty ones
    const results = await Promise.all(matchedRefs.map(fetchReference));
    const contents = results.map((r) => r.content).filter(Boolean);

    return {
      frameworks: matchedRefs,
      sources: results.map((r) => r.source),
      detected: { rulesets: detected, reasons },
      lines: contents.reduce((sum, c) => sum + c.split('\n').length, 0),
      content: contents.join('\n\n---\n\n'),
    };
  }

  // Case 3: nothing specified — describe what could be requested
  return {
    error: 'Specify framework name or path for auto-detection',
    available: available.map((name) => ({
      name,
      remote: !!REMOTE_SOURCES[name],
      url: REMOTE_SOURCES[name] ?? null,
    })),
  };
}
@@ -0,0 +1,159 @@
1
+ /**
2
+ * Full Analysis - Comprehensive Code Health Report
3
+ * Runs all analysis tools and generates a health score
4
+ */
5
+
6
+ import { getDeadCode } from './dead-code.js';
7
+ import { getUndocumentedSummary } from './undocumented.js';
8
+ import { getSimilarFunctions } from './similar-functions.js';
9
+ import { getComplexity } from './complexity.js';
10
+ import { getLargeFiles } from './large-files.js';
11
+ import { getOutdatedPatterns } from './outdated-patterns.js';
12
+
13
+ /**
14
+ * @typedef {Object} AnalysisResult
15
+ * @property {Object} deadCode
16
+ * @property {Object} undocumented
17
+ * @property {Object} similar
18
+ * @property {Object} complexity
19
+ * @property {Object} largeFiles
20
+ * @property {Object} outdated
21
+ * @property {Object} overall
22
+ */
23
+
24
/**
 * Calculate health score from analysis results
 *
 * Starts at 100 and subtracts a capped penalty per category:
 *   dead code:         -2 per item               (max -20)
 *   undocumented:      -0.5 per item             (max -15)
 *   similar pairs:     -3 per pair               (max -15)
 *   complexity:        -5 critical / -2 high     (max -20)
 *   large files:       -4 critical / -1 warning  (max -10)
 *   outdated patterns: -3 error / -1 warning     (max -10)
 *
 * @param {Object} results - Per-category analyzer outputs (deadCode,
 *   undocumented, similar, complexity, largeFiles, outdated)
 * @returns {{score: number, rating: string, topIssues: string[]}} score in
 *   [0, 100], rating bucket, and up to five headline issues
 */
function calculateHealthScore(results) {
  let score = 100;
  const topIssues = [];

  // Dead code penalty: -2 per item (max -20)
  const deadPenalty = Math.min(results.deadCode.total * 2, 20);
  score -= deadPenalty;
  if (results.deadCode.total > 0) {
    topIssues.push(`${results.deadCode.total} unused functions/classes`);
  }

  // Undocumented penalty: -0.5 per item (max -15); only flagged past 10 items
  const undocPenalty = Math.min(results.undocumented.total * 0.5, 15);
  score -= undocPenalty;
  if (results.undocumented.total > 10) {
    topIssues.push(`${results.undocumented.total} undocumented items`);
  }

  // Similar functions penalty: -3 per pair (max -15)
  const similarPenalty = Math.min(results.similar.total * 3, 15);
  score -= similarPenalty;
  if (results.similar.total > 0) {
    topIssues.push(`${results.similar.total} similar function pairs`);
  }

  // Complexity penalty: -5 per critical, -2 per high (max -20)
  const criticalCount = results.complexity.stats?.critical || 0;
  const highCount = results.complexity.stats?.high || 0;
  const complexityPenalty = Math.min(criticalCount * 5 + highCount * 2, 20);
  score -= complexityPenalty;
  if (criticalCount > 0) {
    topIssues.push(`${criticalCount} critical complexity functions`);
  }

  // Large files penalty: -4 per critical, -1 per warning (max -10)
  const largeCritical = results.largeFiles.stats?.critical || 0;
  const largeWarning = results.largeFiles.stats?.warning || 0;
  const largePenalty = Math.min(largeCritical * 4 + largeWarning * 1, 10);
  score -= largePenalty;
  if (largeCritical > 0) {
    topIssues.push(`${largeCritical} files need splitting`);
  }

  // Outdated patterns penalty: -3 per error, -1 per warning (max -10)
  const errorPatterns = results.outdated.stats?.bySeverity?.error || 0;
  const warningPatterns = results.outdated.stats?.bySeverity?.warning || 0;
  const outdatedPenalty = Math.min(errorPatterns * 3 + warningPatterns * 1, 10);
  score -= outdatedPenalty;
  // FIX: guard a missing redundantDeps array, matching the optional-chained
  // stats reads above — the previous unguarded `.length` read threw a
  // TypeError when the outdated analyzer omitted the field.
  const redundantDeps = results.outdated.redundantDeps ?? [];
  if (redundantDeps.length > 0) {
    topIssues.push(`${redundantDeps.length} redundant npm dependencies`);
  }

  // Clamp to [0, 100] and round to an integer
  score = Math.max(0, Math.min(100, Math.round(score)));

  // Bucket the score into a human-readable rating
  let rating;
  if (score >= 90) rating = 'excellent';
  else if (score >= 70) rating = 'good';
  else if (score >= 50) rating = 'warning';
  else rating = 'critical';

  // Cap the headline list at five issues
  return { score, rating, topIssues: topIssues.slice(0, 5) };
}
93
+
94
/**
 * Run full analysis on directory
 *
 * Executes every analyzer concurrently, then condenses the results into
 * per-category summaries plus an overall health score.
 *
 * @param {string} dir - Directory to analyze
 * @param {Object} [options]
 * @param {boolean} [options.includeItems=false] - Include individual items
 * @returns {Promise<AnalysisResult>}
 */
export async function getFullAnalysis(dir, options = {}) {
  const includeItems = options.includeItems || false;

  // The analyzers are independent of one another — run them in parallel
  const [deadCode, undocumented, similar, complexity, largeFiles, outdated] = await Promise.all([
    getDeadCode(dir),
    getUndocumentedSummary(dir, 'tests'),
    getSimilarFunctions(dir, { threshold: 70 }),
    getComplexity(dir, { minComplexity: 5 }),
    getLargeFiles(dir),
    getOutdatedPatterns(dir),
  ]);

  // Overall health derived from every category's results
  const overall = calculateHealthScore({
    deadCode,
    undocumented,
    similar,
    complexity,
    largeFiles,
    outdated,
  });

  // Summaries only by default; includeItems attaches truncated detail lists
  return {
    deadCode: {
      total: deadCode.total,
      byType: deadCode.byType,
      ...(includeItems && { items: deadCode.items.slice(0, 10) }),
    },
    undocumented: {
      total: undocumented.total,
      byType: undocumented.byType,
      ...(includeItems && { items: undocumented.items.slice(0, 10) }),
    },
    similar: {
      total: similar.total,
      ...(includeItems && { pairs: similar.pairs.slice(0, 5) }),
    },
    complexity: {
      total: complexity.total,
      stats: complexity.stats,
      ...(includeItems && { items: complexity.items.slice(0, 10) }),
    },
    largeFiles: {
      total: largeFiles.total,
      stats: largeFiles.stats,
      ...(includeItems && { items: largeFiles.items.slice(0, 10) }),
    },
    outdated: {
      totalPatterns: outdated.stats.totalPatterns,
      redundantDeps: outdated.redundantDeps,
      ...(includeItems && { codePatterns: outdated.codePatterns.slice(0, 10) }),
    },
    overall,
  };
}
@@ -0,0 +1,269 @@
1
+ /**
2
+ * Graph Builder - Creates minified project graph from parsed data
3
+ */
4
+
5
+ /**
6
+ * @typedef {Object} GraphNode
7
+ * @property {string} t - type (class/func)
8
+ * @property {string} [x] - extends
9
+ * @property {string[]} [m] - methods
10
+ * @property {string[]} [$] - properties (init$)
11
+ * @property {string[]} [i] - imports
12
+ * @property {string[]} [→] - calls (outgoing)
13
+ * @property {string[]} [←] - usedBy (incoming)
14
+ * @property {string} [f] - source file path
15
+ * @property {boolean} [e] - exported flag (functions)
16
+ */
17
+
18
+ /**
19
+ * @typedef {Object} Graph
20
+ * @property {number} v - version
21
+ * @property {Object<string, string>} legend - minified name → full name
22
+ * @property {Object<string, string>} reverseLegend - full name → minified
23
+ * @property {Object} stats - { files, classes, functions }
24
+ * @property {Object<string, GraphNode>} nodes
25
+ * @property {Array<[string, string, string]>} edges - [from, type, to]
26
+ * @property {string[]} orphans
27
+ * @property {Object<string, string[]>} duplicates
28
+ * @property {string[]} files - list of parsed file paths
29
+ */
30
+
31
/**
 * Create minified legend from names
 * Strategy: Use camelCase initials + suffix if collision
 * Each name gets its short form; on a clash, a numeric suffix (1, 2, ...)
 * is appended until the candidate is unique.
 * @param {string[]} names
 * @returns {Object<string, string>} full name → minified name
 */
export function minifyLegend(names) {
  const legend = {};
  const taken = new Set();

  for (const name of names) {
    const base = createShortName(name);
    let candidate = base;

    // Resolve collisions by trying base1, base2, ... in order
    for (let n = 1; taken.has(candidate); n++) {
      candidate = base + n;
    }

    taken.add(candidate);
    legend[name] = candidate;
  }

  return legend;
}
56
+
57
/**
 * Create short name from full name
 * SymNode → SN, togglePin → tP, autoArrange → aA
 * @param {string} name
 * @returns {string}
 */
function createShortName(name) {
  // PascalCase-like names: stripping lowercase letters leaves the capitals
  // (plus any digits/symbols); with two or more left, keep up to three.
  const withoutLowercase = name.replace(/[a-z]/g, '');
  if (withoutLowercase.length >= 2) {
    return withoutLowercase.slice(0, 3);
  }

  // camelCase: lowercased first character plus the first capital letter
  const capitals = name.match(/[A-Z]/g);
  if (capitals && capitals.length > 0) {
    return name[0].toLowerCase() + capitals[0];
  }

  // No capitals at all: fall back to the first two characters
  return name.slice(0, 2);
}
79
+
80
/**
 * Build graph from parsed project data
 *
 * Produces minified class/function nodes, call edges, a list of orphan
 * (never-called, non-exported) functions, and duplicate method locations.
 *
 * @param {import('./parser.js').ParseResult} parsed - Expected to carry
 *   `classes`, `functions`, and `files` arrays (each optional; defaults to
 *   empty). Per-class fields used: name, extends, methods, properties,
 *   imports, file, calls, line. Per-function: name, exported, file.
 * @returns {Graph}
 */
export function buildGraph(parsed) {
  // Collect all names for legend
  const classes = parsed.classes || [];
  const functions = parsed.functions || [];

  // Class names, function names, and method names all share one namespace
  // in the legend; duplicates are removed before minification.
  const allNames = [
    ...classes.map(c => c.name),
    ...functions.map(f => f.name),
    ...classes.flatMap(c => c.methods || []),
  ];

  const legend = minifyLegend([...new Set(allNames)]);
  // Inverse mapping (minified → full) for lookups like orphan reporting
  const reverseLegend = Object.fromEntries(
    Object.entries(legend).map(([k, v]) => [v, k])
  );

  const graph = {
    v: 1,
    legend,
    reverseLegend,
    stats: {
      files: (parsed.files || []).length,
      classes: classes.length,
      functions: functions.length,
    },
    nodes: {},
    edges: [],
    orphans: [],
    duplicates: {},
    files: parsed.files || [],
  };

  // Build class nodes (keyed by minified name)
  for (const cls of classes) {
    const shortName = legend[cls.name];
    graph.nodes[shortName] = {
      t: 'C',
      // Optional fields are stored as undefined when absent/empty so they
      // disappear from JSON output.
      x: cls.extends || undefined,
      // Method names are minified when they appear in the legend;
      // unknown names pass through unchanged.
      m: (cls.methods || []).map(m => legend[m] || m),
      $: (cls.properties || []).length ? cls.properties : undefined,
      i: cls.imports?.length ? cls.imports : undefined,
      f: cls.file || undefined,
    };

    // Build edges from calls; calls whose target is not in the legend
    // (e.g. external APIs) are dropped.
    for (const call of cls.calls || []) {
      if (call.includes('.')) {
        // Class.method() pattern
        const [target, method] = call.split('.');
        if (legend[target]) {
          const edge = [shortName, '→', `${legend[target]}.${legend[method] || method}`];
          graph.edges.push(edge);
        }
      } else {
        // Standalone function call
        if (legend[call]) {
          const edge = [shortName, '→', legend[call]];
          graph.edges.push(edge);
        }
      }
    }
  }

  // Build function nodes (may overwrite a class node if names collide,
  // though legend deduplication makes that unlikely)
  for (const func of functions) {
    const shortName = legend[func.name];
    graph.nodes[shortName] = {
      t: 'F',
      e: func.exported,
      f: func.file || undefined,
    };
  }

  // Detect orphans (nodes with no incoming edges).
  // Edge targets of the form "Cls.method" count as incoming for "Cls".
  const hasIncoming = new Set();
  for (const edge of graph.edges) {
    const target = edge[2].split('.')[0];
    hasIncoming.add(target);
  }

  // Only non-exported functions qualify as orphans; exported functions and
  // classes are assumed reachable from outside the parsed set.
  for (const name of Object.keys(graph.nodes)) {
    if (!hasIncoming.has(name) && graph.nodes[name].t === 'F' && !graph.nodes[name].e) {
      graph.orphans.push(reverseLegend[name]);
    }
  }

  // Detect duplicates (same method name in multiple classes), recording
  // each occurrence as "ClassName:line"
  const methodLocations = {};
  for (const cls of classes) {
    for (const method of cls.methods || []) {
      if (!methodLocations[method]) {
        methodLocations[method] = [];
      }
      methodLocations[method].push(`${cls.name}:${cls.line}`);
    }
  }

  for (const [method, locations] of Object.entries(methodLocations)) {
    if (locations.length > 1) {
      graph.duplicates[method] = locations;
    }
  }

  return graph;
}
190
+
191
/**
 * Create compact skeleton (minimal tokens)
 *
 * Condenses a full graph into counts: class nodes become method/property
 * counts, exported functions are grouped by file, and files not otherwise
 * represented are listed as a directory tree.
 *
 * @param {Graph} graph
 * @returns {Object} { v, L: legend, s: stats, n: class nodes,
 *   X: exports by file, e/o/d: edge/orphan/duplicate counts, f?: file tree }
 */
export function createSkeleton(graph) {
  const legend = {};
  const nodes = {};

  // Pass 1: class nodes → { m: methodCount, $?: propCount, f?: file }.
  // graph.legend maps fullName → shortName; empty classes are dropped.
  for (const [fullName, shortName] of Object.entries(graph.legend)) {
    const node = graph.nodes[shortName];
    if (!node || node.t !== 'C') continue;

    const methodCount = node.m?.length || 0;
    const propCount = node.$?.length || 0;
    if (methodCount === 0 && propCount === 0) continue; // skip empty classes

    legend[shortName] = fullName;
    const summary = { m: methodCount };
    if (propCount > 0) summary.$ = propCount;
    if (node.f) summary.f = node.f;
    nodes[shortName] = summary;
  }

  // Pass 2: exported functions grouped by file, short names into the legend
  const exportsByFile = {};
  for (const [fullName, shortName] of Object.entries(graph.legend)) {
    const node = graph.nodes[shortName];
    if (node?.t !== 'F' || !node.e) continue;

    legend[shortName] = fullName;
    const file = node.f || '?';
    (exportsByFile[file] ??= []).push(shortName);
  }

  // Files already represented by a class summary or an export group
  const coveredFiles = new Set(Object.keys(exportsByFile));
  for (const summary of Object.values(nodes)) {
    if (summary.f) coveredFiles.add(summary.f);
  }

  // Remaining files, grouped by directory (trailing slash kept on dirs;
  // files with no slash fall under './')
  const fileTree = {};
  for (const filePath of graph.files || []) {
    if (coveredFiles.has(filePath)) continue;
    const cut = filePath.lastIndexOf('/');
    const dir = cut >= 0 ? filePath.slice(0, cut + 1) : './';
    const base = cut >= 0 ? filePath.slice(cut + 1) : filePath;
    (fileTree[dir] ??= []).push(base);
  }

  const skeleton = {
    v: graph.v,
    L: legend,
    s: graph.stats,
    n: nodes,
    X: exportsByFile,
    e: graph.edges.length,
    o: graph.orphans.length,
    d: Object.keys(graph.duplicates).length,
  };

  // Only attach the file tree when there are uncovered files
  if (Object.keys(fileTree).length > 0) {
    skeleton.f = fileTree;
  }

  return skeleton;
}