typegraph-mcp 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +17 -0
- package/.cursor-plugin/plugin.json +17 -0
- package/.mcp.json +10 -0
- package/LICENSE +21 -0
- package/README.md +451 -0
- package/benchmark.ts +735 -0
- package/check.ts +459 -0
- package/cli.ts +778 -0
- package/commands/check.md +23 -0
- package/commands/test.md +23 -0
- package/config.ts +50 -0
- package/gemini-extension.json +16 -0
- package/graph-queries.ts +462 -0
- package/hooks/hooks.json +15 -0
- package/module-graph.ts +507 -0
- package/package.json +39 -0
- package/scripts/ensure-deps.sh +34 -0
- package/server.ts +837 -0
- package/skills/code-exploration/SKILL.md +55 -0
- package/skills/dependency-audit/SKILL.md +50 -0
- package/skills/impact-analysis/SKILL.md +52 -0
- package/skills/refactor-safety/SKILL.md +50 -0
- package/skills/tool-selection/SKILL.md +79 -0
- package/smoke-test.ts +500 -0
- package/tsserver-client.ts +413 -0
package/module-graph.ts
ADDED
|
@@ -0,0 +1,507 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Module Graph — Import/export dependency graph using oxc-parser + oxc-resolver.
|
|
3
|
+
*
|
|
4
|
+
* Builds a bidirectional graph of import edges across all TypeScript source files
|
|
5
|
+
* in the project. Supports incremental updates via fs.watch.
|
|
6
|
+
*
|
|
7
|
+
* Graph stores absolute paths internally. Consumers convert to relative paths.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { parseSync } from "oxc-parser";
|
|
11
|
+
import { ResolverFactory } from "oxc-resolver";
|
|
12
|
+
import * as fs from "node:fs";
|
|
13
|
+
import * as path from "node:path";
|
|
14
|
+
|
|
15
|
+
// ─── Types ───────────────────────────────────────────────────────────────────
|
|
16
|
+
|
|
17
|
+
/**
 * One directed import edge in the module graph.
 *
 * Direction depends on which map the edge lives in: in `ModuleGraph.forward`
 * `target` is the imported file; in `ModuleGraph.reverse` `target` is the
 * importing file (see buildReverseMap).
 */
export interface ImportEdge {
  target: string; // absolute resolved file path
  specifiers: string[]; // ["UserService", "createUser"] or ["*"] for star re-exports
  isTypeOnly: boolean; // import type { ... } — only reliable for imports, not re-exports
  isDynamic: boolean; // import("./lazy")
}
|
|
23
|
+
|
|
24
|
+
/**
 * Bidirectional import graph over absolute file paths.
 *
 * `forward` and `reverse` are mirror images of each other and are kept in
 * sync by updateFile/removeFile during incremental updates.
 */
export interface ModuleGraph {
  forward: Map<string, ImportEdge[]>; // file → its imports
  reverse: Map<string, ImportEdge[]>; // file → files that import it
  files: Set<string>; // all known source files
}
|
|
29
|
+
|
|
30
|
+
/**
 * Result of buildGraph: the constructed graph plus the resolver instance,
 * which callers keep around for incremental updates (updateFile/startWatcher
 * take the same resolver).
 */
export interface BuildGraphResult {
  graph: ModuleGraph;
  resolver: ResolverFactory;
}
|
|
34
|
+
|
|
35
|
+
// ─── Logging ─────────────────────────────────────────────────────────────────
|
|
36
|
+
|
|
37
|
+
/** Emit a diagnostic line to stderr, tagged with this module's prefix. */
function log(...args: unknown[]): void {
  console.error("[typegraph/graph]", ...args);
}
|
|
38
|
+
|
|
39
|
+
// ─── Constants ───────────────────────────────────────────────────────────────
|
|
40
|
+
|
|
41
|
+
// Extensions treated as TypeScript sources (declaration files are filtered
// out separately by basename suffix checks).
const TS_EXTENSIONS = new Set([".ts", ".tsx", ".mts", ".cts"]);
// Directory names never traversed or watched — dependencies, build output,
// framework caches, and VCS metadata.
const SKIP_DIRS = new Set([
  "node_modules",
  "dist",
  "build",
  "out",
  ".wrangler",
  ".mf",
  ".git",
  ".next",
  ".turbo",
  "coverage",
]);
// Generated files excluded by exact basename.
// NOTE(review): presumably a TanStack Router artifact — confirm before extending.
const SKIP_FILES = new Set(["routeTree.gen.ts"]);
|
|
55
|
+
|
|
56
|
+
// ─── File Discovery ──────────────────────────────────────────────────────────
|
|
57
|
+
|
|
58
|
+
export function discoverFiles(rootDir: string): string[] {
|
|
59
|
+
const files: string[] = [];
|
|
60
|
+
|
|
61
|
+
function walk(dir: string): void {
|
|
62
|
+
let entries: fs.Dirent[];
|
|
63
|
+
try {
|
|
64
|
+
entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
65
|
+
} catch {
|
|
66
|
+
return;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
for (const entry of entries) {
|
|
70
|
+
if (entry.isDirectory()) {
|
|
71
|
+
if (SKIP_DIRS.has(entry.name)) continue;
|
|
72
|
+
// Skip hidden directories (except the root)
|
|
73
|
+
if (entry.name.startsWith(".") && dir !== rootDir) continue;
|
|
74
|
+
walk(path.join(dir, entry.name));
|
|
75
|
+
} else if (entry.isFile()) {
|
|
76
|
+
const name = entry.name;
|
|
77
|
+
if (SKIP_FILES.has(name)) continue;
|
|
78
|
+
if (name.endsWith(".d.ts") || name.endsWith(".d.mts") || name.endsWith(".d.cts")) continue;
|
|
79
|
+
const ext = path.extname(name);
|
|
80
|
+
if (TS_EXTENSIONS.has(ext)) {
|
|
81
|
+
files.push(path.join(dir, name));
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
walk(rootDir);
|
|
88
|
+
return files;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// ─── Parsing ─────────────────────────────────────────────────────────────────
|
|
92
|
+
|
|
93
|
+
/**
 * Parser-level view of one import statement group, before path resolution.
 * Produced by parseFileImports, consumed by buildForwardEdges/updateFile.
 */
interface RawImport {
  specifier: string; // module specifier (e.g. "./utils", "effect")
  names: string[]; // imported names, or ["*"] for star
  isTypeOnly: boolean; // true only when every entry was type-only (imports only)
  isDynamic: boolean; // true when it came from a dynamic import("...")
}
|
|
99
|
+
|
|
100
|
+
/**
 * Extract all import-like references from one source file using oxc-parser's
 * module record (staticImports / staticExports / dynamicImports) — no AST
 * walk needed.
 *
 * Covers static imports, static re-exports (`export ... from`), and dynamic
 * `import()` whose argument is a string literal.
 *
 * @param filePath absolute path — presumably used by parseSync to pick the
 *                 TS/TSX dialect from the extension; TODO confirm
 * @param source the file's full text
 * @returns one RawImport per specifier group, in encounter order
 * @throws whatever parseSync throws on unparseable input — callers catch
 */
function parseFileImports(filePath: string, source: string): RawImport[] {
  const result = parseSync(filePath, source);
  const imports: RawImport[] = [];

  // Static imports: import { X, Y } from "./foo"
  for (const imp of result.module.staticImports) {
    const specifier = imp.moduleRequest.value;
    const names: string[] = [];
    // A statement is type-only for our purposes iff every entry is type-only.
    let allTypeOnly = true;

    for (const entry of imp.entries) {
      const kind = entry.importName.kind as string;
      // Normalize: default import → "default", namespace/star forms → "*",
      // otherwise the imported name (falling back to the local alias).
      const name =
        kind === "Default"
          ? "default"
          : kind === "All" || kind === "AllButDefault" || kind === "NamespaceObject"
            ? "*"
            : (entry.importName.name ?? entry.localName.value);
      names.push(name);
      if (!entry.isType) allTypeOnly = false;
    }

    // If no entries (e.g. `import "./side-effect"`), it's a side-effect import
    if (names.length === 0) {
      imports.push({ specifier, names: ["*"], isTypeOnly: false, isDynamic: false });
    } else {
      imports.push({ specifier, names, isTypeOnly: allTypeOnly, isDynamic: false });
    }
  }

  // Static re-exports: export { X } from "./foo", export * from "./foo"
  for (const exp of result.module.staticExports) {
    for (const entry of exp.entries) {
      // Only re-exports have moduleRequest on the entry
      const moduleRequest = (entry as { moduleRequest?: { value: string } }).moduleRequest;
      if (!moduleRequest) continue;

      const specifier = moduleRequest.value;
      const entryKind = entry.importName.kind as string;
      const name =
        entryKind === "AllButDefault" || entryKind === "All" || entryKind === "NamespaceObject"
          ? "*"
          : (entry.importName.name ?? "*");

      // Group by specifier — multiple entries from same module
      // NOTE(review): this can merge a re-export into an earlier static
      // import of the same specifier, which keeps that entry's isTypeOnly
      // flag — confirm consumers are OK with the coarser grouping.
      const existing = imports.find((i) => i.specifier === specifier && !i.isDynamic);
      if (existing) {
        if (!existing.names.includes(name)) existing.names.push(name);
      } else {
        // oxc-parser doesn't expose isType on export entries, default false
        imports.push({ specifier, names: [name], isTypeOnly: false, isDynamic: false });
      }
    }
  }

  // Dynamic imports: import("./lazy")
  for (const di of result.module.dynamicImports) {
    if (di.moduleRequest) {
      // moduleRequest carries only a source span here, so slice the raw text.
      const sliced = source.slice(di.moduleRequest.start, di.moduleRequest.end);
      // Only include string literals (starts with ' or ") — template literals
      // and computed specifiers are deliberately ignored.
      if (sliced.startsWith("'") || sliced.startsWith('"')) {
        const specifier = sliced.slice(1, -1); // strip quotes
        imports.push({ specifier, names: ["*"], isTypeOnly: false, isDynamic: true });
      }
    }
  }

  return imports;
}
|
|
169
|
+
|
|
170
|
+
// ─── Resolution ──────────────────────────────────────────────────────────────
|
|
171
|
+
|
|
172
|
+
const SOURCE_EXTS = [".ts", ".tsx", ".mts", ".cts"];
|
|
173
|
+
|
|
174
|
+
/**
|
|
175
|
+
* Map a dist/ path back to its source .ts file.
|
|
176
|
+
*
|
|
177
|
+
* Monorepo pattern:
|
|
178
|
+
* packages: rootDir="src", outDir="dist" → dist/X.js → src/X.ts
|
|
179
|
+
* apps: rootDir=".", outDir="dist" → dist/X.js → X.ts
|
|
180
|
+
*/
|
|
181
|
+
function distToSource(resolvedPath: string, projectRoot: string): string {
|
|
182
|
+
// Only remap paths within the project that contain /dist/
|
|
183
|
+
if (!resolvedPath.startsWith(projectRoot)) return resolvedPath;
|
|
184
|
+
const rel = path.relative(projectRoot, resolvedPath);
|
|
185
|
+
const distIdx = rel.indexOf("dist" + path.sep);
|
|
186
|
+
if (distIdx === -1) return resolvedPath;
|
|
187
|
+
|
|
188
|
+
const prefix = rel.slice(0, distIdx); // e.g. "packages/core/" or "apps/gateway/"
|
|
189
|
+
const afterDist = rel.slice(distIdx + 5); // e.g. "index.js" or "schemas/index.js"
|
|
190
|
+
|
|
191
|
+
// Strip .js/.mjs/.cjs extension
|
|
192
|
+
const withoutExt = afterDist.replace(/\.(m?j|c)s$/, "");
|
|
193
|
+
|
|
194
|
+
// Strategy 1: packages pattern — dist/X → src/X
|
|
195
|
+
for (const ext of SOURCE_EXTS) {
|
|
196
|
+
const candidate = path.resolve(projectRoot, prefix, "src", withoutExt + ext);
|
|
197
|
+
if (fs.existsSync(candidate)) return candidate;
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
// Strategy 2: apps pattern — dist/X → X (rootDir is ".")
|
|
201
|
+
for (const ext of SOURCE_EXTS) {
|
|
202
|
+
const candidate = path.resolve(projectRoot, prefix, withoutExt + ext);
|
|
203
|
+
if (fs.existsSync(candidate)) return candidate;
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// Strategy 3: index file — dist/schemas/index.js → src/schemas/index.ts
|
|
207
|
+
// Already covered by strategy 1, but try without /index
|
|
208
|
+
if (withoutExt.endsWith("/index")) {
|
|
209
|
+
const dirPath = withoutExt.slice(0, -6);
|
|
210
|
+
for (const ext of SOURCE_EXTS) {
|
|
211
|
+
const candidate = path.resolve(projectRoot, prefix, "src", dirPath + ext);
|
|
212
|
+
if (fs.existsSync(candidate)) return candidate;
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
return resolvedPath;
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
function resolveImport(
|
|
220
|
+
resolver: ResolverFactory,
|
|
221
|
+
fromDir: string,
|
|
222
|
+
specifier: string,
|
|
223
|
+
projectRoot: string
|
|
224
|
+
): string | null {
|
|
225
|
+
try {
|
|
226
|
+
const result = resolver.sync(fromDir, specifier);
|
|
227
|
+
if (result.path && !result.path.includes("node_modules")) {
|
|
228
|
+
const mapped = distToSource(result.path, projectRoot);
|
|
229
|
+
// Only include TypeScript source files
|
|
230
|
+
const ext = path.extname(mapped);
|
|
231
|
+
if (!TS_EXTENSIONS.has(ext)) return null;
|
|
232
|
+
// Exclude skipped files
|
|
233
|
+
if (SKIP_FILES.has(path.basename(mapped))) return null;
|
|
234
|
+
return mapped;
|
|
235
|
+
}
|
|
236
|
+
} catch {
|
|
237
|
+
// Resolution failure — external dep, Node builtin, etc.
|
|
238
|
+
}
|
|
239
|
+
return null;
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
export function createResolver(projectRoot: string, tsconfigPath: string): ResolverFactory {
|
|
243
|
+
return new ResolverFactory({
|
|
244
|
+
tsconfig: {
|
|
245
|
+
configFile: path.resolve(projectRoot, tsconfigPath),
|
|
246
|
+
references: "auto",
|
|
247
|
+
},
|
|
248
|
+
extensions: [".ts", ".tsx", ".mts", ".cts", ".js", ".jsx", ".mjs", ".cjs"],
|
|
249
|
+
extensionAlias: {
|
|
250
|
+
".js": [".ts", ".tsx", ".js"],
|
|
251
|
+
".jsx": [".tsx", ".jsx"],
|
|
252
|
+
".mjs": [".mts", ".mjs"],
|
|
253
|
+
".cjs": [".cts", ".cjs"],
|
|
254
|
+
},
|
|
255
|
+
conditionNames: ["import", "require"],
|
|
256
|
+
mainFields: ["module", "main"],
|
|
257
|
+
});
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
// ─── Graph Construction ──────────────────────────────────────────────────────
|
|
261
|
+
|
|
262
|
+
function buildForwardEdges(
|
|
263
|
+
files: string[],
|
|
264
|
+
resolver: ResolverFactory,
|
|
265
|
+
projectRoot: string
|
|
266
|
+
): { forward: Map<string, ImportEdge[]>; parseFailures: string[] } {
|
|
267
|
+
const forward = new Map<string, ImportEdge[]>();
|
|
268
|
+
const parseFailures: string[] = [];
|
|
269
|
+
|
|
270
|
+
for (const filePath of files) {
|
|
271
|
+
let source: string;
|
|
272
|
+
try {
|
|
273
|
+
source = fs.readFileSync(filePath, "utf-8");
|
|
274
|
+
} catch {
|
|
275
|
+
continue;
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
let rawImports: RawImport[];
|
|
279
|
+
try {
|
|
280
|
+
rawImports = parseFileImports(filePath, source);
|
|
281
|
+
} catch (err) {
|
|
282
|
+
parseFailures.push(filePath);
|
|
283
|
+
continue;
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
const edges: ImportEdge[] = [];
|
|
287
|
+
const fromDir = path.dirname(filePath);
|
|
288
|
+
|
|
289
|
+
for (const raw of rawImports) {
|
|
290
|
+
const target = resolveImport(resolver, fromDir, raw.specifier, projectRoot);
|
|
291
|
+
if (target) {
|
|
292
|
+
edges.push({
|
|
293
|
+
target,
|
|
294
|
+
specifiers: raw.names,
|
|
295
|
+
isTypeOnly: raw.isTypeOnly,
|
|
296
|
+
isDynamic: raw.isDynamic,
|
|
297
|
+
});
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
forward.set(filePath, edges);
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
return { forward, parseFailures };
|
|
305
|
+
}
|
|
306
|
+
|
|
307
|
+
function buildReverseMap(forward: Map<string, ImportEdge[]>): Map<string, ImportEdge[]> {
|
|
308
|
+
const reverse = new Map<string, ImportEdge[]>();
|
|
309
|
+
|
|
310
|
+
for (const [source, edges] of forward) {
|
|
311
|
+
for (const edge of edges) {
|
|
312
|
+
let revEdges = reverse.get(edge.target);
|
|
313
|
+
if (!revEdges) {
|
|
314
|
+
revEdges = [];
|
|
315
|
+
reverse.set(edge.target, revEdges);
|
|
316
|
+
}
|
|
317
|
+
revEdges.push({
|
|
318
|
+
target: source, // reverse: the "target" is the file that imports
|
|
319
|
+
specifiers: edge.specifiers,
|
|
320
|
+
isTypeOnly: edge.isTypeOnly,
|
|
321
|
+
isDynamic: edge.isDynamic,
|
|
322
|
+
});
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
return reverse;
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
export async function buildGraph(
|
|
330
|
+
projectRoot: string,
|
|
331
|
+
tsconfigPath: string
|
|
332
|
+
): Promise<BuildGraphResult> {
|
|
333
|
+
const startTime = performance.now();
|
|
334
|
+
|
|
335
|
+
const resolver = createResolver(projectRoot, tsconfigPath);
|
|
336
|
+
const fileList = discoverFiles(projectRoot);
|
|
337
|
+
|
|
338
|
+
log(`Discovered ${fileList.length} source files`);
|
|
339
|
+
|
|
340
|
+
const { forward, parseFailures } = buildForwardEdges(fileList, resolver, projectRoot);
|
|
341
|
+
const reverse = buildReverseMap(forward);
|
|
342
|
+
const files = new Set(fileList);
|
|
343
|
+
|
|
344
|
+
const edgeCount = [...forward.values()].reduce((sum, edges) => sum + edges.length, 0);
|
|
345
|
+
const elapsed = (performance.now() - startTime).toFixed(0);
|
|
346
|
+
|
|
347
|
+
log(`Graph built: ${files.size} files, ${edgeCount} edges [${elapsed}ms]`);
|
|
348
|
+
if (parseFailures.length > 0) {
|
|
349
|
+
log(`Parse failures: ${parseFailures.length} files`);
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
return {
|
|
353
|
+
graph: { forward, reverse, files },
|
|
354
|
+
resolver,
|
|
355
|
+
};
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
// ─── Incremental Updates ─────────────────────────────────────────────────────
|
|
359
|
+
|
|
360
|
+
/**
 * Incrementally refresh one file's edges after a create/modify event.
 *
 * Order matters: the file's stale reverse-map entries are pruned *before*
 * re-reading, so a failed read or parse cannot leave dangling reverse edges.
 * Outcomes:
 * - unreadable file → removed from the graph entirely (treated as deleted)
 * - parse error → file kept registered with zero outgoing edges
 * - success → forward and reverse maps rebuilt for this file
 */
export function updateFile(
  graph: ModuleGraph,
  filePath: string,
  resolver: ResolverFactory,
  projectRoot: string
): void {
  // Remove old forward edges from reverse map
  const oldEdges = graph.forward.get(filePath) ?? [];
  for (const edge of oldEdges) {
    const revEdges = graph.reverse.get(edge.target);
    if (revEdges) {
      // One reverse entry exists per old forward edge, so removing the first
      // match per iteration clears them all.
      const idx = revEdges.findIndex((r) => r.target === filePath);
      if (idx !== -1) revEdges.splice(idx, 1);
      // Keep the reverse map sparse: no key for files nobody imports.
      if (revEdges.length === 0) graph.reverse.delete(edge.target);
    }
  }

  // Re-parse file
  let source: string;
  try {
    source = fs.readFileSync(filePath, "utf-8");
  } catch {
    // File unreadable — remove it
    removeFile(graph, filePath);
    return;
  }

  let rawImports: RawImport[];
  try {
    rawImports = parseFileImports(filePath, source);
  } catch {
    log(`Parse error on update: ${filePath}`);
    // Keep the file registered but edgeless until it parses again.
    // NOTE(review): graph.files is not updated on this path, so a brand-new
    // file that fails its first parse stays out of `files` — confirm intended.
    graph.forward.set(filePath, []);
    return;
  }

  // Build new edges
  const fromDir = path.dirname(filePath);
  const newEdges: ImportEdge[] = [];
  for (const raw of rawImports) {
    const target = resolveImport(resolver, fromDir, raw.specifier, projectRoot);
    if (target) {
      newEdges.push({
        target,
        specifiers: raw.names,
        isTypeOnly: raw.isTypeOnly,
        isDynamic: raw.isDynamic,
      });
    }
  }

  // Update forward map
  graph.forward.set(filePath, newEdges);
  graph.files.add(filePath);

  // Update reverse map
  for (const edge of newEdges) {
    let revEdges = graph.reverse.get(edge.target);
    if (!revEdges) {
      revEdges = [];
      graph.reverse.set(edge.target, revEdges);
    }
    revEdges.push({
      target: filePath,
      specifiers: edge.specifiers,
      isTypeOnly: edge.isTypeOnly,
      isDynamic: edge.isDynamic,
    });
  }
}
|
|
430
|
+
|
|
431
|
+
export function removeFile(graph: ModuleGraph, filePath: string): void {
|
|
432
|
+
// Remove forward edges from reverse map
|
|
433
|
+
const edges = graph.forward.get(filePath) ?? [];
|
|
434
|
+
for (const edge of edges) {
|
|
435
|
+
const revEdges = graph.reverse.get(edge.target);
|
|
436
|
+
if (revEdges) {
|
|
437
|
+
const idx = revEdges.findIndex((r) => r.target === filePath);
|
|
438
|
+
if (idx !== -1) revEdges.splice(idx, 1);
|
|
439
|
+
if (revEdges.length === 0) graph.reverse.delete(edge.target);
|
|
440
|
+
}
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
// Remove reverse edges that point to this file
|
|
444
|
+
const revEdges = graph.reverse.get(filePath) ?? [];
|
|
445
|
+
for (const revEdge of revEdges) {
|
|
446
|
+
const fwdEdges = graph.forward.get(revEdge.target);
|
|
447
|
+
if (fwdEdges) {
|
|
448
|
+
const idx = fwdEdges.findIndex((e) => e.target === filePath);
|
|
449
|
+
if (idx !== -1) fwdEdges.splice(idx, 1);
|
|
450
|
+
}
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
graph.forward.delete(filePath);
|
|
454
|
+
graph.reverse.delete(filePath);
|
|
455
|
+
graph.files.delete(filePath);
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
// ─── File Watcher ────────────────────────────────────────────────────────────
|
|
459
|
+
|
|
460
|
+
export function startWatcher(
|
|
461
|
+
projectRoot: string,
|
|
462
|
+
graph: ModuleGraph,
|
|
463
|
+
resolver: ResolverFactory
|
|
464
|
+
): void {
|
|
465
|
+
try {
|
|
466
|
+
const watcher = fs.watch(
|
|
467
|
+
projectRoot,
|
|
468
|
+
{ recursive: true },
|
|
469
|
+
(_eventType: string, filename: string | null) => {
|
|
470
|
+
if (!filename) return;
|
|
471
|
+
|
|
472
|
+
// Filter to TS files only
|
|
473
|
+
const ext = path.extname(filename);
|
|
474
|
+
if (!TS_EXTENSIONS.has(ext)) return;
|
|
475
|
+
|
|
476
|
+
// Skip excluded directories and files
|
|
477
|
+
const parts = filename.split(path.sep);
|
|
478
|
+
if (parts.some((p: string) => SKIP_DIRS.has(p))) return;
|
|
479
|
+
if (SKIP_FILES.has(path.basename(filename))) return;
|
|
480
|
+
if (
|
|
481
|
+
filename.endsWith(".d.ts") ||
|
|
482
|
+
filename.endsWith(".d.mts") ||
|
|
483
|
+
filename.endsWith(".d.cts")
|
|
484
|
+
)
|
|
485
|
+
return;
|
|
486
|
+
|
|
487
|
+
const absPath = path.resolve(projectRoot, filename);
|
|
488
|
+
|
|
489
|
+
if (fs.existsSync(absPath)) {
|
|
490
|
+
// File created or modified
|
|
491
|
+
updateFile(graph, absPath, resolver, projectRoot);
|
|
492
|
+
} else {
|
|
493
|
+
// File deleted
|
|
494
|
+
removeFile(graph, absPath);
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
);
|
|
498
|
+
|
|
499
|
+
// Cleanup on process exit
|
|
500
|
+
process.on("SIGINT", () => watcher.close());
|
|
501
|
+
process.on("SIGTERM", () => watcher.close());
|
|
502
|
+
|
|
503
|
+
log("File watcher started");
|
|
504
|
+
} catch (err) {
|
|
505
|
+
log("Failed to start file watcher:", err);
|
|
506
|
+
}
|
|
507
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "typegraph-mcp",
|
|
3
|
+
"version": "0.9.0",
|
|
4
|
+
"description": "Type-aware codebase navigation for AI coding agents — 14 MCP tools powered by tsserver + oxc",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"typegraph-mcp": "./cli.ts",
|
|
8
|
+
"typegraph": "./cli.ts"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"*.ts",
|
|
12
|
+
"*.json",
|
|
13
|
+
"README.md",
|
|
14
|
+
"LICENSE",
|
|
15
|
+
"pnpm-lock.yaml",
|
|
16
|
+
"skills/",
|
|
17
|
+
"commands/",
|
|
18
|
+
"hooks/",
|
|
19
|
+
"scripts/",
|
|
20
|
+
".claude-plugin/",
|
|
21
|
+
".cursor-plugin/",
|
|
22
|
+
"gemini-extension.json"
|
|
23
|
+
],
|
|
24
|
+
"scripts": {
|
|
25
|
+
"start": "tsx server.ts",
|
|
26
|
+
"test": "tsx smoke-test.ts",
|
|
27
|
+
"check": "tsx check.ts"
|
|
28
|
+
},
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"@clack/prompts": "^1.0.1",
|
|
31
|
+
"@modelcontextprotocol/sdk": "^1.26.0",
|
|
32
|
+
"oxc-parser": "^0.114.0",
|
|
33
|
+
"oxc-resolver": "^11.17.1",
|
|
34
|
+
"zod": "^4.3.6"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"@types/node": "^25.3.0"
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
#!/bin/bash
#
# ensure-deps.sh
# Ensures typegraph-mcp dependencies are installed.
# Called automatically by Claude Code on session start via plugin hooks.
#

set -e

# Plugin root: CLAUDE_PLUGIN_ROOT when the host provides it, otherwise the
# parent of this script's own directory.
PLUGIN_DIR="${CLAUDE_PLUGIN_ROOT:-$(cd "$(dirname "$0")/.." && pwd)}"

# Check if node_modules exist with required packages
# (spot-checks the three packages the server cannot run without).
if [ -d "$PLUGIN_DIR/node_modules/@modelcontextprotocol" ] && \
   [ -d "$PLUGIN_DIR/node_modules/oxc-parser" ] && \
   [ -d "$PLUGIN_DIR/node_modules/oxc-resolver" ]; then
  echo "typegraph-mcp dependencies OK"
  exit 0
fi

echo "Installing typegraph-mcp dependencies..."
cd "$PLUGIN_DIR"

# Prefer pnpm, fall back to npm
if command -v pnpm &> /dev/null; then
  # Reproducible install when the lockfile matches; otherwise a plain install.
  pnpm install --frozen-lockfile 2>/dev/null || pnpm install
elif command -v npm &> /dev/null; then
  npm install
else
  echo "Warning: Neither pnpm nor npm found. Run 'npm install' in $PLUGIN_DIR manually."
  exit 1
fi

echo "typegraph-mcp dependencies installed"
exit 0
|