@yawlabs/ctxlint 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1076 @@
1
+ // src/mcp/server.ts
2
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
3
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
+ import { z } from "zod";
5
+
6
+ // src/core/scanner.ts
7
+ import * as fs2 from "fs";
8
+ import * as path2 from "path";
9
+ import { glob } from "glob";
10
+
11
+ // src/utils/fs.ts
12
+ import * as fs from "fs";
13
+ import * as path from "path";
14
// Returns true when the file-system entry at `filePath` is accessible.
function fileExists(filePath) {
  // accessSync throws when the entry is missing or unreadable; map that to false.
  let exists = true;
  try {
    fs.accessSync(filePath);
  } catch {
    exists = false;
  }
  return exists;
}
22
// Returns true when `filePath` resolves to a directory (symlinks followed);
// missing or unreadable paths count as "not a directory".
function isDirectory(filePath) {
  try {
    const stats = fs.statSync(filePath);
    return stats.isDirectory();
  } catch {
    return false;
  }
}
29
// Returns true when `filePath` itself is a symbolic link.
// lstatSync inspects the link entry rather than its target.
function isSymlink(filePath) {
  try {
    const stats = fs.lstatSync(filePath);
    return stats.isSymbolicLink();
  } catch {
    return false;
  }
}
36
// Returns the target of the symlink at `filePath`, or undefined when the
// path is missing or not a symlink.
function readSymlinkTarget(filePath) {
  let target;
  try {
    target = fs.readlinkSync(filePath);
  } catch {
    target = void 0;
  }
  return target;
}
43
// Reads a file as UTF-8 text; any I/O error propagates to the caller.
function readFileContent(filePath) {
  const content = fs.readFileSync(filePath, "utf-8");
  return content;
}
46
// Directory names never worth walking (dependencies, VCS data, build output).
var IGNORED_DIRS = /* @__PURE__ */ new Set([
  "node_modules",
  ".git",
  "dist",
  "build",
  "vendor",
  ".next",
  ".nuxt",
  "coverage",
  "__pycache__"
]);
// Recursively lists every file under `projectRoot` as root-relative paths,
// skipping IGNORED_DIRS and stopping below a depth of 10 to bound the walk.
// Unreadable directories are skipped silently (best-effort listing).
function getAllProjectFiles(projectRoot) {
  const collected = [];
  const visit = (dir, depth) => {
    if (depth > 10) return;
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // permission or race error: skip this subtree
    }
    for (const entry of entries) {
      if (IGNORED_DIRS.has(entry.name)) continue;
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        visit(fullPath, depth + 1);
      } else {
        collected.push(path.relative(projectRoot, fullPath));
      }
    }
  };
  visit(projectRoot, 0);
  return collected;
}
78
+
79
+ // src/core/scanner.ts
80
// File names / glob patterns that identify AI-agent context files across
// tools (Claude, Cursor, Copilot, Windsurf, Gemini, Jules, Cline, Aider).
var CONTEXT_FILE_PATTERNS = [
  "CLAUDE.md",
  "CLAUDE.local.md",
  "AGENTS.md",
  ".cursorrules",
  ".cursor/rules/*.md",
  ".cursor/rules/*.mdc",
  "copilot-instructions.md",
  ".github/copilot-instructions.md",
  ".windsurfrules",
  ".windsurf/rules/*.md",
  "GEMINI.md",
  "JULES.md",
  ".clinerules",
  "CONVENTIONS.md"
];
// Directories excluded when picking which first-level subdirectories to scan.
var IGNORED_DIRS2 = /* @__PURE__ */ new Set(["node_modules", ".git", "dist", "build", "vendor"]);
97
// Discovers context files in the project root plus each first-level
// subdirectory that is not ignored and not hidden (one level deep only).
// Matches are deduplicated by normalized absolute path, annotated with
// symlink information, and returned sorted by relative path.
async function scanForContextFiles(projectRoot) {
  const found = [];
  const seen = /* @__PURE__ */ new Set();
  const dirsToScan = [projectRoot];
  try {
    const entries = fs2.readdirSync(projectRoot, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.isDirectory() && !IGNORED_DIRS2.has(entry.name) && !entry.name.startsWith(".")) {
        dirsToScan.push(path2.join(projectRoot, entry.name));
      }
    }
  } catch {
    // Unreadable root: fall back to scanning just the root itself.
  }
  for (const dir of dirsToScan) {
    for (const pattern of CONTEXT_FILE_PATTERNS) {
      // dot: true so dotfile patterns like ".cursorrules" match.
      const matches = await glob(pattern, {
        cwd: dir,
        absolute: true,
        nodir: true,
        dot: true
      });
      for (const match of matches) {
        const normalized = path2.normalize(match);
        if (seen.has(normalized)) continue;
        seen.add(normalized);
        const relativePath = path2.relative(projectRoot, normalized);
        const symlink = isSymlink(normalized);
        const target = symlink ? readSymlinkTarget(normalized) : void 0;
        found.push({
          absolutePath: normalized,
          // Relative paths are normalized to forward slashes for reporting.
          relativePath: relativePath.replace(/\\/g, "/"),
          isSymlink: symlink,
          symlinkTarget: target
        });
      }
    }
  }
  return found.sort((a, b) => a.relativePath.localeCompare(b.relativePath));
}
136
+
137
+ // src/utils/tokens.ts
138
+ import { encoding_for_model } from "tiktoken";
139
var encoder = null;
// Lazily builds (and caches) a tiktoken encoder for the gpt-4 vocabulary.
function getEncoder() {
  if (encoder === null) {
    encoder = encoding_for_model("gpt-4");
  }
  return encoder;
}
// Counts tokens in `text` with tiktoken; if encoding fails for any reason,
// falls back to the rough 4-characters-per-token heuristic.
function countTokens(text) {
  try {
    return getEncoder().encode(text).length;
  } catch {
    return Math.ceil(text.length / 4);
  }
}
// Releases the cached encoder's native resources; safe to call repeatedly.
function freeEncoder() {
  if (encoder === null) return;
  encoder.free();
  encoder = null;
}
161
+
162
+ // src/core/parser.ts
163
// Candidate path tokens: optional ./ or ../ prefix, at least one directory
// segment, then a final name that may carry an extension or glob star.
// Delimited by whitespace/backtick/quote/paren on both sides. /g + /m: the
// caller must reset lastIndex before reusing this stateful regex.
var PATH_PATTERN = /(?:^|[\s`"'(])((\.{0,2}\/)?(?:[\w@.-]+\/)+[\w.*-]+(?:\.\w+)?)(?=[\s`"'),;:]|$)/gm;
// Tokens that merely look path-like: URLs, common abbreviations ("n/a",
// "I/O", "e.g."), and archive-extension fragments.
var PATH_EXCLUDE = /^(https?:\/\/|ftp:\/\/|mailto:|n\/a|w\/o|I\/O|i\/o|e\.g\.|N\/A|\.deb\/|\.rpm[.\/]|\.tar[.\/]|\.zip[.\/])/i;
// Shell-prompt style lines: "$ cmd" or "> cmd"; captures the command text.
var COMMAND_PREFIXES = /^\s*[\$>]\s+(.+)$/;
// Dev-tool invocations worth validating against the project configuration.
var COMMON_COMMANDS = /^(npm\s+run|npx|pnpm|yarn|make|cargo|go\s+(run|build|test)|python|pytest|vitest|jest|bun|deno)\b/;
167
// Loads a discovered context file from disk and builds the parsed model:
// raw content, token/line counts, markdown sections, and the extracted
// path and command references.
function parseContextFile(file) {
  const content = readFileContent(file.absolutePath);
  const lines = content.split("\n");
  const sections = parseSections(lines);
  return {
    filePath: file.absolutePath,
    relativePath: file.relativePath,
    isSymlink: file.isSymlink,
    symlinkTarget: file.symlinkTarget,
    totalTokens: countTokens(content),
    totalLines: lines.length,
    content,
    sections,
    references: {
      paths: extractPathReferences(lines, sections),
      commands: extractCommandReferences(lines, sections)
    }
  };
}
188
// Splits markdown lines into heading-delimited sections.  startLine/endLine
// are 1-indexed and inclusive; endLine is the last line belonging to the
// section.
//
// Fix: a section followed by another heading previously ended at `i - 1`
// (a 0-indexed value mixed into 1-indexed bookkeeping), silently dropping
// its final content line — and yielding endLine < startLine for two
// consecutive headings.  It now ends on the 1-indexed line just before the
// next heading (`i`), consistent with the trailing section ending at
// `lines.length`.
function parseSections(lines) {
  const sections = [];
  for (let i = 0; i < lines.length; i++) {
    const match = lines[i].match(/^(#{1,6})\s+(.+)/);
    if (!match) continue;
    // Close the previous section on the line before this heading.
    if (sections.length > 0) {
      const prev = sections[sections.length - 1];
      if (prev.endLine === -1) {
        prev.endLine = i; // 1-indexed line immediately preceding this heading
      }
    }
    sections.push({
      title: match[2].trim(),
      startLine: i + 1,
      // 1-indexed
      endLine: -1,
      level: match[1].length
    });
  }
  // The final section runs to the end of the document.
  if (sections.length > 0) {
    const last = sections[sections.length - 1];
    if (last.endLine === -1) {
      last.endLine = lines.length;
    }
  }
  return sections;
}
216
// Returns the title of the nearest section whose heading starts at or before
// the given 1-indexed line, or undefined when no heading precedes it.
function getSectionForLine(line, sections) {
  const containing = [...sections].reverse().find((s) => line >= s.startLine);
  return containing ? containing.title : void 0;
}
224
// Scans markdown lines for file-path-like tokens, skipping fenced code
// blocks whose language tags them as illustrative examples.  Returns
// references with 1-indexed line/column plus the enclosing section title.
function extractPathReferences(lines, sections) {
  const paths = [];
  let inCodeBlock = false;
  let codeBlockLang = "";
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    // Toggle fence state; the ``` line itself is never scanned for paths.
    if (line.trimStart().startsWith("```")) {
      if (!inCodeBlock) {
        inCodeBlock = true;
        codeBlockLang = line.trimStart().slice(3).trim().toLowerCase();
      } else {
        inCodeBlock = false;
        codeBlockLang = "";
      }
      continue;
    }
    // Inside an example-language fence, path-like strings are sample code.
    if (inCodeBlock && isExampleCodeBlock(codeBlockLang)) {
      continue;
    }
    // PATH_PATTERN is a /g regex and advances lastIndex statefully;
    // reset it before scanning each line.
    PATH_PATTERN.lastIndex = 0;
    let match;
    while ((match = PATH_PATTERN.exec(line)) !== null) {
      const value = match[1];
      if (PATH_EXCLUDE.test(value)) continue;
      if (value.length < 3) continue;
      // Skip version-like tokens such as "1.2/..." or "v2.0/...".
      if (/^v?\d+\.\d+\//.test(value)) continue;
      // match[0] may include a leading delimiter character; compute the
      // 1-indexed column of the captured path itself.
      const column = match.index + match[0].length - match[1].length + 1;
      paths.push({
        value,
        line: i + 1,
        // 1-indexed
        column,
        section: getSectionForLine(i + 1, sections)
      });
    }
  }
  return paths;
}
262
// Languages whose fenced code blocks are treated as illustrative examples;
// path-like strings inside such blocks are not validated.
var EXAMPLE_BLOCK_LANGS = /* @__PURE__ */ new Set([
  "javascript",
  "js",
  "typescript",
  "ts",
  "python",
  "py",
  "go",
  "rust",
  "java",
  "c",
  "cpp",
  "ruby",
  "php",
  "json",
  "yaml",
  "yml",
  "toml",
  "xml",
  "html",
  "css",
  "sql",
  "graphql",
  "jsx",
  "tsx"
]);
// True when `lang` (lowercased fence info string) names an example language.
function isExampleCodeBlock(lang) {
  return EXAMPLE_BLOCK_LANGS.has(lang);
}
290
// Extracts shell-command references from markdown lines via three routes:
//  1. prompt-prefixed lines ("$ cmd" / "> cmd") anywhere — note this check
//     runs before the fence-language gate, so such lines are captured even
//     inside non-shell code blocks;
//  2. bare lines inside shell fences (bash/sh/shell/zsh or no language)
//     that look like known dev-tool invocations;
//  3. inline `backtick` spans matching COMMON_COMMANDS.
// All positions are 1-indexed.
function extractCommandReferences(lines, sections) {
  const commands = [];
  let inCodeBlock = false;
  let codeBlockLang = "";
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    // Toggle fence state; the ``` line itself is not scanned.
    if (line.trimStart().startsWith("```")) {
      if (!inCodeBlock) {
        inCodeBlock = true;
        codeBlockLang = line.trimStart().slice(3).trim().toLowerCase();
      } else {
        inCodeBlock = false;
        codeBlockLang = "";
      }
      continue;
    }
    // Route 1: "$ cmd" / "> cmd" prompt lines.
    const prefixMatch = line.match(COMMAND_PREFIXES);
    if (prefixMatch) {
      commands.push({
        value: prefixMatch[1].trim(),
        line: i + 1,
        // Column of the command text after the prompt marker.
        column: prefixMatch.index + prefixMatch[0].length - prefixMatch[1].length + 1,
        section: getSectionForLine(i + 1, sections)
      });
      continue;
    }
    // Route 2: bare commands inside shell (or untagged) fences.
    if (inCodeBlock && ["bash", "sh", "shell", "zsh", ""].includes(codeBlockLang)) {
      const trimmed = line.trim();
      // Skip blanks and shell/JS-style comment lines.
      if (trimmed && !trimmed.startsWith("#") && !trimmed.startsWith("//")) {
        if (COMMON_COMMANDS.test(trimmed) || trimmed.startsWith("$") || trimmed.startsWith(">")) {
          // Strip a leading prompt marker if present.
          const cmd = trimmed.replace(/^\s*[\$>]\s*/, "");
          if (cmd) {
            const cmdStart = line.indexOf(trimmed) + trimmed.indexOf(cmd);
            commands.push({
              value: cmd,
              line: i + 1,
              column: cmdStart + 1,
              section: getSectionForLine(i + 1, sections)
            });
          }
        }
      }
      continue;
    }
    // Route 3: inline code spans that look like known tool invocations.
    const inlineMatches = line.matchAll(/`([^`]+)`/g);
    for (const m of inlineMatches) {
      const cmd = m[1].trim();
      if (COMMON_COMMANDS.test(cmd)) {
        commands.push({
          value: cmd,
          line: i + 1,
          // +2: skip the opening backtick, then convert to 1-indexed.
          column: (m.index ?? 0) + 2,
          section: getSectionForLine(i + 1, sections)
        });
      }
    }
  }
  return commands;
}
349
+
350
+ // src/core/checks/paths.ts
351
+ import * as path3 from "path";
352
+ import levenshteinPkg from "fast-levenshtein";
353
+ import { glob as glob2 } from "glob";
354
+
355
+ // src/utils/git.ts
356
+ import simpleGit from "simple-git";
357
var gitInstance = null;
var gitProjectRoot = null;
// Returns a simple-git handle for `projectRoot`, reusing the cached
// instance while the requested root is unchanged.
function getGit(projectRoot) {
  const cacheHit = gitInstance !== null && gitProjectRoot === projectRoot;
  if (!cacheHit) {
    gitInstance = simpleGit(projectRoot);
    gitProjectRoot = projectRoot;
  }
  return gitInstance;
}
// Drops the cached handle so the next getGit() call builds a fresh one.
function resetGit() {
  gitInstance = null;
  gitProjectRoot = null;
}
370
// True when `projectRoot` lies inside a git work tree; any rev-parse
// failure (not a repo, git missing) yields false.
async function isGitRepo(projectRoot) {
  try {
    await simpleGit(projectRoot).revparse(["--is-inside-work-tree"]);
    return true;
  } catch {
    return false;
  }
}
379
// Resolves the date of the most recent commit touching `filePath`, or null
// when the file has no history or git fails.
async function getFileLastModified(projectRoot, filePath) {
  try {
    const log = await getGit(projectRoot).log({ file: filePath, maxCount: 1 });
    const date = log.latest?.date;
    return date ? new Date(date) : null;
  } catch {
    return null;
  }
}
391
// Counts commits touching `filePath` after `since`; 0 on any git failure.
async function getCommitsSince(projectRoot, filePath, since) {
  try {
    const git = getGit(projectRoot);
    const options = { file: filePath, "--since": since.toISOString() };
    const log = await git.log(options);
    return log.total;
  } catch {
    return 0;
  }
}
403
// Scans the last 10 rename commits touching `filePath` and returns the most
// recent old->new mapping (with short hash and age in days), or null.
//
// Fixes vs. the previous version:
//  - `git log --name-status` separates the status field and the two paths
//    with TAB characters, not spaces; the old `line.split(" ")` never
//    produced three parts, so renames were never reported.
//  - the `--format=%H %ai` commit header is followed by a blank line, so
//    the header is located by scanning backwards past empty lines instead
//    of assuming it sits on the immediately preceding line.
async function findRenames(projectRoot, filePath) {
  try {
    const git = getGit(projectRoot);
    const result = await git.raw([
      "log",
      "--diff-filter=R",
      "--find-renames",
      "--name-status",
      "--format=%H %ai",
      "-10",
      "--",
      filePath
    ]);
    if (!result.trim()) return null;
    const lines = result.trim().split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      if (!line.startsWith("R")) continue;
      // Rename rows look like: "R100\told/path\tnew/path".
      const parts = line.split("\t");
      if (parts.length < 3) continue;
      // Walk back to the nearest non-empty line: the "<hash> <date>" header.
      let j = i - 1;
      while (j >= 0 && !lines[j].trim()) j--;
      const hashMatch = (j >= 0 ? lines[j] : "").match(/^([a-f0-9]+)\s+(.+)/);
      const commitHash = hashMatch?.[1]?.substring(0, 7) || "unknown";
      const dateStr = hashMatch?.[2];
      const daysAgo = dateStr ? Math.floor((Date.now() - new Date(dateStr).getTime()) / (1e3 * 60 * 60 * 24)) : 0;
      return {
        oldPath: parts[1],
        newPath: parts[2],
        commitHash,
        daysAgo
      };
    }
    return null;
  } catch {
    return null;
  }
}
442
+
443
+ // src/core/checks/paths.ts
444
var levenshtein = levenshteinPkg.get;
var cachedProjectFiles = null;
// Memoizes the full project listing per root — the audit consults it for
// every unresolved path reference.
function getProjectFiles(projectRoot) {
  const cached = cachedProjectFiles;
  if (cached && cached.root === projectRoot) {
    return cached.files;
  }
  const files = getAllProjectFiles(projectRoot);
  cachedProjectFiles = { root: projectRoot, files };
  return files;
}
452
// Validates every path reference in `file` against the file system.
// Relative references ("./", "../") resolve from the context file's own
// directory; bare references resolve from the project root.  Glob patterns
// must match at least one entry, "dir/" references must be directories, and
// plain paths must exist — otherwise a git rename or a fuzzy filename match
// is offered as a suggestion.
async function checkPaths(file, projectRoot) {
  const issues = [];
  const projectFiles = getProjectFiles(projectRoot);
  const contextDir = path3.dirname(file.filePath);
  for (const ref of file.references.paths) {
    const baseDir = ref.value.startsWith("./") || ref.value.startsWith("../") ? contextDir : projectRoot;
    const resolvedPath = path3.resolve(baseDir, ref.value);
    const normalizedRef = ref.value.replace(/\\/g, "/");
    // Glob references: verified by expansion rather than direct stat.
    if (normalizedRef.includes("*")) {
      const matches = await glob2(normalizedRef, { cwd: baseDir, nodir: false });
      if (matches.length === 0) {
        issues.push({
          severity: "error",
          check: "paths",
          line: ref.line,
          message: `${ref.value} matches no files`,
          suggestion: "Verify the glob pattern is correct"
        });
      }
      continue;
    }
    // Trailing slash marks an explicit directory reference.
    const isDir = normalizedRef.endsWith("/");
    if (isDir) {
      const dirPath = path3.resolve(baseDir, normalizedRef);
      if (!isDirectory(dirPath)) {
        issues.push({
          severity: "error",
          check: "paths",
          line: ref.line,
          message: `${ref.value} directory does not exist`
        });
      }
      continue;
    }
    if (fileExists(resolvedPath) || isDirectory(resolvedPath)) {
      continue;
    }
    // Missing path: prefer a git-rename explanation, then a fuzzy match.
    let suggestion;
    let detail;
    const rename = await findRenames(projectRoot, ref.value);
    if (rename) {
      suggestion = `Did you mean ${rename.newPath}?`;
      detail = `Renamed ${rename.daysAgo} days ago in commit ${rename.commitHash}`;
    } else {
      const match = findClosestMatch(normalizedRef, projectFiles);
      if (match) {
        suggestion = `Did you mean ${match}?`;
      }
    }
    issues.push({
      severity: "error",
      check: "paths",
      line: ref.line,
      message: `${ref.value} does not exist`,
      suggestion,
      detail
    });
  }
  return issues;
}
512
// Finds the project file most similar to `target`.  Pass 1: files sharing
// the same basename (any edit distance, closest wins).  Pass 2 (only when
// pass 1 found nothing): whole-path fuzzy match, accepted only within
// max(40% of the target length, 5) edits.  Returns null when nothing fits.
function findClosestMatch(target, files) {
  const targetNorm = target.replace(/\\/g, "/");
  const targetBase = path3.basename(targetNorm);
  let best = null;
  let bestDist = Infinity;
  // Pass 1: same basename, different path.
  for (const file of files) {
    const fileNorm = file.replace(/\\/g, "/");
    if (path3.basename(fileNorm) !== targetBase) continue;
    if (fileNorm === targetNorm) continue;
    const dist = levenshtein(targetNorm, fileNorm);
    if (dist < bestDist) {
      bestDist = dist;
      best = fileNorm;
    }
  }
  if (best) {
    return best;
  }
  // Pass 2: bounded fuzzy match over every file path.
  const threshold = Math.max(targetNorm.length * 0.4, 5);
  for (const file of files) {
    const fileNorm = file.replace(/\\/g, "/");
    const dist = levenshtein(targetNorm, fileNorm);
    if (dist < bestDist && dist <= threshold) {
      bestDist = dist;
      best = fileNorm;
    }
  }
  return best;
}
539
+
540
+ // src/core/checks/commands.ts
541
import * as fs3 from "fs";
import * as path4 from "path";
// "npm run X" / "pnpm [run] X" / "yarn [run] X" — captures the script slot.
var NPM_SCRIPT_PATTERN = /^(?:npm\s+run|pnpm(?:\s+run)?|yarn(?:\s+run)?)\s+(\S+)/;
var MAKE_PATTERN = /^make\s+(\S+)/;
// Package-manager built-in subcommands.  "pnpm install" or "yarn add lodash"
// match NPM_SCRIPT_PATTERN's script slot but are manager commands, not
// package.json scripts — previously they were falsely flagged as missing
// scripts.  (Deliberately excludes names like "test"/"build" that the
// shorthand check below already validates as scripts.)
var PM_BUILTIN_COMMANDS = /* @__PURE__ */ new Set([
  "install", "i", "ci", "add", "remove", "rm", "uninstall", "up", "update",
  "upgrade", "init", "create", "exec", "dlx", "link", "unlink", "publish",
  "pack", "patch", "audit", "outdated", "why", "list", "ls", "info", "view",
  "config", "cache", "store", "setup", "login", "logout", "whoami",
  "version", "help", "rebuild", "prune", "dedupe", "import", "fetch", "run"
]);
// Validates the command references of one context file against the
// project's package.json scripts, Makefile targets, and installed tools.
async function checkCommands(file, projectRoot) {
  const issues = [];
  const pkgJson = loadPackageJson(projectRoot);
  const makefile = loadMakefile(projectRoot);
  for (const ref of file.references.commands) {
    const cmd = ref.value;
    // package.json scripts: "npm run X", "pnpm [run] X", "yarn [run] X".
    const scriptMatch = cmd.match(NPM_SCRIPT_PATTERN);
    if (scriptMatch && pkgJson) {
      const scriptName = scriptMatch[1];
      // Without an explicit "run", pnpm/yarn built-ins are manager
      // commands, not scripts — skip instead of reporting a bogus miss.
      const explicitRun = /^(?:npm|pnpm|yarn)\s+run\b/.test(cmd);
      if (!explicitRun && PM_BUILTIN_COMMANDS.has(scriptName)) {
        continue;
      }
      if (pkgJson.scripts && !(scriptName in pkgJson.scripts)) {
        const available = Object.keys(pkgJson.scripts).join(", ");
        issues.push({
          severity: "error",
          check: "commands",
          line: ref.line,
          message: `"${cmd}" \u2014 script "${scriptName}" not found in package.json`,
          suggestion: available ? `Available scripts: ${available}` : void 0
        });
      }
      continue;
    }
    // Shorthand forms: "npm test", "yarn build", etc.
    const shorthandMatch = cmd.match(/^(npm|pnpm|yarn)\s+(test|start|build|dev|lint|format)\b/);
    if (shorthandMatch && pkgJson) {
      const scriptName = shorthandMatch[2];
      if (pkgJson.scripts && !(scriptName in pkgJson.scripts)) {
        issues.push({
          severity: "error",
          check: "commands",
          line: ref.line,
          message: `"${cmd}" \u2014 script "${scriptName}" not found in package.json`
        });
      }
      continue;
    }
    // Make targets: "make X" against the project Makefile.
    const makeMatch = cmd.match(MAKE_PATTERN);
    if (makeMatch) {
      const target = makeMatch[1];
      if (makefile && !hasMakeTarget(makefile, target)) {
        issues.push({
          severity: "error",
          check: "commands",
          line: ref.line,
          message: `"${cmd}" \u2014 target "${target}" not found in Makefile`
        });
      } else if (!makefile) {
        issues.push({
          severity: "error",
          check: "commands",
          line: ref.line,
          message: `"${cmd}" \u2014 no Makefile found in project`
        });
      }
      continue;
    }
    // Bare dev-tool invocations: warn when the tool is neither a declared
    // dependency nor present in node_modules/.bin.
    const toolMatch = cmd.match(/^(vitest|jest|pytest|mocha|eslint|prettier|tsc)\b/);
    if (toolMatch && pkgJson) {
      const tool = toolMatch[1];
      const allDeps = {
        ...pkgJson.dependencies,
        ...pkgJson.devDependencies
      };
      if (!(tool in allDeps)) {
        const binPath = path4.join(projectRoot, "node_modules", ".bin", tool);
        try {
          fs3.accessSync(binPath);
        } catch {
          issues.push({
            severity: "warning",
            check: "commands",
            line: ref.line,
            message: `"${cmd}" \u2014 "${tool}" not found in dependencies or node_modules/.bin`
          });
        }
      }
    }
  }
  return issues;
}
// Parses <projectRoot>/package.json, or null when absent or malformed.
function loadPackageJson(projectRoot) {
  try {
    const content = fs3.readFileSync(path4.join(projectRoot, "package.json"), "utf-8");
    return JSON.parse(content);
  } catch {
    return null;
  }
}
// Reads the project's Makefile, or null when absent.
function loadMakefile(projectRoot) {
  try {
    return fs3.readFileSync(path4.join(projectRoot, "Makefile"), "utf-8");
  } catch {
    return null;
  }
}
// True when the Makefile declares `target:` at the start of some line.
function hasMakeTarget(makefile, target) {
  const pattern = new RegExp(`^${target.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\s*:`, "m");
  return pattern.test(makefile);
}
642
+
643
+ // src/core/checks/staleness.ts
644
+ import * as path5 from "path";
645
// Age at which a staleness finding becomes a warning rather than info.
var WARNING_DAYS = 30;
// Minimum age before staleness is reported at all.
var INFO_DAYS = 14;
// Flags a context file whose referenced paths kept changing after the file
// itself was last committed.  Emits at most one issue: nothing when the
// project is not a git repo, the file is under 14 days old, or no
// referenced path has commits since the file's last update; otherwise
// "info" (>= 14 days) or "warning" (>= 30 days).
async function checkStaleness(file, projectRoot) {
  const issues = [];
  if (!await isGitRepo(projectRoot)) {
    return issues;
  }
  const relativePath = path5.relative(projectRoot, file.filePath).replace(/\\/g, "/");
  const lastModified = await getFileLastModified(projectRoot, relativePath);
  // No history (or unparseable date): nothing to compare against.
  if (!lastModified || isNaN(lastModified.getTime())) {
    return issues;
  }
  const daysSinceUpdate = Math.floor((Date.now() - lastModified.getTime()) / (1e3 * 60 * 60 * 24));
  if (daysSinceUpdate < INFO_DAYS) {
    return issues;
  }
  // Collect each referenced path plus its parent directory (Set dedupes).
  const referencedPaths = /* @__PURE__ */ new Set();
  for (const ref of file.references.paths) {
    const parts = ref.value.split("/");
    if (parts.length > 1) {
      referencedPaths.add(parts.slice(0, -1).join("/"));
    }
    referencedPaths.add(ref.value);
  }
  // Tally git activity on those paths since the file's last update,
  // tracking the single most active reference for the message.
  let totalCommits = 0;
  let mostActiveRef = "";
  let mostActiveCommits = 0;
  for (const refPath of referencedPaths) {
    const commits = await getCommitsSince(projectRoot, refPath, lastModified);
    totalCommits += commits;
    if (commits > mostActiveCommits) {
      mostActiveCommits = commits;
      mostActiveRef = refPath;
    }
  }
  if (totalCommits === 0) {
    return issues;
  }
  const severity = daysSinceUpdate >= WARNING_DAYS ? "warning" : "info";
  issues.push({
    severity,
    check: "staleness",
    line: 1,
    message: `Last updated ${daysSinceUpdate} days ago. ${mostActiveRef} has ${mostActiveCommits} commits since.`,
    suggestion: "Review and update this context file to reflect recent changes.",
    detail: `${totalCommits} total commits to referenced paths since last update.`
  });
  return issues;
}
694
+
695
+ // src/core/checks/tokens.ts
696
var INFO_THRESHOLD = 1e3;
var WARNING_THRESHOLD = 3e3;
var ERROR_THRESHOLD = 8e3;
// Grades a single context file by token footprint.  Returns at most one
// issue: "error" at >= 8000 tokens, "warning" at >= 3000, "info" at >= 1000.
async function checkTokens(file, _projectRoot) {
  const tokens = file.totalTokens;
  if (tokens >= ERROR_THRESHOLD) {
    return [{
      severity: "error",
      check: "tokens",
      line: 1,
      message: `${tokens.toLocaleString()} tokens \u2014 consumes significant context window space`,
      suggestion: "Consider splitting into focused sections or removing redundant content."
    }];
  }
  if (tokens >= WARNING_THRESHOLD) {
    return [{
      severity: "warning",
      check: "tokens",
      line: 1,
      message: `${tokens.toLocaleString()} tokens \u2014 large context file`,
      suggestion: "Consider trimming \u2014 research shows diminishing returns past ~300 lines."
    }];
  }
  if (tokens >= INFO_THRESHOLD) {
    return [{
      severity: "info",
      check: "tokens",
      line: 1,
      message: `Uses ~${tokens.toLocaleString()} tokens per session`
    }];
  }
  return [];
}
// Emits one warning when multiple context files together exceed 5000 tokens.
function checkAggregateTokens(files) {
  const total = files.reduce((sum, f) => sum + f.tokens, 0);
  const excessive = total > 5e3 && files.length > 1;
  if (!excessive) {
    return null;
  }
  return {
    severity: "warning",
    check: "tokens",
    line: 0,
    message: `${files.length} context files consume ${total.toLocaleString()} tokens combined`,
    suggestion: "Consider consolidating or trimming to reduce per-session context cost."
  };
}
741
+
742
+ // src/core/checks/redundancy.ts
743
+ import * as fs4 from "fs";
744
+ import * as path6 from "path";
745
// Maps npm package names to the prose spellings used to mention them in
// context files.  Used to spot "we use React"-style lines that restate
// information already present in package.json.  Note a few keys (docker,
// kubernetes, terraform) are not npm packages and will only fire if a
// same-named dependency exists.
var PACKAGE_TECH_MAP = {
  react: ["React", "react"],
  "react-dom": ["React DOM", "ReactDOM"],
  next: ["Next.js", "NextJS", "next.js"],
  express: ["Express", "express.js"],
  fastify: ["Fastify"],
  typescript: ["TypeScript"],
  vue: ["Vue", "Vue.js", "vue.js"],
  angular: ["Angular"],
  svelte: ["Svelte", "SvelteKit"],
  tailwindcss: ["Tailwind", "TailwindCSS", "tailwind"],
  prisma: ["Prisma"],
  drizzle: ["Drizzle"],
  "drizzle-orm": ["Drizzle"],
  jest: ["Jest"],
  vitest: ["Vitest"],
  mocha: ["Mocha"],
  eslint: ["ESLint"],
  prettier: ["Prettier"],
  webpack: ["Webpack"],
  vite: ["Vite"],
  esbuild: ["esbuild"],
  tsup: ["tsup"],
  rollup: ["Rollup"],
  graphql: ["GraphQL"],
  mongoose: ["Mongoose"],
  sequelize: ["Sequelize"],
  "socket.io": ["Socket.IO", "socket.io"],
  redis: ["Redis"],
  ioredis: ["Redis"],
  postgres: ["PostgreSQL", "Postgres"],
  pg: ["PostgreSQL", "Postgres"],
  mysql2: ["MySQL"],
  sqlite3: ["SQLite"],
  "better-sqlite3": ["SQLite"],
  zod: ["Zod"],
  joi: ["Joi"],
  axios: ["Axios"],
  lodash: ["Lodash", "lodash"],
  underscore: ["Underscore"],
  moment: ["Moment", "moment.js"],
  dayjs: ["Day.js", "dayjs"],
  "date-fns": ["date-fns"],
  docker: ["Docker"],
  kubernetes: ["Kubernetes", "K8s"],
  terraform: ["Terraform"],
  storybook: ["Storybook"],
  playwright: ["Playwright"],
  cypress: ["Cypress"],
  puppeteer: ["Puppeteer"]
};
796
// Finds content an agent could infer on its own.  Two passes:
//  1. "we use <tech>"-style mentions of packages already declared in
//     package.json (info issue, estimating the token cost of the line);
//  2. "X are located in dir/" statements about directories that actually
//     exist and are therefore discoverable by listing files.
async function checkRedundancy(file, projectRoot) {
  const issues = [];
  const pkgJson = loadPackageJson2(projectRoot);
  if (pkgJson) {
    const allDeps = /* @__PURE__ */ new Set([
      ...Object.keys(pkgJson.dependencies || {}),
      ...Object.keys(pkgJson.devDependencies || {})
    ]);
    const lines2 = file.content.split("\n");
    for (let i = 0; i < lines2.length; i++) {
      const line = lines2[i];
      for (const [pkg, mentions] of Object.entries(PACKAGE_TECH_MAP)) {
        // Only packages the project actually depends on are redundant.
        if (!allDeps.has(pkg)) continue;
        for (const mention of mentions) {
          // Phrasings that declare the tech stack rather than explain it.
          const patterns = [
            new RegExp(
              `\\b(?:use|using|built with|powered by|written in)\\s+${escapeRegex(mention)}\\b`,
              "i"
            ),
            new RegExp(`\\bwe\\s+use\\s+${escapeRegex(mention)}\\b`, "i"),
            new RegExp(
              `\\b${escapeRegex(mention)}\\s+(?:project|app|application|codebase)\\b`,
              "i"
            ),
            new RegExp(`\\bThis is a\\s+${escapeRegex(mention)}\\b`, "i")
          ];
          for (const pattern of patterns) {
            if (pattern.test(line)) {
              const wastedTokens = countTokens(line.trim());
              issues.push({
                severity: "info",
                check: "redundancy",
                line: i + 1,
                message: `"${mention}" is in package.json ${pkgJson.dependencies?.[pkg] ? "dependencies" : "devDependencies"} \u2014 agent can infer this`,
                suggestion: `~${wastedTokens} tokens could be saved`
              });
              // One issue per mention per line is enough.
              break;
            }
          }
        }
      }
    }
  }
  // Pass 2: statements describing where things live on disk.
  const lines = file.content.split("\n");
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const dirMatch = line.match(
      /(?:are|go|live|found|located|stored)\s+(?:in|at|under)\s+[`"]?(\S+\/)[`"]?/i
    );
    if (dirMatch) {
      const dir = dirMatch[1].replace(/[`"]/g, "");
      const fullPath = path6.resolve(projectRoot, dir);
      if (isDirectory(fullPath)) {
        issues.push({
          severity: "info",
          check: "redundancy",
          line: i + 1,
          message: `Directory "${dir}" exists and is discoverable \u2014 agent can find this by listing files`,
          suggestion: "Only keep if there is non-obvious context about this directory"
        });
      }
    }
  }
  return issues;
}
861
// Pairwise-compares parsed context files and warns when two share more
// than 60% of their significant (trimmed, > 10 char) lines.
function checkDuplicateContent(files) {
  const issues = [];
  for (let i = 0; i < files.length; i++) {
    for (let j = i + 1; j < files.length; j++) {
      const overlap = calculateLineOverlap(files[i].content, files[j].content);
      if (overlap <= 0.6) continue;
      const pct = Math.round(overlap * 100);
      issues.push({
        severity: "warning",
        check: "redundancy",
        line: 1,
        message: `${files[i].relativePath} and ${files[j].relativePath} have ${pct}% content overlap`,
        suggestion: "Consider consolidating into a single context file"
      });
    }
  }
  return issues;
}
879
// Overlap coefficient between two texts: the fraction of significant lines
// (trimmed, longer than 10 chars) shared, relative to the smaller set.
// Returns a value in [0, 1]; 0 when either text has no significant lines.
function calculateLineOverlap(contentA, contentB) {
  const significantLines = (text) => new Set(
    text.split("\n").map((l) => l.trim()).filter((l) => l.length > 10)
  );
  const linesA = significantLines(contentA);
  const linesB = significantLines(contentB);
  if (linesA.size === 0 || linesB.size === 0) return 0;
  let shared = 0;
  for (const line of linesA) {
    if (linesB.has(line)) shared++;
  }
  return shared / Math.min(linesA.size, linesB.size);
}
893
// Parses <projectRoot>/package.json, or null when missing or malformed.
function loadPackageJson2(projectRoot) {
  const pkgPath = path6.join(projectRoot, "package.json");
  try {
    return JSON.parse(fs4.readFileSync(pkgPath, "utf-8"));
  } catch {
    return null;
  }
}
901
// Backslash-escapes regex metacharacters so `str` can be matched literally.
function escapeRegex(str) {
  return str.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
904
+
905
+ // src/mcp/server.ts
906
+ import * as path7 from "path";
907
var VERSION = "0.1.0";
// Every supported lint check; used as the default when a caller omits them.
var ALL_CHECKS = ["paths", "commands", "staleness", "tokens", "redundancy"];
// The MCP server instance; tools are registered on it below.
var server = new McpServer({
  name: "ctxlint",
  version: VERSION
});
913
// Tool: full audit.  Runs the selected checks over every discovered context
// file and returns the JSON report produced by runAudit().
server.tool(
  "ctxlint_audit",
  "Audit all AI agent context files (CLAUDE.md, AGENTS.md, etc.) in the project for stale references, invalid commands, redundant content, and token waste.",
  {
    projectPath: z.string().optional().describe("Path to the project root. Defaults to current working directory."),
    checks: z.array(z.enum(["paths", "commands", "staleness", "tokens", "redundancy"])).optional().describe("Which checks to run. Defaults to all.")
  },
  async ({ projectPath, checks }) => {
    const root = path7.resolve(projectPath || process.cwd());
    const activeChecks = checks || ALL_CHECKS;
    try {
      const result = await runAudit(root, activeChecks);
      return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      return {
        content: [{ type: "text", text: JSON.stringify({ error: msg }) }],
        isError: true
      };
    } finally {
      // Release per-call caches (tiktoken encoder, simple-git instance) so
      // a long-lived MCP session does not leak them.
      freeEncoder();
      resetGit();
    }
  }
);
938
// Tool: single-path validation.  Checks existence relative to the project
// root and, when missing, consults git history for a rename suggestion.
server.tool(
  "ctxlint_validate_path",
  "Check if a file path referenced in a context file actually exists in the project. Returns the file status and suggests corrections if the path is invalid.",
  {
    path: z.string().describe("The file path to validate"),
    projectPath: z.string().optional().describe("Project root. Defaults to cwd.")
  },
  async ({ path: filePath, projectPath }) => {
    try {
      const root = path7.resolve(projectPath || process.cwd());
      const resolved = path7.resolve(root, filePath);
      const result = {
        path: filePath,
        exists: fileExists(resolved) || isDirectory(resolved)
      };
      // Missing path: see whether git recorded it being renamed away.
      if (!result.exists) {
        const rename = await findRenames(root, filePath);
        if (rename) {
          result.renamed = true;
          result.newPath = rename.newPath;
          result.renameCommit = rename.commitHash;
          result.daysAgo = rename.daysAgo;
        }
      }
      return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      return {
        content: [{ type: "text", text: JSON.stringify({ error: msg }) }],
        isError: true
      };
    } finally {
      // Drop the cached simple-git instance between calls.
      resetGit();
    }
  }
);
974
// Tool: token breakdown.  Parses every discovered context file and reports
// per-file and aggregate token usage (no lint issues are computed here).
server.tool(
  "ctxlint_token_report",
  "Get a token count breakdown for all context files in the project. Shows per-file and aggregate token usage, plus estimated waste from redundant content.",
  {
    projectPath: z.string().optional().describe("Project root. Defaults to cwd.")
  },
  async ({ projectPath }) => {
    const root = path7.resolve(projectPath || process.cwd());
    try {
      const discovered = await scanForContextFiles(root);
      const parsed = discovered.map((f) => parseContextFile(f));
      const files = parsed.map((f) => ({
        path: f.relativePath,
        tokens: f.totalTokens,
        lines: f.totalLines,
        isSymlink: f.isSymlink
      }));
      const totalTokens = files.reduce((sum, f) => sum + f.tokens, 0);
      return {
        content: [
          {
            type: "text",
            text: JSON.stringify({ files, totalTokens }, null, 2)
          }
        ]
      };
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      return {
        content: [{ type: "text", text: JSON.stringify({ error: msg }) }],
        isError: true
      };
    } finally {
      // Release the tiktoken encoder allocated while counting tokens.
      freeEncoder();
    }
  }
);
1011
// Orchestrates a full audit: scans and parses every context file, runs the
// requested per-file checks, then the cross-file checks (aggregate tokens,
// duplicate content — both attached to the first file's issue list), and
// finally builds the summary, including the estimated token waste parsed
// back out of redundancy suggestions ("~N tokens could be saved").
async function runAudit(projectRoot, activeChecks) {
  const discovered = await scanForContextFiles(projectRoot);
  const parsed = discovered.map((f) => parseContextFile(f));
  const fileResults = [];
  for (const file of parsed) {
    const issues = [];
    if (activeChecks.includes("paths")) issues.push(...await checkPaths(file, projectRoot));
    if (activeChecks.includes("commands")) issues.push(...await checkCommands(file, projectRoot));
    if (activeChecks.includes("staleness"))
      issues.push(...await checkStaleness(file, projectRoot));
    if (activeChecks.includes("tokens")) issues.push(...await checkTokens(file, projectRoot));
    if (activeChecks.includes("redundancy"))
      issues.push(...await checkRedundancy(file, projectRoot));
    fileResults.push({
      path: file.relativePath,
      isSymlink: file.isSymlink,
      symlinkTarget: file.symlinkTarget,
      tokens: file.totalTokens,
      lines: file.totalLines,
      issues
    });
  }
  // Cross-file check: combined token footprint of all context files.
  if (activeChecks.includes("tokens")) {
    const aggIssue = checkAggregateTokens(
      fileResults.map((f) => ({ path: f.path, tokens: f.tokens }))
    );
    if (aggIssue && fileResults.length > 0) fileResults[0].issues.push(aggIssue);
  }
  // Cross-file check: near-duplicate context files.
  if (activeChecks.includes("redundancy")) {
    const dupIssues = checkDuplicateContent(parsed);
    if (dupIssues.length > 0 && fileResults.length > 0) fileResults[0].issues.push(...dupIssues);
  }
  // Sum the per-line savings estimates embedded in redundancy suggestions.
  let estimatedWaste = 0;
  for (const fr of fileResults) {
    for (const issue of fr.issues) {
      if (issue.check === "redundancy" && issue.suggestion) {
        const tokenMatch = issue.suggestion.match(/~(\d+)\s+tokens/);
        if (tokenMatch) estimatedWaste += parseInt(tokenMatch[1], 10);
      }
    }
  }
  return {
    version: VERSION,
    scannedAt: (/* @__PURE__ */ new Date()).toISOString(),
    projectRoot,
    files: fileResults,
    summary: {
      errors: fileResults.reduce(
        (sum, f) => sum + f.issues.filter((i) => i.severity === "error").length,
        0
      ),
      warnings: fileResults.reduce(
        (sum, f) => sum + f.issues.filter((i) => i.severity === "warning").length,
        0
      ),
      info: fileResults.reduce(
        (sum, f) => sum + f.issues.filter((i) => i.severity === "info").length,
        0
      ),
      totalTokens: fileResults.reduce((sum, f) => sum + f.tokens, 0),
      estimatedWaste
    }
  };
}
1075
// Entry point: serve MCP requests over stdio (top-level await — ESM).
var transport = new StdioServerTransport();
await server.connect(transport);