cursor-doctor 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 nedcodes
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,99 @@
1
+ # cursor-doctor
2
+
3
+ **Fix your Cursor AI setup in seconds.**
4
+
5
+ Run one command to find out what's wrong with your `.cursor/` config and how to fix it.
6
+
7
+ ```
8
+ $ npx cursor-doctor scan
9
+
10
+ ✓ Rules exist: .cursor/rules/ found with .mdc files
11
+ ✗ No legacy .cursorrules: .cursorrules exists alongside .mdc rules — may cause conflicts
12
+ ! Lint checks: 3 errors, 2 warnings. Run `cursor-doctor fix` to repair.
13
+ ! Token budget: ~4,200 tokens — getting heavy. Consider trimming.
14
+ ✓ Coverage: Rules cover your project file types
15
+ i Agent skills: No agent skills found
16
+
17
+ Health Score: C (62%)
18
+
19
+ 3 issues can be auto-fixed. Run cursor-doctor fix (Pro)
20
+ ```
21
+
22
+ ## Install
23
+
24
+ ```bash
25
+ npx cursor-doctor scan
26
+ ```
27
+
28
+ No install needed. Runs directly with npx. Zero dependencies.
29
+
30
+ ## What It Checks
31
+
32
+ | Check | What it does |
33
+ |-------|-------------|
34
+ | **Rules exist** | Verifies you have `.cursor/rules/*.mdc` files |
35
+ | **Legacy files** | Flags `.cursorrules` that should be migrated to `.mdc` |
36
+ | **Lint** | 20+ checks: broken YAML, missing frontmatter, vague rules, conflicts |
37
+ | **Token budget** | Estimates how many tokens your rules consume per request |
38
+ | **Coverage** | Detects project file types with no matching rules |
39
+ | **Skills** | Checks for agent skill definitions |
40
+ | **Conflicts** | Finds contradictory instructions across rule files |
41
+ | **Redundancy** | Spots duplicate content between rules |
42
+
43
+ ## Commands
44
+
45
+ ### Free
46
+
47
+ ```bash
48
+ # Health score + issue list
49
+ cursor-doctor scan
50
+
51
+ # CI-friendly: one line per issue, exit code 0/1
52
+ cursor-doctor check
53
+
54
+ # Convert .cursorrules to .cursor/rules/*.mdc
55
+ cursor-doctor migrate
56
+ ```
57
+
58
+ ### Pro ($9 one-time)
59
+
60
+ ```bash
61
+ # Full diagnostic report: conflicts, redundancy, token budget, stack detection
62
+ cursor-doctor audit
63
+
64
+ # Export as markdown
65
+ cursor-doctor audit --md > report.md
66
+
67
+ # Auto-fix: repair frontmatter, split oversized files, resolve issues
68
+ cursor-doctor fix
69
+
70
+ # Preview fixes without writing
71
+ cursor-doctor fix --dry-run
72
+
73
+ # Activate your license
74
+ cursor-doctor activate <key>
75
+ ```
76
+
77
+ **Get a Pro key:** [nedcodes.gumroad.com/l/cursor-doctor](https://nedcodes.gumroad.com/l/cursor-doctor)
78
+
79
+ ## Why?
80
+
81
+ Cursor's AI reads your `.cursor/rules/` directory to understand how you want code written. But most setups have problems:
82
+
83
+ - Rules with broken YAML frontmatter that Cursor silently ignores
84
+ - `alwaysApply: true` on everything, burning tokens on irrelevant rules
85
+ - Conflicting instructions across files ("use semicolons" in one, "no semicolons" in another)
86
+ - Legacy `.cursorrules` files that conflict with `.mdc` rules
87
+ - 5,000+ tokens of rules eating into your context window every request
88
+
89
+ cursor-doctor finds these problems and fixes them.
90
+
91
+ ## From the makers of cursor-lint
92
+
93
+ cursor-doctor is the evolution of [cursor-lint](https://www.npmjs.com/package/cursor-lint) (1,800+ downloads). Same engine, broader scope, auto-fix capabilities.
94
+
95
+ If you're already using cursor-lint, cursor-doctor includes everything cursor-lint does plus diagnostics, conflict detection, and automated repair.
96
+
97
+ ## License
98
+
99
+ MIT
package/package.json ADDED
@@ -0,0 +1,38 @@
1
+ {
2
+ "name": "cursor-doctor",
3
+ "version": "1.0.0",
4
+ "description": "Fix your Cursor AI setup in seconds — health checks, diagnostics, and auto-repair for your .cursor config",
5
+ "main": "src/index.js",
6
+ "bin": {
7
+ "cursor-doctor": "src/cli.js"
8
+ },
9
+ "keywords": [
10
+ "cursor",
11
+ "cursor-ai",
12
+ "cursorrules",
13
+ "mdc",
14
+ "developer-tools",
15
+ "cursor-rules",
16
+ "cursor-config",
17
+ "ai-coding",
18
+ "code-quality",
19
+ "diagnostics",
20
+ "health-check"
21
+ ],
22
+ "author": "nedcodes",
23
+ "license": "MIT",
24
+ "homepage": "https://github.com/nedcodes-ok/cursor-doctor",
25
+ "repository": {
26
+ "type": "git",
27
+ "url": "https://github.com/nedcodes-ok/cursor-doctor.git"
28
+ },
29
+ "engines": {
30
+ "node": ">=16"
31
+ },
32
+ "files": [
33
+ "src/"
34
+ ],
35
+ "devDependencies": {
36
+ "playwright": "^1.58.2"
37
+ }
38
+ }
package/src/audit.js ADDED
@@ -0,0 +1,342 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const { lintProject, parseFrontmatter } = require('./index');
4
+ const { showStats } = require('./stats');
5
+
6
// Best-effort detection of the project's stack from manifest and lock
// files: JS frameworks/languages from package.json dependencies, plus
// Python/Ruby/Go/Rust markers from well-known files.
// Returns { frameworks: string[], languages: string[], packageManager: string|null }.
function detectStack(dir) {
  const stack = { frameworks: [], languages: [], packageManager: null };

  const pkgPath = path.join(dir, 'package.json');
  if (fs.existsSync(pkgPath)) {
    try {
      const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
      const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };

      if (allDeps.next) stack.frameworks.push(`Next.js ${allDeps.next}`);
      if (allDeps.react) stack.frameworks.push(`React ${allDeps.react}`);
      if (allDeps.vue) stack.frameworks.push(`Vue ${allDeps.vue}`);
      if (allDeps.svelte || allDeps['@sveltejs/kit']) stack.frameworks.push('SvelteKit');
      if (allDeps.express) stack.frameworks.push(`Express ${allDeps.express}`);
      if (allDeps['@nestjs/core']) stack.frameworks.push('NestJS');
      if (allDeps['@angular/core']) stack.frameworks.push('Angular');
      if (allDeps.tailwindcss) stack.frameworks.push('Tailwind CSS');
      if (allDeps.prisma || allDeps['@prisma/client']) stack.frameworks.push('Prisma');
      if (allDeps.drizzle || allDeps['drizzle-orm']) stack.frameworks.push('Drizzle');

      if (allDeps.typescript) stack.languages.push('TypeScript');
      stack.languages.push('JavaScript');
    } catch {
      // Malformed package.json — skip dependency-based detection (best effort).
    }

    // Lock-file detection does not depend on package.json parsing, so run it
    // even when the manifest is malformed (the original skipped it then).
    // Also recognize bun's newer text lockfile (bun.lock) alongside bun.lockb.
    if (fs.existsSync(path.join(dir, 'pnpm-lock.yaml'))) stack.packageManager = 'pnpm';
    else if (fs.existsSync(path.join(dir, 'yarn.lock'))) stack.packageManager = 'yarn';
    else if (fs.existsSync(path.join(dir, 'bun.lockb')) || fs.existsSync(path.join(dir, 'bun.lock'))) stack.packageManager = 'bun';
    else stack.packageManager = 'npm';
  }

  if (fs.existsSync(path.join(dir, 'requirements.txt')) || fs.existsSync(path.join(dir, 'pyproject.toml'))) {
    stack.languages.push('Python');
    if (fs.existsSync(path.join(dir, 'manage.py'))) stack.frameworks.push('Django');
  }
  if (fs.existsSync(path.join(dir, 'Gemfile'))) {
    stack.languages.push('Ruby');
    if (fs.existsSync(path.join(dir, 'config', 'routes.rb'))) stack.frameworks.push('Rails');
  }
  if (fs.existsSync(path.join(dir, 'go.mod'))) stack.languages.push('Go');
  if (fs.existsSync(path.join(dir, 'Cargo.toml'))) stack.languages.push('Rust');

  return stack;
}
49
+
50
// Compare every pair of rules and report ones carrying contradictory
// instructions. A pair is only examined when both rules can be active at
// the same time: their globs overlap, or either rule is alwaysApply.
function findConflicts(rules) {
  const conflicts = [];

  rules.forEach((a, i) => {
    for (const b of rules.slice(i + 1)) {
      const canCoexist =
        a.alwaysApply ||
        b.alwaysApply ||
        (a.globs || []).some((ga) => (b.globs || []).some((gb) => globsOverlap(ga, gb)));
      if (!canCoexist) continue;

      // Bodies are lowercased so the contradiction patterns match uniformly.
      const reasons = findContradictions(a.body.toLowerCase(), b.body.toLowerCase());
      if (reasons.length === 0) continue;

      conflicts.push({
        fileA: a.file,
        fileB: b.file,
        reason: reasons.join('; '),
        severity: 'warning',
      });
    }
  });

  return conflicts;
}
84
+
85
// Heuristic overlap check for two glob patterns: identical patterns, a
// catch-all '**/*', or the same trailing "*.ext" extension all count as
// overlapping; anything else is treated as disjoint.
function globsOverlap(a, b) {
  if (a === b || a === '**/*' || b === '**/*') return true;

  const extOf = (glob) => {
    const m = glob.match(/\*\.(\w+)$/);
    return m ? m[1] : null;
  };

  const ext = extOf(a);
  return ext !== null && ext === extOf(b);
}
94
+
95
// Scan two rule bodies for contradictory style instructions using a fixed
// list of opposing regex pairs. Returns one message per contradiction found.
function findContradictions(a, b) {
  const contradictions = [];
  const pairs = [
    [/always use semicolons/i, /never use semicolons|no semicolons/i],
    [/use single quotes/i, /use double quotes/i],
    [/use tabs/i, /use spaces/i],
    [/use css modules/i, /use tailwind|use styled-components/i],
    [/use relative imports/i, /use absolute imports|use path aliases/i],
    [/prefer classes/i, /prefer functions|prefer functional/i],
    [/use default exports/i, /use named exports|no default exports/i],
    [/use arrow functions/i, /use function declarations|avoid arrow functions/i],
    [/use any/i, /never use any|avoid any|no any/i],
  ];

  // A text "sides with" a pattern only when it matches that pattern and NOT
  // the opposing one. Without the exclusion, text like "never use any"
  // matches BOTH /use any/ and /never use any/, so two rules that agree
  // were reported as conflicting with each other (false positive).
  const sidesWith = (text, pat, opposite) => pat.test(text) && !opposite.test(text);

  for (const [patA, patB] of pairs) {
    const conflict =
      (sidesWith(a, patA, patB) && sidesWith(b, patB, patA)) ||
      (sidesWith(a, patB, patA) && sidesWith(b, patA, patB));
    if (conflict) {
      contradictions.push(`Conflicting style: "${patA.source}" vs "${patB.source}"`);
    }
  }

  return contradictions;
}
117
+
118
// Flag pairs of rules whose bodies share most of their substantive lines.
// Lines are compared after trimming and only when longer than 10 chars;
// a pair counts as redundant when more than 60% of the smaller file's
// lines also appear in the other file.
function findRedundancy(rules) {
  const substantiveLines = (rule) =>
    new Set(
      rule.body
        .split('\n')
        .map((line) => line.trim())
        .filter((line) => line.length > 10)
    );

  const redundant = [];
  rules.forEach((a, i) => {
    const aLines = substantiveLines(a);
    for (const b of rules.slice(i + 1)) {
      const bLines = substantiveLines(b);
      if (aLines.size === 0 || bLines.size === 0) continue;

      let shared = 0;
      for (const line of aLines) {
        if (bLines.has(line)) shared += 1;
      }

      const ratio = shared / Math.min(aLines.size, bLines.size);
      if (ratio > 0.6) {
        redundant.push({
          fileA: a.file,
          fileB: b.file,
          overlapPct: Math.round(ratio * 100),
          sharedLines: shared,
        });
      }
    }
  });

  return redundant;
}
151
+
152
// Summarize token usage from a stats object: tokens loaded on every
// request (the "always" tier plus any legacy .cursorrules) versus the
// worst-case conditional load, with a per-file list sorted heaviest-first.
function tokenBudgetBreakdown(stats) {
  const files = stats.mdcFiles.map((f) => ({ file: f.file, tokens: f.tokens, tier: f.tier }));

  let alwaysLoaded = 0;
  let conditionalMax = 0;
  for (const f of files) {
    if (f.tier === 'always') alwaysLoaded += f.tokens;
    else conditionalMax += f.tokens;
  }

  if (stats.hasCursorrules) {
    alwaysLoaded += stats.cursorrulesTokens;
    files.unshift({ file: '.cursorrules', tokens: stats.cursorrulesTokens, tier: 'always' });
  }

  // Heaviest files first so reports surface the biggest offenders.
  files.sort((x, y) => y.tokens - x.tokens);

  return {
    alwaysLoaded,
    conditionalMax,
    total: stats.totalTokens,
    files,
  };
}
180
+
181
// Load every .mdc rule under <dir>/.cursor/rules into memory, parsing
// frontmatter and extracting the rule body, its glob list, and whether it
// is always applied. Returns [] when the rules directory does not exist.
function loadRules(dir) {
  const rulesDir = path.join(dir, '.cursor', 'rules');
  const rules = [];
  if (!fs.existsSync(rulesDir)) return rules;

  const mdcEntries = fs.readdirSync(rulesDir).filter((name) => name.endsWith('.mdc'));
  for (const entry of mdcEntries) {
    const content = fs.readFileSync(path.join(rulesDir, entry), 'utf-8');
    const fm = parseFrontmatter(content);
    // Body is everything after the frontmatter block (if any).
    const body = content.replace(/^---\n[\s\S]*?\n---\n?/, '');

    let globs = [];
    let alwaysApply = false;
    if (fm.found && fm.data) {
      alwaysApply = fm.data.alwaysApply === true;
      const rawGlobs = fm.data.globs;
      if (typeof rawGlobs === 'string') {
        const trimmed = rawGlobs.trim();
        if (trimmed.startsWith('[')) {
          // Inline YAML array: strip brackets, split, unquote each entry.
          globs = trimmed
            .slice(1, -1)
            .split(',')
            .map((g) => g.trim().replace(/^["']|["']$/g, ''))
            .filter(Boolean);
        } else {
          globs = [trimmed];
        }
      }
    }

    rules.push({ file: entry, content, body, globs, alwaysApply, fm });
  }

  return rules;
}
213
+
214
// Build a { title, items } section for the report.
function makeSection(title, items) {
  return { title, items };
}

// Flatten per-file lint results into a single issue list with severity counts.
function summarizeLint(lintResults) {
  let errors = 0;
  let warnings = 0;
  const issues = [];
  for (const result of lintResults) {
    for (const issue of result.issues) {
      if (issue.severity === 'error') errors++;
      else warnings++;
      issues.push({ file: result.file, ...issue });
    }
  }
  return { errors, warnings, issues };
}

// Derive actionable fix suggestions from the gathered audit data.
function suggestFixes({ stats, errors, budget, rules, redundant }) {
  const fixes = [];
  if (stats.hasCursorrules) {
    fixes.push({ text: 'Run `cursor-doctor migrate` to convert .cursorrules to .mdc format', type: 'fix' });
  }
  if (errors > 0) {
    fixes.push({ text: 'Run `cursor-doctor fix` to auto-fix frontmatter and structural issues', type: 'fix' });
  }
  for (const f of budget.files) {
    if (f.tokens > 2000) {
      fixes.push({ text: `Split ${f.file} into smaller focused rules (~${f.tokens} tokens is heavy)`, type: 'fix' });
    }
  }
  if (rules.filter((r) => r.alwaysApply).length > 5) {
    fixes.push({ text: 'Too many alwaysApply rules. Convert some to glob-targeted rules to save tokens.', type: 'fix' });
  }
  for (const r of redundant) {
    fixes.push({ text: `Merge or deduplicate ${r.fileA} and ${r.fileB}`, type: 'fix' });
  }
  return fixes;
}

// Run the full Pro audit over a project: stack detection, token budget,
// lint, conflicts, redundancy, coverage gaps, and suggested fixes.
// Returns a report with ordered display `sections` plus the raw data
// (stack, stats, budget, conflicts, redundant, lintErrors, lintWarnings)
// attached for programmatic use.
async function fullAudit(dir) {
  const stack = detectStack(dir);
  const stats = showStats(dir);
  const budget = tokenBudgetBreakdown(stats);
  const lint = summarizeLint(await lintProject(dir));
  const rules = loadRules(dir);
  const conflicts = findConflicts(rules);
  const redundant = findRedundancy(rules);

  const sections = [];

  // 1. Detected stack
  sections.push(makeSection('Detected Stack', [
    ...stack.frameworks.map((f) => ({ text: f, type: 'info' })),
    ...stack.languages.map((l) => ({ text: l, type: 'info' })),
    ...(stack.packageManager ? [{ text: `Package manager: ${stack.packageManager}`, type: 'info' }] : []),
  ]));

  // 2. Token budget (thresholds: >3000 always-loaded, >5000 total, >1500 per file)
  sections.push(makeSection('Token Budget', [
    { text: `Always loaded: ~${budget.alwaysLoaded} tokens`, type: budget.alwaysLoaded > 3000 ? 'warning' : 'info' },
    { text: `Conditional (max): ~${budget.conditionalMax} tokens`, type: 'info' },
    { text: `Total: ~${budget.total} tokens`, type: budget.total > 5000 ? 'warning' : 'info' },
    ...budget.files.map((f) => ({
      text: ` ${f.file}: ~${f.tokens} tokens (${f.tier})`,
      type: f.tokens > 1500 ? 'warning' : 'info',
    })),
  ]));

  // 3. Lint issues
  sections.push(makeSection('Lint Issues', lint.issues.length === 0
    ? [{ text: 'No issues found', type: 'pass' }]
    : lint.issues.map((i) => ({ text: `${i.file}: ${i.message}`, type: i.severity }))));

  // 4. Conflicts
  sections.push(makeSection('Conflicts', conflicts.length === 0
    ? [{ text: 'No conflicts detected', type: 'pass' }]
    : conflicts.map((c) => ({ text: `${c.fileA} vs ${c.fileB}: ${c.reason}`, type: c.severity }))));

  // 5. Redundancy
  sections.push(makeSection('Redundancy', redundant.length === 0
    ? [{ text: 'No redundant rules found', type: 'pass' }]
    : redundant.map((r) => ({
        text: `${r.fileA} and ${r.fileB}: ${r.overlapPct}% overlap (${r.sharedLines} shared lines)`,
        type: 'warning',
      }))));

  // 6. Coverage gaps
  sections.push(makeSection('Coverage Gaps', stats.coverageGaps.length === 0
    ? [{ text: 'All detected file types have matching rules', type: 'pass' }]
    : stats.coverageGaps.map((g) => ({
        text: `No rules for ${g.ext} files. Consider adding: ${g.suggestedRules.join(', ')}`,
        type: 'warning',
      }))));

  // 7. Suggested fixes
  const fixes = suggestFixes({ stats, errors: lint.errors, budget, rules, redundant });
  sections.push(makeSection('Suggested Fixes', fixes.length === 0
    ? [{ text: 'No fixes needed. Setup looks good.', type: 'pass' }]
    : fixes));

  return {
    sections,
    stack,
    stats,
    budget,
    conflicts,
    redundant,
    lintErrors: lint.errors,
    lintWarnings: lint.warnings,
  };
}
326
+
327
// Render an audit report as markdown: an H1 header, one H2 per section,
// and one icon-prefixed line per item (info items get the ℹ️ fallback).
function formatAuditMarkdown(report) {
  const ICONS = { pass: '✅', error: '❌', warning: '⚠️', fix: '🔧' };

  const lines = ['# cursor-doctor Audit Report', ''];
  for (const { title, items } of report.sections) {
    lines.push(`## ${title}`, '');
    for (const { text, type } of items) {
      lines.push(`${ICONS[type] ?? 'ℹ️'} ${text}`);
    }
    lines.push('');
  }

  return lines.join('\n') + '\n';
}
341
+
342
+ module.exports = { fullAudit, formatAuditMarkdown, detectStack, findConflicts, findRedundancy, tokenBudgetBreakdown, loadRules };
package/src/autofix.js ADDED
@@ -0,0 +1,171 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const { lintProject, parseFrontmatter } = require('./index');
4
+ const { loadRules, findRedundancy } = require('./audit');
5
+
6
// Repair an .mdc file's YAML frontmatter: prepend a minimal block when
// none exists, or normalize common YAML mistakes (missing space after
// ":", inconsistently quoted glob arrays) when the existing block failed
// to parse. Returns the (possibly unchanged) file content.
function fixFrontmatter(content) {
  const fm = parseFrontmatter(content);

  // No frontmatter at all — add a minimal, valid block on top.
  if (!fm.found) {
    return `---\ndescription: \nalwaysApply: false\n---\n${content}`;
  }

  // Frontmatter present and parseable — nothing to repair.
  if (!fm.error) return content;

  const match = content.match(/^---\n([\s\S]*?)\n---/);
  if (!match) return content;

  let yaml = match[1];
  // "key:value" -> "key: value"
  yaml = yaml.replace(/^(\w+):([^\s])/gm, '$1: $2');
  // Normalize glob arrays to consistently double-quoted entries.
  yaml = yaml.replace(/globs:\s*\[([^\]]*)\]/g, (whole, inner) => {
    const quoted = inner
      .split(',')
      .map((item) => `"${item.trim().replace(/^["']|["']$/g, '')}"`);
    return `globs: [${quoted.join(', ')}]`;
  });

  return content.replace(/^---\n[\s\S]*?\n---/, `---\n${yaml}\n---`);
}
36
+
37
// Split an oversized .mdc rule file (over maxTokens at ~4 chars/token)
// into smaller parts: by "## " section headers when present, otherwise by
// halving its paragraphs. Returns null when no split is needed or possible,
// else { original, parts: [{ body, suffix }], frontmatter }.
function splitOversizedFile(filePath, maxTokens = 1500) {
  const content = fs.readFileSync(filePath, 'utf-8');
  const tokens = Math.ceil(content.length / 4);

  if (tokens <= maxTokens) return null; // already within budget

  const fm = parseFrontmatter(content);
  const body = content.replace(/^---\n[\s\S]*?\n---\n?/, '');

  // Prefer splitting on "## " section headers.
  const sections = body.split(/(?=^## )/m).filter((s) => s.trim());

  if (sections.length <= 1) {
    // No headers — fall back to splitting the paragraphs in half.
    const paragraphs = body.split(/\n\n+/).filter((p) => p.trim());
    // Fewer than two paragraphs leaves nothing to split; the original
    // code produced an empty "-part2" file in this case.
    if (paragraphs.length < 2) return null;
    const mid = Math.ceil(paragraphs.length / 2);
    return {
      original: filePath,
      parts: [
        { body: paragraphs.slice(0, mid).join('\n\n'), suffix: '-part1' },
        { body: paragraphs.slice(mid).join('\n\n'), suffix: '-part2' },
      ],
      frontmatter: fm,
    };
  }

  // Greedily pack consecutive sections into parts capped at maxTokens.
  const parts = [];
  let current = [];
  let currentTokens = 0;

  for (const section of sections) {
    const sectionTokens = Math.ceil(section.length / 4);
    if (currentTokens + sectionTokens > maxTokens && current.length > 0) {
      parts.push(current.join('\n'));
      current = [section];
      currentTokens = sectionTokens;
    } else {
      current.push(section);
      currentTokens += sectionTokens;
    }
  }
  if (current.length > 0) parts.push(current.join('\n'));

  return {
    original: filePath,
    parts: parts.map((partBody, i) => ({ body: partBody, suffix: `-part${i + 1}` })),
    frontmatter: fm,
  };
}
87
+
88
// Serialize parsed frontmatter data back into YAML lines. (The original
// had three identical branches here — booleans, bracketed strings, and
// everything else all render as `key: value`.)
function frontmatterLines(data) {
  return Object.entries(data).map(([key, value]) => `${key}: ${value}`);
}

// Auto-repair a project's .cursor/rules: fix broken/missing frontmatter,
// split oversized rule files into -partN siblings, and flag redundant
// pairs for manual review. options: { dryRun, split, maxTokens }.
// With dryRun, nothing is written; results are still reported.
// Returns { fixed, splits, deduped, errors }.
async function autoFix(dir, options = {}) {
  const results = { fixed: [], splits: [], deduped: [], errors: [] };
  const rulesDir = path.join(dir, '.cursor', 'rules');

  if (!fs.existsSync(rulesDir)) {
    results.errors.push('No .cursor/rules/ directory found');
    return results;
  }

  // Re-read the directory for each pass so later passes see earlier writes.
  const mdcEntries = () => fs.readdirSync(rulesDir).filter((name) => name.endsWith('.mdc'));

  // 1. Repair broken frontmatter in place.
  for (const entry of mdcEntries()) {
    const filePath = path.join(rulesDir, entry);
    const original = fs.readFileSync(filePath, 'utf-8');
    const repaired = fixFrontmatter(original);

    if (repaired !== original) {
      if (!options.dryRun) {
        fs.writeFileSync(filePath, repaired, 'utf-8');
      }
      results.fixed.push({ file: entry, change: 'frontmatter repaired' });
    }
  }

  // 2. Split files over the token budget.
  if (options.split !== false) {
    for (const entry of mdcEntries()) {
      const filePath = path.join(rulesDir, entry);
      const split = splitOversizedFile(filePath, options.maxTokens || 1500);
      if (!split || split.parts.length <= 1) continue;

      // slice() instead of replace('.mdc', ''): replace strips the FIRST
      // ".mdc" occurrence and would corrupt names like "a.mdc.bak.mdc".
      const baseName = entry.slice(0, -'.mdc'.length);

      if (!options.dryRun) {
        for (const part of split.parts) {
          const newPath = path.join(rulesDir, `${baseName}${part.suffix}.mdc`);
          // Carry the original frontmatter over to every part.
          const newContent = split.frontmatter.found && split.frontmatter.data
            ? `---\n${frontmatterLines(split.frontmatter.data).join('\n')}\n---\n${part.body}`
            : part.body;
          fs.writeFileSync(newPath, newContent, 'utf-8');
        }
        // The parts fully replace the original file.
        fs.unlinkSync(filePath);
      }

      results.splits.push({
        file: entry,
        parts: split.parts.map((p) => `${baseName}${p.suffix}.mdc`),
      });
    }
  }

  // 3. Redundancy: report only — merging rule files is a judgment call.
  const rules = loadRules(dir);
  for (const r of findRedundancy(rules)) {
    results.deduped.push({
      fileA: r.fileA,
      fileB: r.fileB,
      overlapPct: r.overlapPct,
      action: 'manual review needed — run `cursor-doctor audit` for details',
    });
  }

  return results;
}
170
+
171
+ module.exports = { autoFix, fixFrontmatter, splitOversizedFile };