pumuki-ast-hooks 5.5.37 → 5.5.40

This diff shows the changes between publicly released versions of the package as published to its public registry. It is provided for informational purposes only.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pumuki-ast-hooks",
-  "version": "5.5.37",
+  "version": "5.5.40",
   "description": "Enterprise-grade AST Intelligence System with multi-platform support (iOS, Android, Backend, Frontend) and Feature-First + DDD + Clean Architecture enforcement. Includes dynamic violations API for intelligent querying.",
   "main": "index.js",
   "bin": {
@@ -157,7 +157,10 @@ class InstallService {
     this.logStep('7/8', 'Installing Git hooks...');
     this.gitService.installGitHooks();

-    this.logStep('7.5/8', 'Configuring VS Code/Cursor tasks for auto-start...');
+    this.logStep('7.5/8', 'Cleaning up duplicate rule files...');
+    this.cleanupDuplicateRules();
+
+    this.logStep('7.75/8', 'Configuring VS Code/Cursor tasks for auto-start...');
     this.ideIntegration.configureVSCodeTasks();

     this.logStep('8/8', 'Adding npm scripts to package.json...');
@@ -205,6 +208,55 @@ class InstallService {
     }
   }

+  cleanupDuplicateRules() {
+    const cleanupEnabled = env.getBool('HOOK_CLEANUP_DUPLICATES', false);
+    if (!cleanupEnabled) {
+      this.logStep('7.75/8', 'Skipping duplicate cleanup (disabled via HOOK_CLEANUP_DUPLICATES)');
+      return;
+    }
+
+    this.logStep('7.75/8', 'Cleaning up duplicate rule files (.md when .mdc exists)...');
+
+    const rulesDirs = [
+      path.join(this.targetRoot, '.cursor', 'rules'),
+      path.join(this.targetRoot, '.windsurf', 'rules')
+    ];
+
+    let deletedCount = 0;
+
+    for (const rulesDir of rulesDirs) {
+      if (!fs.existsSync(rulesDir)) {
+        continue;
+      }
+
+      try {
+        const files = fs.readdirSync(rulesDir);
+        for (const file of files) {
+          if (!file.endsWith('.md')) {
+            continue;
+          }
+
+          const mdPath = path.join(rulesDir, file);
+          const mdcPath = path.join(rulesDir, file + 'c');
+
+          if (fs.existsSync(mdcPath)) {
+            fs.unlinkSync(mdPath);
+            deletedCount++;
+            this.logger.info('DUPLICATE_RULE_DELETED', { file: mdPath });
+          }
+        }
+      } catch (error) {
+        this.logWarning(`Failed to cleanup duplicates in ${rulesDir}: ${error.message}`);
+      }
+    }
+
+    if (deletedCount > 0) {
+      this.logSuccess(`Cleaned up ${deletedCount} duplicate .md files`);
+    } else {
+      this.logSuccess('No duplicate .md files found');
+    }
+  }
+
   printHeader() {
     const versionPadded = `v${this.version}`.padStart(24).padEnd(48);
     process.stdout.write(`${COLORS.blue}
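Note on the cleanup step above: it is gated on the HOOK_CLEANUP_DUPLICATES environment variable (off by default) and treats name.mdc as the canonical copy of name.md, pairing the files by appending a trailing 'c' to the filename. A minimal standalone sketch of that pairing, using only Node's built-in fs and path modules (findDuplicateRuleFiles is an illustrative helper, not part of the package):

const fs = require('fs');
const path = require('path');

// Return the .md files in rulesDir whose .mdc sibling exists ('test.md' pairs with 'test.mdc').
function findDuplicateRuleFiles(rulesDir) {
  if (!fs.existsSync(rulesDir)) {
    return [];
  }
  return fs.readdirSync(rulesDir)
    .filter(file => file.endsWith('.md'))
    .filter(file => fs.existsSync(path.join(rulesDir, file + 'c')))
    .map(file => path.join(rulesDir, file));
}

// Example: findDuplicateRuleFiles('.cursor/rules') might return ['.cursor/rules/test.md'],
// which the installer then deletes, keeping test.mdc.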
@@ -9,3 +9,86 @@ describe('install', () => {
     expect(() => require.resolve('../install.js')).not.toThrow();
   });
 });
+
+describe('InstallService - cleanupDuplicateRules', () => {
+  const fs = require('fs');
+  const path = require('path');
+  const InstallService = require('../../application/services/installation/InstallService');
+
+  let testRoot;
+  let service;
+
+  beforeEach(() => {
+    testRoot = path.join(__dirname, '.tmp-cleanup-test');
+    fs.mkdirSync(testRoot, { recursive: true });
+    service = new InstallService();
+    service.targetRoot = testRoot;
+  });
+
+  afterEach(() => {
+    if (fs.existsSync(testRoot)) {
+      fs.rmSync(testRoot, { recursive: true, force: true });
+    }
+  });
+
+  it('should skip cleanup when HOOK_CLEANUP_DUPLICATES is false', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'false';
+    service.cleanupDuplicateRules();
+    expect(fs.existsSync(testRoot)).toBe(true);
+  });
+
+  it('should skip cleanup when HOOK_CLEANUP_DUPLICATES is not set', () => {
+    delete process.env.HOOK_CLEANUP_DUPLICATES;
+    service.cleanupDuplicateRules();
+    expect(fs.existsSync(testRoot)).toBe(true);
+  });
+
+  it('should delete .md files when .mdc exists and cleanup is enabled', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'true';
+
+    const cursorRules = path.join(testRoot, '.cursor', 'rules');
+    fs.mkdirSync(cursorRules, { recursive: true });
+
+    fs.writeFileSync(path.join(cursorRules, 'test.md'), 'duplicate');
+    fs.writeFileSync(path.join(cursorRules, 'test.mdc'), 'canonical');
+
+    service.cleanupDuplicateRules();
+
+    expect(fs.existsSync(path.join(cursorRules, 'test.md'))).toBe(false);
+    expect(fs.existsSync(path.join(cursorRules, 'test.mdc'))).toBe(true);
+  });
+
+  it('should not delete .md files when .mdc does not exist', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'true';
+
+    const cursorRules = path.join(testRoot, '.cursor', 'rules');
+    fs.mkdirSync(cursorRules, { recursive: true });
+
+    fs.writeFileSync(path.join(cursorRules, 'test.md'), 'only md');
+
+    service.cleanupDuplicateRules();
+
+    expect(fs.existsSync(path.join(cursorRules, 'test.md'))).toBe(true);
+  });
+
+  it('should cleanup both .cursor and .windsurf rules directories', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'true';
+
+    const cursorRules = path.join(testRoot, '.cursor', 'rules');
+    const windsurfRules = path.join(testRoot, '.windsurf', 'rules');
+    fs.mkdirSync(cursorRules, { recursive: true });
+    fs.mkdirSync(windsurfRules, { recursive: true });
+
+    fs.writeFileSync(path.join(cursorRules, 'cursor.md'), 'duplicate');
+    fs.writeFileSync(path.join(cursorRules, 'cursor.mdc'), 'canonical');
+    fs.writeFileSync(path.join(windsurfRules, 'windsurf.md'), 'duplicate');
+    fs.writeFileSync(path.join(windsurfRules, 'windsurf.mdc'), 'canonical');
+
+    service.cleanupDuplicateRules();
+
+    expect(fs.existsSync(path.join(cursorRules, 'cursor.md'))).toBe(false);
+    expect(fs.existsSync(path.join(cursorRules, 'cursor.mdc'))).toBe(true);
+    expect(fs.existsSync(path.join(windsurfRules, 'windsurf.md'))).toBe(false);
+    expect(fs.existsSync(path.join(windsurfRules, 'windsurf.mdc'))).toBe(true);
+  });
+});
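These tests set process.env.HOOK_CLEANUP_DUPLICATES directly and use afterEach only for filesystem cleanup. When the flag needs to be isolated per test as well, a common Jest pattern is to snapshot and restore it around each case; a sketch of that pattern (illustrative only, not part of the published test file):

let savedFlag;

beforeEach(() => {
  // Remember whatever value (or absence) the variable had before the test.
  savedFlag = process.env.HOOK_CLEANUP_DUPLICATES;
});

afterEach(() => {
  // Restore the original state so the flag cannot leak into other test files.
  if (savedFlag === undefined) {
    delete process.env.HOOK_CLEANUP_DUPLICATES;
  } else {
    process.env.HOOK_CLEANUP_DUPLICATES = savedFlag;
  }
});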
@@ -95,6 +95,68 @@ function summarizeRulesContent(content) {
   return firstLine.length > 0 ? firstLine : `loaded (${content.length} chars)`;
 }

+function buildAutoContextFrontmatter(detectedPlatforms) {
+  const platforms = Array.isArray(detectedPlatforms) ? detectedPlatforms.filter(Boolean) : [];
+  const generated = formatLocalTimestamp();
+  const platformStr = platforms.length > 0 ? platforms.join(', ') : 'none';
+  return `---\nalwaysApply: true\ndescription: Auto-generated context for detected platforms\nplatforms: ${platformStr}\ngenerated: ${generated}\n---\n\n`;
+}
+
+async function buildAutoContextContent(platformsEvidence) {
+  const loader = new DynamicRulesLoader();
+  const detectedPlatforms = ['backend', 'frontend', 'ios', 'android']
+    .filter(p => platformsEvidence && platformsEvidence[p] && platformsEvidence[p].detected);
+
+  const files = ['rulesgold.mdc', ...detectedPlatforms.map(p => loader.rulesMap[p]).filter(Boolean)];
+  const uniqueFiles = Array.from(new Set(files));
+
+  let content = buildAutoContextFrontmatter(detectedPlatforms);
+
+  for (const file of uniqueFiles) {
+    let ruleContent = null;
+    try {
+      ruleContent = await loader.loadRule(file);
+    } catch {
+      ruleContent = null;
+    }
+
+    content += `## Source: ${file}\n\n`;
+    content += ruleContent ? `${ruleContent}\n\n` : `not found\n\n`;
+    content += `---\n\n`;
+  }
+
+  return content;
+}
+
+async function writeAutoContextFiles(platformsEvidence) {
+  try {
+    const pkgPath = path.join(process.cwd(), 'package.json');
+    if (fs.existsSync(pkgPath)) {
+      const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
+      if (pkg && pkg.name === 'pumuki-ast-hooks') {
+        return;
+      }
+    }
+  } catch (error) {
+    process.stderr.write(`[Intelligent Audit] ⚠️ Failed to inspect package.json for auto-context skip logic (${toErrorMessage(error)})\n`);
+  }
+
+  const content = await buildAutoContextContent(platformsEvidence);
+  const targets = [
+    path.join(process.cwd(), '.cursor', 'rules', 'auto-context.mdc'),
+    path.join(process.cwd(), '.windsurf', 'rules', 'auto-context.mdc')
+  ];
+
+  for (const target of targets) {
+    try {
+      await fs.promises.mkdir(path.dirname(target), { recursive: true });
+      await fs.promises.writeFile(target, content, 'utf-8');
+    } catch (error) {
+      process.stderr.write(`[Intelligent Audit] ⚠️ Failed to write auto-context: ${target} (${toErrorMessage(error)})\n`);
+    }
+  }
+}
+
 async function buildRulesReadEvidence(platformsEvidence) {
   const loader = new DynamicRulesLoader();
   const entries = [];
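The auto-context writer above concatenates a YAML frontmatter block and one section per resolved rule file, then writes the result to .cursor/rules/auto-context.mdc and .windsurf/rules/auto-context.mdc (it skips the pumuki-ast-hooks repository itself). Based on the template strings in buildAutoContextFrontmatter and buildAutoContextContent, the generated file starts roughly like this (platform list and timestamp are example values):

---
alwaysApply: true
description: Auto-generated context for detected platforms
platforms: backend, frontend
generated: 2025-06-01T12:00:00.000+02:00
---

## Source: rulesgold.mdc

(rule content, or "not found" when the loader cannot resolve the file)

---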
@@ -158,6 +220,21 @@ function formatLocalTimestamp(date = new Date()) {
   return `${year}-${month}-${day}T${hours}:${minutes}:${seconds}.${milliseconds}${sign}${offsetHours}:${offsetMins}`;
 }

+function normalizePathForMatch(value) {
+  const s = String(value || '');
+  const normalized = path.normalize(s).replace(/\\/g, '/');
+  return normalized;
+}
+
+function toRepoRelativePath(filePath) {
+  const normalized = normalizePathForMatch(filePath);
+  const cwd = normalizePathForMatch(process.cwd());
+  if (normalized.startsWith(cwd + '/')) {
+    return normalized.slice(cwd.length + 1);
+  }
+  return normalized;
+}
+
 function resolveAuditTmpDir() {
   const configured = (env.get('AUDIT_TMP', '') || '').trim();
   if (configured.length > 0) {
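toRepoRelativePath normalizes a path, converts backslashes to forward slashes, and strips the current working directory prefix when present; paths outside the working directory pass through unchanged. A small sketch of the expected behavior, with the working directory passed explicitly for illustration (the sample paths are made up):

const path = require('path');

function normalizePathForMatch(value) {
  return path.normalize(String(value || '')).replace(/\\/g, '/');
}

// Same logic as the new helper, but cwd is an explicit parameter for the example.
function toRepoRelativePath(filePath, cwd) {
  const normalized = normalizePathForMatch(filePath);
  const root = normalizePathForMatch(cwd);
  return normalized.startsWith(root + '/') ? normalized.slice(root.length + 1) : normalized;
}

console.log(toRepoRelativePath('/repo/src/Feature/File.js', '/repo')); // 'src/Feature/File.js'
console.log(toRepoRelativePath('src\\Feature\\File.js', '/repo'));     // 'src/Feature/File.js'
console.log(toRepoRelativePath('other/File.js', '/repo'));             // 'other/File.js' (already relative)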
@@ -189,9 +266,23 @@ async function runIntelligentAudit() {
     violationsForEvidence = rawViolations;
   } else {
     const stagedFiles = getStagedFiles();
-    const stagedViolations = rawViolations.filter(v =>
-      stagedFiles.some(sf => v.filePath && v.filePath.includes(sf))
-    );
+    const stagedSet = new Set((Array.isArray(stagedFiles) ? stagedFiles : []).map(toRepoRelativePath));
+
+    const stagedViolations = rawViolations.filter(v => {
+      const violationPath = toRepoRelativePath(v.filePath || v.file || '');
+      if (!violationPath) {
+        return false;
+      }
+      if (stagedSet.has(violationPath)) {
+        return true;
+      }
+      for (const sf of stagedSet) {
+        if (sf && (violationPath === sf || violationPath.endsWith('/' + sf) || violationPath.includes('/' + sf))) {
+          return true;
+        }
+      }
+      return false;
+    });

     console.log(`[Intelligent Audit] Gate scope: STAGING (${stagedFiles.length} files)`);
     console.log(`[Intelligent Audit] Filtered to ${stagedViolations.length} violations in staged files`);
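The rewritten staging filter compares normalized, repo-relative paths: a violation is kept if its path is exactly in the staged set, or if it matches a staged entry as a '/'-bounded suffix or substring. A compact sketch of those matching rules on sample values (paths are illustrative):

// Same matching rules as the filter above, extracted for illustration.
function matchesStaged(violationPath, stagedSet) {
  if (stagedSet.has(violationPath)) return true;
  for (const sf of stagedSet) {
    if (sf && (violationPath === sf || violationPath.endsWith('/' + sf) || violationPath.includes('/' + sf))) {
      return true;
    }
  }
  return false;
}

const staged = new Set(['src/Feature/UserService.js']);
console.log(matchesStaged('src/Feature/UserService.js', staged));            // true  (exact match)
console.log(matchesStaged('/abs/clone/src/Feature/UserService.js', staged)); // true  ('/'-bounded suffix)
console.log(matchesStaged('src/Feature/UserServiceSpec.js', staged));         // false (different file)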
@@ -440,6 +531,8 @@ async function updateAIEvidence(violations, gateResult, tokenUsage) {
   const platformsEvidence = buildPlatformsEvidence(stagedFiles, violations);
   const rulesRead = await buildRulesReadEvidence(platformsEvidence);

+  await writeAutoContextFiles(platformsEvidence);
+
   evidence.rules_read = rulesRead.entries;
   evidence.rules_read_flags = rulesRead.legacyFlags;

@@ -9,6 +9,20 @@ const RecommendationGenerator = require('./generators/RecommendationGenerator');
 const ContextMultiplier = require('./scorers/ContextMultiplier');
 const SeverityMapper = require('./mappers/SeverityMapper');

+function applySeverityFloor(originalSeverity, evaluatedSeverity) {
+  const order = {
+    LOW: 1,
+    MEDIUM: 2,
+    HIGH: 3,
+    CRITICAL: 4
+  };
+
+  const original = order[String(originalSeverity || '').toUpperCase()] || order.MEDIUM;
+  const evaluated = order[String(evaluatedSeverity || '').toUpperCase()] || original;
+
+  return evaluated >= original ? String(evaluatedSeverity || originalSeverity || 'MEDIUM').toUpperCase() : String(originalSeverity).toUpperCase();
+}
+
 /**
  * Main severity evaluator
  * Analyzes violations across 4 dimensions: Security, Performance, Stability, Maintainability
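applySeverityFloor prevents the evaluator from reporting a violation below its original severity: the evaluated label wins only when it ranks at least as high as the original, and unrecognized labels fall back to MEDIUM for the original and to the original's rank for the evaluated value. Worked examples, reading the implementation above:

applySeverityFloor('HIGH', 'MEDIUM')  // -> 'HIGH'     (downgrade blocked by the floor)
applySeverityFloor('LOW', 'CRITICAL') // -> 'CRITICAL' (upgrades still pass through)
applySeverityFloor('medium', 'high')  // -> 'HIGH'     (labels are compared case-insensitively)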
@@ -98,10 +112,13 @@ function evaluateViolations(violations) {
   return violations.map(violation => {
     const evaluation = evaluator.evaluate(violation);

+    const baseSeverity = violation.severity;
+    const finalSeverity = applySeverityFloor(baseSeverity, evaluation.severity);
+
     return {
       ...violation,
       originalSeverity: violation.severity,
-      severity: evaluation.severity,
+      severity: finalSeverity,
       severityScore: evaluation.score,
       baseScore: evaluation.baseScore,
       impactBreakdown: evaluation.breakdown,