pumuki-ast-hooks 5.5.38 → 5.5.40

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pumuki-ast-hooks",
-  "version": "5.5.38",
+  "version": "5.5.40",
   "description": "Enterprise-grade AST Intelligence System with multi-platform support (iOS, Android, Backend, Frontend) and Feature-First + DDD + Clean Architecture enforcement. Includes dynamic violations API for intelligent querying.",
   "main": "index.js",
   "bin": {
@@ -157,7 +157,10 @@ class InstallService {
     this.logStep('7/8', 'Installing Git hooks...');
     this.gitService.installGitHooks();
 
-    this.logStep('7.5/8', 'Configuring VS Code/Cursor tasks for auto-start...');
+    this.logStep('7.5/8', 'Cleaning up duplicate rule files...');
+    this.cleanupDuplicateRules();
+
+    this.logStep('7.75/8', 'Configuring VS Code/Cursor tasks for auto-start...');
     this.ideIntegration.configureVSCodeTasks();
 
     this.logStep('8/8', 'Adding npm scripts to package.json...');
@@ -205,6 +208,55 @@ class InstallService {
     }
   }
 
+  cleanupDuplicateRules() {
+    const cleanupEnabled = env.getBool('HOOK_CLEANUP_DUPLICATES', false);
+    if (!cleanupEnabled) {
+      this.logStep('7.75/8', 'Skipping duplicate cleanup (disabled via HOOK_CLEANUP_DUPLICATES)');
+      return;
+    }
+
+    this.logStep('7.75/8', 'Cleaning up duplicate rule files (.md when .mdc exists)...');
+
+    const rulesDirs = [
+      path.join(this.targetRoot, '.cursor', 'rules'),
+      path.join(this.targetRoot, '.windsurf', 'rules')
+    ];
+
+    let deletedCount = 0;
+
+    for (const rulesDir of rulesDirs) {
+      if (!fs.existsSync(rulesDir)) {
+        continue;
+      }
+
+      try {
+        const files = fs.readdirSync(rulesDir);
+        for (const file of files) {
+          if (!file.endsWith('.md')) {
+            continue;
+          }
+
+          const mdPath = path.join(rulesDir, file);
+          const mdcPath = path.join(rulesDir, file + 'c');
+
+          if (fs.existsSync(mdcPath)) {
+            fs.unlinkSync(mdPath);
+            deletedCount++;
+            this.logger.info('DUPLICATE_RULE_DELETED', { file: mdPath });
+          }
+        }
+      } catch (error) {
+        this.logWarning(`Failed to cleanup duplicates in ${rulesDir}: ${error.message}`);
+      }
+    }
+
+    if (deletedCount > 0) {
+      this.logSuccess(`Cleaned up ${deletedCount} duplicate .md files`);
+    } else {
+      this.logSuccess('No duplicate .md files found');
+    }
+  }
+
   printHeader() {
     const versionPadded = `v${this.version}`.padStart(24).padEnd(48);
     process.stdout.write(`${COLORS.blue}
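Note: the cleanup step above treats a .md rule file as a duplicate only when a sibling file with the same name plus a trailing "c" (i.e. the .mdc variant) exists in the same rules directory. A minimal standalone sketch of that matching rule, for illustration only (findDuplicateMarkdownRules is a hypothetical helper name, not part of the package):

// Hypothetical sketch of the duplicate-detection rule shown in the hunk above.
const fs = require('fs');
const path = require('path');

function findDuplicateMarkdownRules(rulesDir) {
  if (!fs.existsSync(rulesDir)) return [];
  return fs.readdirSync(rulesDir)
    .filter(file => file.endsWith('.md'))
    // test.md counts as a duplicate only if test.mdc sits next to it
    .filter(file => fs.existsSync(path.join(rulesDir, file + 'c')))
    .map(file => path.join(rulesDir, file));
}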
@@ -9,3 +9,86 @@ describe('install', () => {
     expect(() => require.resolve('../install.js')).not.toThrow();
   });
 });
+
+describe('InstallService - cleanupDuplicateRules', () => {
+  const fs = require('fs');
+  const path = require('path');
+  const InstallService = require('../../application/services/installation/InstallService');
+
+  let testRoot;
+  let service;
+
+  beforeEach(() => {
+    testRoot = path.join(__dirname, '.tmp-cleanup-test');
+    fs.mkdirSync(testRoot, { recursive: true });
+    service = new InstallService();
+    service.targetRoot = testRoot;
+  });
+
+  afterEach(() => {
+    if (fs.existsSync(testRoot)) {
+      fs.rmSync(testRoot, { recursive: true, force: true });
+    }
+  });
+
+  it('should skip cleanup when HOOK_CLEANUP_DUPLICATES is false', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'false';
+    service.cleanupDuplicateRules();
+    expect(fs.existsSync(testRoot)).toBe(true);
+  });
+
+  it('should skip cleanup when HOOK_CLEANUP_DUPLICATES is not set', () => {
+    delete process.env.HOOK_CLEANUP_DUPLICATES;
+    service.cleanupDuplicateRules();
+    expect(fs.existsSync(testRoot)).toBe(true);
+  });
+
+  it('should delete .md files when .mdc exists and cleanup is enabled', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'true';
+
+    const cursorRules = path.join(testRoot, '.cursor', 'rules');
+    fs.mkdirSync(cursorRules, { recursive: true });
+
+    fs.writeFileSync(path.join(cursorRules, 'test.md'), 'duplicate');
+    fs.writeFileSync(path.join(cursorRules, 'test.mdc'), 'canonical');
+
+    service.cleanupDuplicateRules();
+
+    expect(fs.existsSync(path.join(cursorRules, 'test.md'))).toBe(false);
+    expect(fs.existsSync(path.join(cursorRules, 'test.mdc'))).toBe(true);
+  });
+
+  it('should not delete .md files when .mdc does not exist', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'true';
+
+    const cursorRules = path.join(testRoot, '.cursor', 'rules');
+    fs.mkdirSync(cursorRules, { recursive: true });
+
+    fs.writeFileSync(path.join(cursorRules, 'test.md'), 'only md');
+
+    service.cleanupDuplicateRules();
+
+    expect(fs.existsSync(path.join(cursorRules, 'test.md'))).toBe(true);
+  });
+
+  it('should cleanup both .cursor and .windsurf rules directories', () => {
+    process.env.HOOK_CLEANUP_DUPLICATES = 'true';
+
+    const cursorRules = path.join(testRoot, '.cursor', 'rules');
+    const windsurfRules = path.join(testRoot, '.windsurf', 'rules');
+    fs.mkdirSync(cursorRules, { recursive: true });
+    fs.mkdirSync(windsurfRules, { recursive: true });
+
+    fs.writeFileSync(path.join(cursorRules, 'cursor.md'), 'duplicate');
+    fs.writeFileSync(path.join(cursorRules, 'cursor.mdc'), 'canonical');
+    fs.writeFileSync(path.join(windsurfRules, 'windsurf.md'), 'duplicate');
+    fs.writeFileSync(path.join(windsurfRules, 'windsurf.mdc'), 'canonical');
+
+    service.cleanupDuplicateRules();
+
+    expect(fs.existsSync(path.join(cursorRules, 'cursor.md'))).toBe(false);
+    expect(fs.existsSync(path.join(cursorRules, 'cursor.mdc'))).toBe(true);
+    expect(fs.existsSync(path.join(windsurfRules, 'windsurf.md'))).toBe(false);
+    expect(fs.existsSync(path.join(windsurfRules, 'windsurf.mdc'))).toBe(true);
+  });
+});
@@ -220,6 +220,21 @@ function formatLocalTimestamp(date = new Date()) {
   return `${year}-${month}-${day}T${hours}:${minutes}:${seconds}.${milliseconds}${sign}${offsetHours}:${offsetMins}`;
 }
 
+function normalizePathForMatch(value) {
+  const s = String(value || '');
+  const normalized = path.normalize(s).replace(/\\/g, '/');
+  return normalized;
+}
+
+function toRepoRelativePath(filePath) {
+  const normalized = normalizePathForMatch(filePath);
+  const cwd = normalizePathForMatch(process.cwd());
+  if (normalized.startsWith(cwd + '/')) {
+    return normalized.slice(cwd.length + 1);
+  }
+  return normalized;
+}
+
 function resolveAuditTmpDir() {
   const configured = (env.get('AUDIT_TMP', '') || '').trim();
   if (configured.length > 0) {
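Note: a rough illustration of what the two new path helpers return, assuming a POSIX process.cwd() of /repo (the values below are made up; only the helper names come from the hunk above):

// normalizePathForMatch('src\\app\\index.js')   -> 'src/app/index.js'  (backslashes collapsed to forward slashes)
// toRepoRelativePath('/repo/src/app/index.js')  -> 'src/app/index.js'  (cwd prefix stripped)
// toRepoRelativePath('other/path.js')           -> 'other/path.js'     (already relative, returned unchanged)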
@@ -251,9 +266,23 @@ async function runIntelligentAudit() {
     violationsForEvidence = rawViolations;
   } else {
     const stagedFiles = getStagedFiles();
-    const stagedViolations = rawViolations.filter(v =>
-      stagedFiles.some(sf => v.filePath && v.filePath.includes(sf))
-    );
+    const stagedSet = new Set((Array.isArray(stagedFiles) ? stagedFiles : []).map(toRepoRelativePath));
+
+    const stagedViolations = rawViolations.filter(v => {
+      const violationPath = toRepoRelativePath(v.filePath || v.file || '');
+      if (!violationPath) {
+        return false;
+      }
+      if (stagedSet.has(violationPath)) {
+        return true;
+      }
+      for (const sf of stagedSet) {
+        if (sf && (violationPath === sf || violationPath.endsWith('/' + sf) || violationPath.includes('/' + sf))) {
+          return true;
+        }
+      }
+      return false;
+    });
 
     console.log(`[Intelligent Audit] Gate scope: STAGING (${stagedFiles.length} files)`);
     console.log(`[Intelligent Audit] Filtered to ${stagedViolations.length} violations in staged files`);
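Note: the rewritten filter keeps a violation when its repo-relative path matches a staged path exactly, ends with '/' plus a staged path, or contains '/' plus a staged path. Illustrative behaviour with made-up values (not taken from the package):

// stagedSet = { 'src/app/index.js' }
//   violationPath 'src/app/index.js'               -> kept    (exact match)
//   violationPath 'packages/web/src/app/index.js'  -> kept    (ends with '/' + staged path)
//   violationPath 'src/app/other.js'               -> dropped (no match)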
@@ -9,6 +9,20 @@ const RecommendationGenerator = require('./generators/RecommendationGenerator');
 const ContextMultiplier = require('./scorers/ContextMultiplier');
 const SeverityMapper = require('./mappers/SeverityMapper');
 
+function applySeverityFloor(originalSeverity, evaluatedSeverity) {
+  const order = {
+    LOW: 1,
+    MEDIUM: 2,
+    HIGH: 3,
+    CRITICAL: 4
+  };
+
+  const original = order[String(originalSeverity || '').toUpperCase()] || order.MEDIUM;
+  const evaluated = order[String(evaluatedSeverity || '').toUpperCase()] || original;
+
+  return evaluated >= original ? String(evaluatedSeverity || originalSeverity || 'MEDIUM').toUpperCase() : String(originalSeverity).toUpperCase();
+}
+
 /**
  * Main severity evaluator
  * Analyzes violations across 4 dimensions: Security, Performance, Stability, Maintainability
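Note: worked examples of applySeverityFloor as defined above, derived directly from the code (not from package documentation). The floor keeps the evaluated severity from dropping below the original one:

// applySeverityFloor('HIGH', 'LOW')         -> 'HIGH'      (evaluation cannot lower the original severity)
// applySeverityFloor('LOW', 'CRITICAL')     -> 'CRITICAL'  (raising severity is allowed)
// applySeverityFloor(undefined, undefined)  -> 'MEDIUM'    (missing values fall back to MEDIUM)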
@@ -98,10 +112,13 @@ function evaluateViolations(violations) {
   return violations.map(violation => {
     const evaluation = evaluator.evaluate(violation);
 
+    const baseSeverity = violation.severity;
+    const finalSeverity = applySeverityFloor(baseSeverity, evaluation.severity);
+
     return {
       ...violation,
       originalSeverity: violation.severity,
-      severity: evaluation.severity,
+      severity: finalSeverity,
       severityScore: evaluation.score,
       baseScore: evaluation.baseScore,
       impactBreakdown: evaluation.breakdown,