agileflow 2.78.0 → 2.79.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -6
- package/package.json +1 -1
- package/scripts/agileflow-welcome.js +31 -84
- package/scripts/auto-self-improve.js +23 -45
- package/scripts/check-update.js +35 -42
- package/scripts/damage-control/bash-tool-damage-control.js +257 -0
- package/scripts/damage-control/edit-tool-damage-control.js +279 -0
- package/scripts/damage-control/patterns.yaml +227 -0
- package/scripts/damage-control/write-tool-damage-control.js +274 -0
- package/scripts/obtain-context.js +22 -3
- package/scripts/ralph-loop.js +191 -63
- package/scripts/screenshot-verifier.js +213 -0
- package/scripts/session-manager.js +12 -33
- package/src/core/agents/configuration-damage-control.md +248 -0
- package/src/core/commands/babysit.md +30 -2
- package/src/core/commands/setup/visual-e2e.md +462 -0
- package/src/core/skills/_learnings/code-review.yaml +118 -0
- package/src/core/skills/_learnings/story-writer.yaml +71 -0
- package/tools/cli/installers/ide/claude-code.js +127 -0
package/README.md
CHANGED

@@ -3,8 +3,8 @@
 </p>
 
 [](https://www.npmjs.com/package/agileflow)
-[](docs/04-architecture/commands.md)
+[](docs/04-architecture/subagents.md)
 [](docs/04-architecture/skills.md)
 
 **AI-driven agile development for Claude Code, Cursor, Windsurf, OpenAI Codex CLI, and more.** Combining Scrum, Kanban, ADRs, and docs-as-code principles into one framework-agnostic system.

@@ -65,8 +65,8 @@ AgileFlow combines three proven methodologies:
 
 | Component | Count | Description |
 |-----------|-------|-------------|
-| [Commands](docs/04-architecture/commands.md) |
-| [Agents/Experts](docs/04-architecture/subagents.md) |
+| [Commands](docs/04-architecture/commands.md) | 69 | Slash commands for agile workflows |
+| [Agents/Experts](docs/04-architecture/subagents.md) | 28 | Specialized agents with self-improving knowledge bases |
 | [Skills](docs/04-architecture/skills.md) | Dynamic | Generated on-demand with `/agileflow:skill:create` |
 
 ---

@@ -76,8 +76,8 @@ AgileFlow combines three proven methodologies:
 Full documentation lives in [`docs/04-architecture/`](docs/04-architecture/):
 
 ### Reference
-- [Commands](docs/04-architecture/commands.md) - All
-- [Agents/Experts](docs/04-architecture/subagents.md) -
+- [Commands](docs/04-architecture/commands.md) - All 69 slash commands
+- [Agents/Experts](docs/04-architecture/subagents.md) - 28 specialized agents with self-improving knowledge
 - [Skills](docs/04-architecture/skills.md) - Dynamic skill generator with MCP integration
 
 ### Architecture

package/package.json
CHANGED

package/scripts/agileflow-welcome.js
CHANGED

@@ -15,6 +15,10 @@ const fs = require('fs');
 const path = require('path');
 const { execSync, spawnSync } = require('child_process');
 
+// Shared utilities
+const { c, box } = require('../lib/colors');
+const { getProjectRoot } = require('../lib/paths');
+
 // Session manager path (relative to script location)
 const SESSION_MANAGER_PATH = path.join(__dirname, 'session-manager.js');
 

@@ -26,68 +30,6 @@ try {
   // Update checker not available
 }
 
-// ANSI color codes
-const c = {
-  reset: '\x1b[0m',
-  bold: '\x1b[1m',
-  dim: '\x1b[2m',
-
-  // Standard ANSI colors
-  red: '\x1b[31m',
-  green: '\x1b[32m',
-  yellow: '\x1b[33m',
-  blue: '\x1b[34m',
-  magenta: '\x1b[35m',
-  cyan: '\x1b[36m',
-
-  brightBlack: '\x1b[90m',
-  brightGreen: '\x1b[92m',
-  brightYellow: '\x1b[93m',
-  brightCyan: '\x1b[96m',
-
-  // Vibrant 256-color palette (modern, sleek look)
-  mintGreen: '\x1b[38;5;158m',   // Healthy/success states
-  peach: '\x1b[38;5;215m',       // Warning states
-  coral: '\x1b[38;5;203m',       // Critical/error states
-  lightGreen: '\x1b[38;5;194m',  // Session healthy
-  lightYellow: '\x1b[38;5;228m', // Session warning
-  lightPink: '\x1b[38;5;210m',   // Session critical
-  skyBlue: '\x1b[38;5;117m',     // Directories/paths
-  lavender: '\x1b[38;5;147m',    // Model info, story IDs
-  softGold: '\x1b[38;5;222m',    // Cost/money
-  teal: '\x1b[38;5;80m',         // Ready/pending states
-  slate: '\x1b[38;5;103m',       // Secondary info
-  rose: '\x1b[38;5;211m',        // Blocked/critical accent
-  amber: '\x1b[38;5;214m',       // WIP/in-progress accent
-  powder: '\x1b[38;5;153m',      // Labels/headers
-
-  // Brand color (#e8683a)
-  brand: '\x1b[38;2;232;104;58m',
-};
-
-// Box drawing characters
-const box = {
-  tl: '╭',
-  tr: '╮',
-  bl: '╰',
-  br: '╯',
-  h: '─',
-  v: '│',
-  lT: '├',
-  rT: '┤',
-  tT: '┬',
-  bT: '┴',
-  cross: '┼',
-};
-
-function getProjectRoot() {
-  let dir = process.cwd();
-  while (!fs.existsSync(path.join(dir, '.agileflow')) && dir !== '/') {
-    dir = path.dirname(dir);
-  }
-  return dir !== '/' ? dir : process.cwd();
-}
-
 function getProjectInfo(rootDir) {
   const info = {
     name: 'agileflow',

@@ -347,7 +289,7 @@ function checkPreCompact(rootDir) {
 }
 
 function checkDamageControl(rootDir) {
-  const result = { configured: false, level:
+  const result = { configured: false, level: 'standard', patternCount: 0, scriptsOk: true };
 
   try {
     // Check if PreToolUse hooks are configured in settings

@@ -368,15 +310,23 @@ function checkDamageControl(rootDir) {
     );
     result.hooksCount = dcHooks.length;
 
-    // Check
-    const
+    // Check for enhanced mode (has prompt hook)
+    const hasPromptHook = settings.hooks.PreToolUse.some(
+      h => h.hooks?.some(hk => hk.type === 'prompt')
+    );
+    if (hasPromptHook) {
+      result.level = 'enhanced';
+    }
+
+    // Check if all required scripts exist (in .claude/hooks/damage-control/)
+    const hooksDir = path.join(rootDir, '.claude', 'hooks', 'damage-control');
     const requiredScripts = [
-      'damage-control
-      'damage-control
-      'damage-control
+      'bash-tool-damage-control.js',
+      'edit-tool-damage-control.js',
+      'write-tool-damage-control.js',
     ];
     for (const script of requiredScripts) {
-      if (!fs.existsSync(path.join(
+      if (!fs.existsSync(path.join(hooksDir, script))) {
         result.scriptsOk = false;
         break;
       }

@@ -385,23 +335,20 @@ function checkDamageControl(rootDir) {
       }
     }
 
-    //
-    const
-
-
-
-
+    // Count patterns in patterns.yaml
+    const patternsLocations = [
+      path.join(rootDir, '.claude', 'hooks', 'damage-control', 'patterns.yaml'),
+      path.join(rootDir, '.agileflow', 'scripts', 'damage-control', 'patterns.yaml'),
+    ];
+    for (const patternsPath of patternsLocations) {
+      if (fs.existsSync(patternsPath)) {
+        const content = fs.readFileSync(patternsPath, 'utf8');
+        // Count pattern entries (lines starting with " - pattern:")
+        const patternMatches = content.match(/^\s*-\s*pattern:/gm);
+        result.patternCount = patternMatches ? patternMatches.length : 0;
+        break;
       }
     }
-
-    // Count patterns in config file
-    const patternsPath = path.join(rootDir, '.agileflow', 'config', 'damage-control-patterns.yaml');
-    if (fs.existsSync(patternsPath)) {
-      const content = fs.readFileSync(patternsPath, 'utf8');
-      // Count pattern entries (lines starting with " - pattern:")
-      const patternMatches = content.match(/^\s*-\s*pattern:/gm);
-      result.patternCount = patternMatches ? patternMatches.length : 0;
-    }
   } catch (e) {}
 
   return result;

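The `c` color table, `box` drawing characters, and `getProjectRoot()` helper deleted above are now imported from shared modules (`../lib/colors`, `../lib/paths`), which are not part of this diff. A rough sketch only, reconstructed from the removed constants, of what the shared colors module presumably exports (grouping and comments are assumptions):

```js
// lib/colors.js - hypothetical reconstruction from the inline tables removed in agileflow-welcome.js
const c = {
  reset: '\x1b[0m',
  bold: '\x1b[1m',
  dim: '\x1b[2m',
  red: '\x1b[31m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  blue: '\x1b[34m',
  cyan: '\x1b[36m',
  // ...plus the 256-color palette (mintGreen, peach, coral, skyBlue, lavender, ...) shown in the removed block
  brand: '\x1b[38;2;232;104;58m', // brand color #e8683a
};

// Box-drawing characters for the welcome panel
const box = {
  tl: '╭', tr: '╮', bl: '╰', br: '╯',
  h: '─', v: '│',
  lT: '├', rT: '┤', tT: '┬', bT: '┴', cross: '┼',
};

module.exports = { c, box };
```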
package/scripts/auto-self-improve.js
CHANGED

@@ -21,18 +21,10 @@ const fs = require('fs');
 const path = require('path');
 const { execSync } = require('child_process');
 
-//
-const c =
-
-
-  dim: '\x1b[2m',
-  red: '\x1b[31m',
-  green: '\x1b[32m',
-  yellow: '\x1b[33m',
-  blue: '\x1b[34m',
-  cyan: '\x1b[36m',
-  brand: '\x1b[38;2;232;104;58m',
-};
+// Shared utilities
+const { c } = require('../lib/colors');
+const { getProjectRoot } = require('../lib/paths');
+const { safeReadJSON, safeReadFile, safeWriteFile } = require('../lib/errors');
 
 // Agents that have expertise files
 const AGENTS_WITH_EXPERTISE = [

@@ -76,24 +68,11 @@ const DOMAIN_PATTERNS = {
   devops: [/deploy/, /kubernetes/, /k8s/, /terraform/, /ansible/],
 };
 
-// Find project root
-function getProjectRoot() {
-  let dir = process.cwd();
-  while (!fs.existsSync(path.join(dir, '.agileflow')) && dir !== '/') {
-    dir = path.dirname(dir);
-  }
-  return dir !== '/' ? dir : process.cwd();
-}
-
 // Read session state
 function getSessionState(rootDir) {
   const statePath = path.join(rootDir, 'docs/09-agents/session-state.json');
-
-
-    return JSON.parse(fs.readFileSync(statePath, 'utf8'));
-  }
-  } catch (e) {}
-  return {};
+  const result = safeReadJSON(statePath, { defaultValue: {} });
+  return result.ok ? result.data : {};
 }
 
 // Get git diff summary

@@ -232,26 +211,25 @@ function getExpertisePath(rootDir, agent) {
 
 // Append learning to expertise file
 function appendLearning(expertisePath, learning) {
-
-
-
-  // Find the learnings section
-  const learningsMatch = content.match(/^learnings:\s*$/m);
-
-  if (!learningsMatch) {
-    // No learnings section, add it at the end
-    content += `\n\nlearnings:\n${learning}`;
-  } else {
-    // Find where to insert (after "learnings:" line)
-    const insertPos = learningsMatch.index + learningsMatch[0].length;
-    content = content.slice(0, insertPos) + '\n' + learning + content.slice(insertPos);
-  }
+  const readResult = safeReadFile(expertisePath);
+  if (!readResult.ok) return false;
 
-
-
-
+  let content = readResult.data;
+
+  // Find the learnings section
+  const learningsMatch = content.match(/^learnings:\s*$/m);
+
+  if (!learningsMatch) {
+    // No learnings section, add it at the end
+    content += `\n\nlearnings:\n${learning}`;
+  } else {
+    // Find where to insert (after "learnings:" line)
+    const insertPos = learningsMatch.index + learningsMatch[0].length;
+    content = content.slice(0, insertPos) + '\n' + learning + content.slice(insertPos);
+  }
   }
+
+  const writeResult = safeWriteFile(expertisePath, content);
+  return writeResult.ok;
 }
 
 // Format learning as YAML
package/scripts/check-update.js
CHANGED

@@ -24,6 +24,10 @@ const fs = require('fs');
 const path = require('path');
 const https = require('https');
 
+// Shared utilities
+const { getProjectRoot } = require('../lib/paths');
+const { safeReadJSON, safeWriteJSON } = require('../lib/errors');
+
 // Debug mode
 const DEBUG = process.env.DEBUG_UPDATE === '1';
 

@@ -33,35 +37,23 @@ function debugLog(message, data = null) {
   }
 }
 
-// Find project root (has .agileflow directory)
-function getProjectRoot() {
-  let dir = process.cwd();
-  while (!fs.existsSync(path.join(dir, '.agileflow')) && dir !== '/') {
-    dir = path.dirname(dir);
-  }
-  return dir !== '/' ? dir : process.cwd();
-}
-
 // Get installed AgileFlow version
 function getInstalledVersion(rootDir) {
   // First check .agileflow/package.json (installed version)
   const agileflowPkg = path.join(rootDir, '.agileflow', 'package.json');
-
-
-
-
-
-
-  }
+  const agileflowResult = safeReadJSON(agileflowPkg);
+  if (agileflowResult.ok && agileflowResult.data?.version) {
+    return agileflowResult.data.version;
+  }
+  if (!agileflowResult.ok && agileflowResult.error) {
+    debugLog('Error reading .agileflow/package.json', agileflowResult.error);
   }
 
   // Fallback: check if this is the AgileFlow dev repo
   const cliPkg = path.join(rootDir, 'packages/cli/package.json');
-
-
-
-    if (pkg.name === 'agileflow' && pkg.version) return pkg.version;
-  } catch (e) {}
+  const cliResult = safeReadJSON(cliPkg);
+  if (cliResult.ok && cliResult.data?.name === 'agileflow' && cliResult.data?.version) {
+    return cliResult.data.version;
   }
 
   return null;

@@ -78,16 +70,16 @@ function getUpdateConfig(rootDir) {
     latestVersion: null,
   };
 
-
-
-
-
-
-
-
-
-
-
+  const metadataPath = path.join(rootDir, 'docs/00-meta/agileflow-metadata.json');
+  const result = safeReadJSON(metadataPath, { defaultValue: {} });
+
+  if (!result.ok) {
+    debugLog('Error reading update config', result.error);
+    return defaults;
+  }
+
+  if (result.data?.updates) {
+    return { ...defaults, ...result.data.updates };
   }
 
   return defaults;

@@ -95,21 +87,22 @@ function getUpdateConfig(rootDir) {
 
 // Save update configuration
 function saveUpdateConfig(rootDir, config) {
-
-  const metadataPath = path.join(rootDir, 'docs/00-meta/agileflow-metadata.json');
-  let metadata = {};
+  const metadataPath = path.join(rootDir, 'docs/00-meta/agileflow-metadata.json');
 
-
-
-
+  // Read existing metadata
+  const readResult = safeReadJSON(metadataPath, { defaultValue: {} });
+  const metadata = readResult.ok ? readResult.data : {};
+
+  // Update and write
+  metadata.updates = config;
+  const writeResult = safeWriteJSON(metadataPath, metadata, { createDir: true });
 
-
-
-  return true;
-  } catch (e) {
-    debugLog('Error saving update config', e.message);
+  if (!writeResult.ok) {
+    debugLog('Error saving update config', writeResult.error);
     return false;
   }
+
+  return true;
 }
 
 // Check if cache is still valid

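All three scripts above drop their own copy of `getProjectRoot()` in favour of `../lib/paths`. The deleted implementations were identical, so the shared module presumably carries the same walk-up logic; a minimal sketch (module location and export name taken from the `require` calls, body taken verbatim from the removed lines):

```js
// lib/paths.js - sketch; the function body is the helper removed from each script
const fs = require('fs');
const path = require('path');

// Walk up from cwd until a directory containing .agileflow is found
function getProjectRoot() {
  let dir = process.cwd();
  while (!fs.existsSync(path.join(dir, '.agileflow')) && dir !== '/') {
    dir = path.dirname(dir);
  }
  return dir !== '/' ? dir : process.cwd();
}

module.exports = { getProjectRoot };
```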

package/scripts/damage-control/bash-tool-damage-control.js
ADDED

@@ -0,0 +1,257 @@
+#!/usr/bin/env node
+
+/**
+ * bash-tool-damage-control.js - Validate bash commands against security patterns
+ *
+ * This PreToolUse hook runs before every Bash tool execution.
+ * It checks the command against patterns.yaml to block or ask for
+ * confirmation on dangerous commands.
+ *
+ * Exit codes:
+ *   0 = Allow command to proceed (or ask with JSON output)
+ *   2 = Block command
+ *
+ * For ask confirmation, outputs JSON:
+ *   {"result": "ask", "message": "Reason for asking"}
+ *
+ * Usage (as PreToolUse hook):
+ *   node .claude/hooks/damage-control/bash-tool-damage-control.js
+ *
+ * Environment:
+ *   CLAUDE_TOOL_INPUT - JSON string with tool input (contains "command")
+ *   CLAUDE_PROJECT_DIR - Project root directory
+ */
+
+const fs = require('fs');
+const path = require('path');
+
+// ANSI colors for output
+const c = {
+  reset: '\x1b[0m',
+  bold: '\x1b[1m',
+  red: '\x1b[31m',
+  yellow: '\x1b[33m',
+  cyan: '\x1b[36m',
+};
+
+// Exit codes
+const EXIT_ALLOW = 0;
+const EXIT_BLOCK = 2;
+
+/**
+ * Load patterns from YAML file
+ * Falls back to built-in patterns if YAML parsing fails
+ */
+function loadPatterns(projectDir) {
+  const locations = [
+    path.join(projectDir, '.claude/hooks/damage-control/patterns.yaml'),
+    path.join(projectDir, '.agileflow/hooks/damage-control/patterns.yaml'),
+    path.join(projectDir, 'patterns.yaml'),
+  ];
+
+  for (const loc of locations) {
+    if (fs.existsSync(loc)) {
+      try {
+        const content = fs.readFileSync(loc, 'utf8');
+        // Simple YAML parsing for our specific structure
+        return parseSimpleYaml(content);
+      } catch (e) {
+        console.error(`Warning: Could not parse ${loc}: ${e.message}`);
+      }
+    }
+  }
+
+  // Return built-in defaults if no file found
+  return getDefaultPatterns();
+}
+
+/**
+ * Simple YAML parser for patterns.yaml structure
+ * Only handles the specific structure we use (arrays of objects with pattern/reason/ask)
+ */
+function parseSimpleYaml(content) {
+  const patterns = {
+    bashToolPatterns: [],
+    askPatterns: [],
+    agileflowPatterns: [],
+  };
+
+  let currentSection = null;
+  let currentItem = null;
+
+  const lines = content.split('\n');
+
+  for (const line of lines) {
+    // Skip comments and empty lines
+    if (line.trim().startsWith('#') || line.trim() === '') continue;
+
+    // Check for section headers
+    if (line.match(/^bashToolPatterns:/)) {
+      currentSection = 'bashToolPatterns';
+      continue;
+    }
+    if (line.match(/^askPatterns:/)) {
+      currentSection = 'askPatterns';
+      continue;
+    }
+    if (line.match(/^agileflowPatterns:/)) {
+      currentSection = 'agileflowPatterns';
+      continue;
+    }
+    if (line.match(/^(zeroAccessPaths|readOnlyPaths|noDeletePaths|config):/)) {
+      currentSection = null; // Skip non-pattern sections
+      continue;
+    }
+
+    // Parse pattern items
+    if (currentSection && patterns[currentSection]) {
+      const patternMatch = line.match(/^\s+-\s*pattern:\s*['"]?(.+?)['"]?\s*$/);
+      if (patternMatch) {
+        currentItem = { pattern: patternMatch[1] };
+        patterns[currentSection].push(currentItem);
+        continue;
+      }
+
+      const reasonMatch = line.match(/^\s+reason:\s*['"]?(.+?)['"]?\s*$/);
+      if (reasonMatch && currentItem) {
+        currentItem.reason = reasonMatch[1];
+        continue;
+      }
+
+      const askMatch = line.match(/^\s+ask:\s*(true|false)\s*$/);
+      if (askMatch && currentItem) {
+        currentItem.ask = askMatch[1] === 'true';
+        continue;
+      }
+    }
+  }
+
+  return patterns;
+}
+
+/**
+ * Built-in default patterns (used if patterns.yaml not found)
+ */
+function getDefaultPatterns() {
+  return {
+    bashToolPatterns: [
+      { pattern: '\\brm\\s+-[rRf]', reason: 'rm with recursive or force flags' },
+      { pattern: 'DROP\\s+(TABLE|DATABASE)', reason: 'DROP commands are destructive' },
+      { pattern: 'DELETE\\s+FROM\\s+\\w+\\s*;', reason: 'DELETE without WHERE clause' },
+      { pattern: 'TRUNCATE\\s+(TABLE\\s+)?\\w+', reason: 'TRUNCATE removes all data' },
+      { pattern: 'git\\s+push\\s+.*--force', reason: 'Force push can overwrite history', ask: true },
+      { pattern: 'git\\s+reset\\s+--hard', reason: 'Hard reset discards changes', ask: true },
+    ],
+    askPatterns: [
+      { pattern: 'DELETE\\s+FROM\\s+\\w+\\s+WHERE', reason: 'Confirm record deletion' },
+      { pattern: 'npm\\s+publish', reason: 'Publishing to npm is permanent' },
+    ],
+    agileflowPatterns: [
+      { pattern: 'rm.*\\.agileflow', reason: 'Deleting .agileflow breaks installation' },
+      { pattern: 'rm.*\\.claude', reason: 'Deleting .claude breaks configuration' },
+    ],
+  };
+}
+
+/**
+ * Check command against patterns
+ * Returns: { blocked: boolean, ask: boolean, reason: string }
+ */
+function checkCommand(command, patterns) {
+  // Combine all pattern sources
+  const allPatterns = [
+    ...(patterns.bashToolPatterns || []),
+    ...(patterns.agileflowPatterns || []),
+  ];
+
+  // Check block/ask patterns
+  for (const p of allPatterns) {
+    try {
+      const regex = new RegExp(p.pattern, 'i');
+      if (regex.test(command)) {
+        if (p.ask) {
+          return { blocked: false, ask: true, reason: p.reason };
+        }
+        return { blocked: true, ask: false, reason: p.reason };
+      }
+    } catch (e) {
+      // Invalid regex, skip
+      console.error(`Warning: Invalid regex pattern: ${p.pattern}`);
+    }
+  }
+
+  // Check ask-only patterns
+  for (const p of (patterns.askPatterns || [])) {
+    try {
+      const regex = new RegExp(p.pattern, 'i');
+      if (regex.test(command)) {
+        return { blocked: false, ask: true, reason: p.reason };
+      }
+    } catch (e) {
+      // Invalid regex, skip
+    }
+  }
+
+  return { blocked: false, ask: false, reason: null };
+}
+
+/**
+ * Main entry point
+ */
+function main() {
+  // Get tool input from environment
+  const toolInput = process.env.CLAUDE_TOOL_INPUT;
+  const projectDir = process.env.CLAUDE_PROJECT_DIR || process.cwd();
+
+  if (!toolInput) {
+    // No input, allow by default
+    process.exit(EXIT_ALLOW);
+  }
+
+  let input;
+  try {
+    input = JSON.parse(toolInput);
+  } catch (e) {
+    console.error('Error parsing CLAUDE_TOOL_INPUT:', e.message);
+    process.exit(EXIT_ALLOW);
+  }
+
+  const command = input.command;
+  if (!command) {
+    process.exit(EXIT_ALLOW);
+  }
+
+  // Load patterns
+  const patterns = loadPatterns(projectDir);
+
+  // Check command
+  const result = checkCommand(command, patterns);
+
+  if (result.blocked) {
+    // Block the command
+    console.error(`${c.red}${c.bold}BLOCKED${c.reset}: ${result.reason}`);
+    console.error(`${c.yellow}Command: ${command}${c.reset}`);
+    console.error(`${c.cyan}This command was blocked by damage control.${c.reset}`);
+    process.exit(EXIT_BLOCK);
+  }
+
+  if (result.ask) {
+    // Ask for confirmation
+    const response = {
+      result: 'ask',
+      message: `${result.reason}\n\nCommand: ${command}\n\nProceed with this command?`,
+    };
+    console.log(JSON.stringify(response));
+    process.exit(EXIT_ALLOW);
+  }
+
+  // Allow the command
+  process.exit(EXIT_ALLOW);
+}
+
+// Run if called directly
+if (require.main === module) {
+  main();
+}
+
+module.exports = { checkCommand, loadPatterns, parseSimpleYaml };