@whitehatd/crag 0.0.1 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/crag.js ADDED
@@ -0,0 +1,7 @@
1
#!/usr/bin/env node

'use strict';

// Thin executable shim: all CLI logic lives in src/cli.js.
const { run } = require('../src/cli');

// Drop `node` and the script path; forward only user-supplied arguments.
run(process.argv.slice(2));
package/package.json CHANGED
@@ -1,9 +1,12 @@
1
1
  {
2
2
  "name": "@whitehatd/crag",
3
- "version": "0.0.1",
4
- "description": "The bedrock layer for AI coding agents. One governance.md. Any project. Never stale. (Name reservation — full release coming soon.)",
3
+ "version": "0.2.1",
4
+ "description": "The bedrock layer for AI coding agents. One governance.md. Any project. Never stale.",
5
5
  "bin": {
6
- "crag": "cli.js"
6
+ "crag": "bin/crag.js"
7
+ },
8
+ "scripts": {
9
+ "test": "node test/all.js"
7
10
  },
8
11
  "keywords": [
9
12
  "claude-code",
@@ -11,8 +14,13 @@
11
14
  "ai-coding",
12
15
  "agent-governance",
13
16
  "developer-tools",
17
+ "workflow-automation",
14
18
  "meta-framework",
15
- "monorepo"
19
+ "monorepo",
20
+ "cursor",
21
+ "codex",
22
+ "aider",
23
+ "gemini-cli"
16
24
  ],
17
25
  "author": "Alexandru Cioc (WhitehatD)",
18
26
  "license": "MIT",
@@ -24,6 +32,12 @@
24
32
  "bugs": {
25
33
  "url": "https://github.com/WhitehatD/crag/issues"
26
34
  },
35
+ "files": [
36
+ "bin",
37
+ "src",
38
+ "README.md",
39
+ "LICENSE"
40
+ ],
27
41
  "engines": {
28
42
  "node": ">=18"
29
43
  }
package/src/cli.js ADDED
@@ -0,0 +1,102 @@
1
+ 'use strict';
2
+
3
+ const { init, install } = require('./commands/init');
4
+ const { check } = require('./commands/check');
5
+ const { compile } = require('./commands/compile');
6
+ const { analyze } = require('./commands/analyze');
7
+ const { diff } = require('./commands/diff');
8
+ const { upgrade } = require('./commands/upgrade');
9
+ const { workspace } = require('./commands/workspace');
10
+ const { checkOnce } = require('./update/version-check');
11
+
12
/**
 * Print the full CLI help text: command list, the 12 compile targets,
 * per-command option groups, and a short architecture overview.
 * Pure output — reads nothing, returns nothing.
 * NOTE(review): column alignment inside this literal was collapsed by the
 * diff rendering this file was recovered from — restore spacing against the
 * published package before shipping.
 */
function printUsage() {
  console.log(`
crag — the bedrock layer for AI coding agents
One governance.md. Any project. Never stale.

Usage:
crag init Interview → generate governance, hooks, agents
crag analyze Generate governance from existing project (no interview)
crag check Verify infrastructure is complete
crag compile Compile governance.md → CI, hooks, AGENTS.md, Cursor, Gemini
crag diff Compare governance against codebase reality
crag upgrade Update universal skills to latest version
crag workspace Inspect detected workspace (type, members, governance hierarchy)
crag install Install agent globally for /crag-project
crag version Show version

Compile targets (12):
CI / git hooks:
crag compile --target github .github/workflows/gates.yml
crag compile --target husky .husky/pre-commit
crag compile --target pre-commit .pre-commit-config.yaml
AI agents — native:
crag compile --target agents-md AGENTS.md (Codex, Aider, Factory)
crag compile --target cursor .cursor/rules/governance.mdc
crag compile --target gemini GEMINI.md
AI agents — additional:
crag compile --target copilot .github/copilot-instructions.md
crag compile --target cline .clinerules
crag compile --target continue .continuerules
crag compile --target windsurf .windsurfrules
crag compile --target zed .zed/rules.md
crag compile --target cody .sourcegraph/cody-instructions.md
crag compile --target all All 12 targets at once

Analyze options:
crag analyze --dry-run Print inferred governance without writing
crag analyze --workspace Analyze all workspace members
crag analyze --merge Merge with existing governance

Upgrade options:
crag upgrade --check Show what would change
crag upgrade --workspace Update all workspace members
crag upgrade --force Overwrite modified skills (with backup)

Workspace options:
crag workspace Human-readable workspace inspection
crag workspace --json Machine-readable JSON output

Architecture:
Universal skills (ship with crag, same for every project):
pre-start-context discovers any project at runtime
post-start-validation validates using governance gates

Generated per-project (from your interview or analysis):
governance.md your rules, quality bar, policies
hooks/ sandbox guard, drift detector, circuit breaker
agents/ test-runner, security-reviewer, scanners
settings.local.json permissions + hook wiring

The skills read governance.md and adapt. Nothing is hardcoded.
`);
}
74
+
75
/**
 * CLI entry point: dispatch the first CLI argument to its command handler.
 * @param {string[]} args - process.argv minus the node/script prefix.
 */
function run(args) {
  const command = args[0];

  // Non-blocking update check (cached, ~1ms on warm path)
  checkOnce();

  // Handlers that take no arguments vs. handlers that parse the raw argv tail.
  const plainCommands = { init, install, check };
  const argCommands = { compile, analyze, diff, upgrade, workspace };

  if (command !== undefined && Object.hasOwn(plainCommands, command)) {
    plainCommands[command]();
    return;
  }
  if (command !== undefined && Object.hasOwn(argCommands, command)) {
    argCommands[command](args);
    return;
  }
  if (command === 'version' || command === '--version' || command === '-v') {
    console.log(` crag v${require('../package.json').version}`);
    return;
  }
  // No command at all falls through to the help text.
  if (command === 'help' || command === '--help' || command === '-h' || command === undefined) {
    printUsage();
    return;
  }

  console.error(` Unknown command: ${command}`);
  printUsage();
  process.exit(1);
}
101
+
102
+ module.exports = { run };
package/src/commands/analyze.js ADDED
@@ -0,0 +1,513 @@
1
+ 'use strict';
2
+
3
+ const { execSync } = require('child_process');
4
+ const fs = require('fs');
5
+ const path = require('path');
6
+ const { detectWorkspace } = require('../workspace/detect');
7
+ const { enumerateMembers } = require('../workspace/enumerate');
8
+
9
+ /**
10
+ * crag analyze — generate governance.md from existing project without interview.
11
+ * Reads CI configs, package manifests, linter configs, git history.
12
+ */
13
/**
 * Entry point for `crag analyze`: inspect the current working directory,
 * render a governance.md, and write it under .claude/ (unless --dry-run).
 * @param {string[]} args - Raw argv tail; recognised flags: --dry-run,
 *   --workspace, --merge (order-insensitive).
 */
function analyze(args) {
  const dryRun = args.includes('--dry-run');
  const workspace = args.includes('--workspace');
  const merge = args.includes('--merge');
  const cwd = process.cwd();

  console.log(`\n Analyzing project in ${cwd}...\n`);

  const analysis = analyzeProject(cwd);

  // --workspace: additionally analyze each member and attach the results.
  if (workspace) {
    const ws = detectWorkspace(cwd);
    if (ws.type !== 'none') {
      const members = enumerateMembers(ws);
      console.log(` Workspace detected: ${ws.type} (${members.length} members)\n`);
      analysis.workspace = { type: ws.type, members: [] };

      for (const member of members) {
        const memberAnalysis = analyzeProject(member.path);
        analysis.workspace.members.push({ name: member.name, ...memberAnalysis });
      }
    }
  }

  const governance = generateGovernance(analysis, cwd);

  // --dry-run: print what would be written and stop before touching disk.
  if (dryRun) {
    console.log(' --- DRY RUN (would generate) ---\n');
    console.log(governance);
    console.log(' --- END DRY RUN ---\n');
    return;
  }

  const govPath = path.join(cwd, '.claude', 'governance.md');
  const govDir = path.dirname(govPath);
  if (!fs.existsSync(govDir)) fs.mkdirSync(govDir, { recursive: true });

  // Overwrite mode keeps a timestamped backup of the previous file.
  if (fs.existsSync(govPath) && !merge) {
    const backupPath = govPath + '.bak.' + Date.now();
    fs.copyFileSync(govPath, backupPath);
    console.log(` Backed up existing governance to ${path.basename(backupPath)}`);
  }

  // Merge mode appends only sections not already present (see mergeWithExisting).
  if (merge && fs.existsSync(govPath)) {
    console.log(' Merge mode: preserving existing governance, appending new gates');
    const existing = fs.readFileSync(govPath, 'utf-8');
    const mergedContent = mergeWithExisting(existing, governance);
    fs.writeFileSync(govPath, mergedContent);
  } else {
    fs.writeFileSync(govPath, governance);
  }

  console.log(` \x1b[32m✓\x1b[0m Generated ${path.relative(cwd, govPath)}`);
  console.log(`\n Review the file — sections marked "# Inferred" should be verified.`);
  console.log(` Run 'crag check' to verify infrastructure.\n`);
}
69
+
70
/**
 * Run every detector against a single project directory.
 * @param {string} dir - Path of the project to inspect.
 * @returns {object} Aggregated report (stack, gates, tooling, git patterns).
 */
function analyzeProject(dir) {
  // Empty skeleton; each detector fills in its own fields by mutation.
  const report = {
    name: path.basename(dir),
    description: '',
    stack: [],
    gates: [],
    linters: [],
    formatters: [],
    testers: [],
    builders: [],
    branchStrategy: 'unknown',
    commitConvention: 'unknown',
    deployment: [],
    ci: null,
    ciGates: [],
  };

  // Run in this fixed order so the ordering of appended list entries
  // (e.g. linters) stays stable across runs.
  const detectors = [
    detectStack,        // stack / name / description from manifests
    extractCIGates,     // gate commands from CI configs
    extractPackageScripts, // gates from package.json scripts
    detectLinters,      // lint/format tool configs
    detectDeployment,   // deployment targets
    inferGitPatterns,   // branch strategy + commit convention from git
  ];
  for (const detect of detectors) {
    detect(dir, report);
  }

  return report;
}
107
+
108
/**
 * Detect the technology stack from manifest files in `dir`.
 * @param {string} dir - Project root.
 * @param {object} result - Mutated: appends to `stack`, may set `name`
 *   and `description` from package.json.
 */
function detectStack(dir, result) {
  const exists = (file) => fs.existsSync(path.join(dir, file));

  if (exists('package.json')) {
    result.stack.push('node');
    try {
      const manifest = JSON.parse(fs.readFileSync(path.join(dir, 'package.json'), 'utf-8'));
      if (manifest.name) result.name = manifest.name;
      result.description = manifest.description || '';

      const allDeps = { ...manifest.dependencies, ...manifest.devDependencies };
      const frameworkOrder = [
        ['next', 'next.js'],
        ['react', 'react'],
        ['vue', 'vue'],
        ['svelte', 'svelte'],
        ['express', 'express'],
        ['fastify', 'fastify'],
        ['typescript', 'typescript'],
      ];
      for (const [dep, label] of frameworkOrder) {
        if (!allDeps[dep]) continue;
        if (dep === 'react' && allDeps.next) continue; // Next.js implies React
        result.stack.push(label);
      }
    } catch { /* unreadable manifest — stack stays ['node'] */ }
  }

  // Non-JS ecosystems, identified by their canonical build manifest.
  const manifestStacks = [
    ['Cargo.toml', 'rust'],
    ['go.mod', 'go'],
    ['pyproject.toml', 'python'],
    ['setup.py', 'python'],
    ['build.gradle.kts', 'java/gradle'],
    ['build.gradle', 'java/gradle'],
    ['pom.xml', 'java/maven'],
    ['Dockerfile', 'docker'],
  ];
  for (const [file, label] of manifestStacks) {
    if (exists(file) && !result.stack.includes(label)) result.stack.push(label);
  }
}
133
+
134
/**
 * Record which CI system is present and harvest gate-like commands from
 * GitHub Actions workflow files.
 * @param {string} dir - Project root.
 * @param {object} result - Mutated: sets `ci`, appends to `ciGates`.
 */
function extractCIGates(dir, result) {
  // GitHub Actions: walk .github/workflows recursively for YAML files.
  const workflowDir = path.join(dir, '.github', 'workflows');
  if (fs.existsSync(workflowDir)) {
    result.ci = 'github-actions';
    try {
      const workflowFiles = [];
      // Depth-first, expanding directories in readdir order so gate order
      // matches file order on disk.
      const collect = (current) => {
        for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
          const full = path.join(current, entry.name);
          if (entry.isDirectory()) {
            collect(full);
          } else if (/\.ya?ml$/.test(entry.name)) {
            workflowFiles.push(full);
          }
        }
      };
      collect(workflowDir);

      for (const file of workflowFiles) {
        const text = fs.readFileSync(file, 'utf-8');
        for (const cmd of extractRunCommands(text)) {
          if (isGateCommand(cmd)) result.ciGates.push(cmd);
        }
      }
    } catch { /* unreadable workflow tree — leave ciGates as-is */ }
  }

  // Later checks win: a repo with several CI configs reports the last match.
  if (fs.existsSync(path.join(dir, '.gitlab-ci.yml'))) result.ci = 'gitlab-ci';
  if (fs.existsSync(path.join(dir, 'Jenkinsfile'))) result.ci = 'jenkins';
}
169
+
170
+ /**
171
+ * Extract commands from YAML `run:` steps, handling both inline and block-scalar forms:
172
+ * run: npm test
173
+ * run: |
174
+ * npm test
175
+ * npm build
176
+ * run: >-
177
+ * npm test
178
+ */
179
/**
 * Extract commands from YAML `run:` steps, handling both inline and block-scalar forms:
 *   run: npm test
 *   run: |
 *     npm test
 *     npm build
 *   run: >-
 *     npm test
 * @param {string} content - Raw YAML text of a workflow file.
 * @returns {string[]} Trimmed, comment-free command lines in file order.
 */
function extractRunCommands(content) {
  const commands = [];
  const lines = content.split(/\r?\n/);

  for (let i = 0; i < lines.length; i++) {
    const m = lines[i].match(/^(\s*)-?\s*run:\s*(.*)$/);
    if (!m) continue;

    const baseIndent = m[1].length;
    const rest = m[2].trim();

    // Block scalar: | or |- or |+ or > or >- or >+
    if (/^[|>][+-]?\s*$/.test(rest)) {
      // Consume following lines while they are blank or indented deeper than
      // the `run:` key. Trim each line instead of slicing a fixed indent:
      // the previous `slice(baseIndent + 2)` assumed block content was
      // indented at least two columns deeper and ate content characters
      // whenever it was indented by only one.
      let j = i + 1;
      for (; j < lines.length; j++) {
        const ln = lines[j];
        const trimmed = ln.trim();
        if (trimmed === '') continue; // blank lines do not end a block scalar
        if (ln.match(/^(\s*)/)[1].length <= baseIndent) break;
        if (!trimmed.startsWith('#')) commands.push(trimmed);
      }
      // Block content is literal text — don't rescan it for `run:` keys.
      i = j - 1;
    } else if (rest && !rest.startsWith('#')) {
      // Inline: remove surrounding quotes if any
      commands.push(rest.replace(/^["']|["']$/g, ''));
    }
  }

  return commands;
}
214
+
215
/**
 * Turn well-known package.json scripts into gate commands.
 * @param {string} dir - Project root (package.json expected there).
 * @param {object} result - Mutated: appends `npm run <script>` entries to
 *   testers/linters/builders/formatters.
 */
function extractPackageScripts(dir, result) {
  const manifestPath = path.join(dir, 'package.json');
  if (!fs.existsSync(manifestPath)) return;

  try {
    const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));
    const scripts = manifest.scripts || {};

    // Script name → result bucket, checked in this fixed order.
    const buckets = [
      ['test', 'testers'],
      ['lint', 'linters'],
      ['build', 'builders'],
      ['format', 'formatters'],
      ['typecheck', 'builders'],
      ['check', 'linters'],
    ];
    for (const [script, bucket] of buckets) {
      if (scripts[script]) result[bucket].push(`npm run ${script}`);
    }

    // Colon-suffixed variants that imply a specific role.
    if (scripts['lint:fix']) result.formatters.push('npm run lint:fix');
    if (scripts['format:check']) result.linters.push('npm run format:check');
  } catch { /* malformed package.json — contribute nothing */ }
}
243
+
244
/**
 * Infer lint/format tooling from well-known config filenames, and note
 * task-runner files (Makefile etc.) under builders.
 * @param {string} dir - Project root.
 * @param {object} result - Mutated: appends to `linters` and `builders`.
 */
function detectLinters(dir, result) {
  // Config filename → tool. Order groups related configs and fixes the
  // order in which tools are reported.
  const configTable = [
    ['.eslintrc', 'eslint'], ['.eslintrc.js', 'eslint'], ['.eslintrc.json', 'eslint'], ['.eslintrc.cjs', 'eslint'],
    ['eslint.config.js', 'eslint'], ['eslint.config.mjs', 'eslint'], ['eslint.config.cjs', 'eslint'],
    ['biome.json', 'biome'], ['biome.jsonc', 'biome'],
    ['.prettierrc', 'prettier'], ['.prettierrc.js', 'prettier'], ['.prettierrc.json', 'prettier'],
    ['prettier.config.js', 'prettier'], ['prettier.config.mjs', 'prettier'],
    ['ruff.toml', 'ruff'], ['.ruff.toml', 'ruff'],
    ['clippy.toml', 'clippy'], ['.clippy.toml', 'clippy'],
    ['rustfmt.toml', 'rustfmt'], ['.rustfmt.toml', 'rustfmt'],
    ['tsconfig.json', 'typescript'],
    ['.mypy.ini', 'mypy'], ['mypy.ini', 'mypy'],
  ];

  for (const [configFile, tool] of configTable) {
    if (!fs.existsSync(path.join(dir, configFile))) continue;
    if (!result.linters.includes(tool)) result.linters.push(tool);
  }

  // Task-runner files are recorded as a hint, not as runnable commands.
  for (const taskFile of ['Makefile', 'Taskfile.yml', 'justfile']) {
    if (fs.existsSync(path.join(dir, taskFile))) {
      result.builders.push(`${taskFile} detected`);
    }
  }
}
272
+
273
/**
 * Detect deployment targets from marker files and directories.
 * @param {string} dir - Project root.
 * @param {object} result - Mutated: appends to `deployment`.
 */
function detectDeployment(dir, result) {
  const exists = (name) => fs.existsSync(path.join(dir, name));

  // Single-marker targets; each alternative counts once.
  if (exists('Dockerfile')) result.deployment.push('docker');
  if (exists('docker-compose.yml') || exists('docker-compose.yaml')) result.deployment.push('docker-compose');
  if (exists('vercel.json') || exists('.vercel')) result.deployment.push('vercel');
  if (exists('fly.toml')) result.deployment.push('fly.io');
  if (exists('netlify.toml')) result.deployment.push('netlify');
  if (exists('render.yaml')) result.deployment.push('render');

  // Kubernetes: a conventionally named manifests directory at the root.
  try {
    const k8sDirNames = ['k8s', 'kubernetes', 'deploy'];
    const hasManifestDir = fs
      .readdirSync(dir, { withFileTypes: true })
      .some((entry) => entry.isDirectory() && k8sDirNames.includes(entry.name));
    if (hasManifestDir) result.deployment.push('kubernetes');
  } catch { /* skip */ }

  // Terraform: any top-level .tf file.
  try {
    if (fs.readdirSync(dir).some((name) => name.endsWith('.tf'))) {
      result.deployment.push('terraform');
    }
  } catch { /* skip */ }
}
298
+
299
/**
 * Infer commit convention and branch strategy from recent git history.
 * Any git failure (not a repo, git missing, timeout) leaves both fields
 * 'unknown'.
 * @param {string} dir - Directory to run git in.
 * @param {object} result - Mutated: sets `commitConvention`, `branchStrategy`.
 */
function inferGitPatterns(dir, result) {
  const git = (cmd) =>
    execSync(cmd, { cwd: dir, encoding: 'utf-8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] });

  try {
    // Conventional commits: >30% of the last 50 subjects use a type prefix.
    const commits = git('git log --oneline --all -50').trim().split('\n');
    const conventionalCount = commits.filter((subject) =>
      /\b(feat|fix|docs|chore|style|refactor|test|build|ci|perf|revert)[\(:!]/.test(subject)
    ).length;
    result.commitConvention = conventionalCount > commits.length * 0.3 ? 'conventional' : 'free-form';

    // Feature branches: more than two refs with a type/ prefix.
    const refs = git('git branch -a --format="%(refname:short)"').trim().split('\n');
    const typedRefs = refs.filter((ref) =>
      /^(feat|fix|docs|chore|feature|hotfix|release)\//.test(ref)
    );
    result.branchStrategy = typedRefs.length > 2 ? 'feature-branches' : 'trunk-based';
  } catch {
    result.branchStrategy = 'unknown';
    result.commitConvention = 'unknown';
  }
}
318
+
319
/**
 * Decide whether a CI command line invokes a known quality-gate tool
 * (test runner, linter, compiler, build tool).
 * @param {string} cmd - A single trimmed command line.
 * @returns {boolean} True if the command matches any known tool pattern.
 */
function isGateCommand(cmd) {
  const knownGateTools = [
    /npm (run |ci|test|install)/, /npx /, /node /,
    /cargo (test|build|check|clippy)/, /rustfmt/,
    /go (test|build|vet)/, /golangci-lint/,
    /pytest/, /python -m/, /ruff/, /mypy/, /flake8/,
    /gradle/, /mvn /, /maven/,
    /eslint/, /biome/, /prettier/, /tsc/,
    /docker (build|compose)/, /make /, /just /,
  ];
  for (const pattern of knownGateTools) {
    if (pattern.test(cmd)) return true;
  }
  return false;
}
331
+
332
/**
 * Render a governance.md document from an analysis report.
 * Sections, in order: Identity, Gates (Lint/Test/Build/CI plus
 * language-specific extras), Branch Strategy, Security, Autonomy,
 * and Deployment (only when something was detected).
 * @param {object} analysis - Report produced by analyzeProject().
 * @param {string} cwd - Project root; re-read here for package.json `bin` entries.
 * @returns {string} Complete markdown content, newline-terminated.
 */
function generateGovernance(analysis, cwd) {
  const sections = [];

  // Identity
  sections.push(`# Governance — ${analysis.name}`);
  sections.push(`# Inferred by crag analyze — review and adjust as needed\n`);
  sections.push('## Identity');
  sections.push(`- Project: ${analysis.name}`);
  if (analysis.description) sections.push(`- Description: ${analysis.description}`);
  sections.push(`- Stack: ${analysis.stack.join(', ') || 'unknown'}`);
  sections.push('');

  // Gates
  sections.push('## Gates (run in order, stop on failure)');

  // Group gates by type
  // allGates de-duplicates commands across the gate sections below.
  const allGates = new Set();

  // From linters
  if (analysis.linters.length > 0) {
    sections.push('### Lint');
    for (const linter of analysis.linters) {
      let cmd;
      // Map each detected tool name to its canonical invocation; entries
      // without a mapping (e.g. "npm run lint" strings) are skipped here.
      switch (linter) {
        case 'eslint': cmd = 'npx eslint . --max-warnings 0'; break;
        case 'biome': cmd = 'npx biome check .'; break;
        case 'ruff': cmd = 'ruff check .'; break;
        case 'clippy': cmd = 'cargo clippy -- -D warnings'; break;
        case 'mypy': cmd = 'mypy .'; break;
        case 'typescript': cmd = 'npx tsc --noEmit'; break;
        default: cmd = null;
      }
      if (cmd && !allGates.has(cmd)) {
        sections.push(`- ${cmd}`);
        allGates.add(cmd);
      }
    }
    sections.push('');
  }

  // From testers
  if (analysis.testers.length > 0) {
    sections.push('### Test');
    for (const tester of analysis.testers) {
      if (!allGates.has(tester)) {
        sections.push(`- ${tester}`);
        allGates.add(tester);
      }
    }
    sections.push('');
  }

  // From builders
  if (analysis.builders.length > 0) {
    sections.push('### Build');
    for (const builder of analysis.builders) {
      // "<file> detected" entries are hints, not runnable commands — skip them.
      if (!builder.includes('detected') && !allGates.has(builder)) {
        sections.push(`- ${builder}`);
        allGates.add(builder);
      }
    }
    sections.push('');
  }

  // From CI gates (if not already covered)
  const uniqueCiGates = analysis.ciGates.filter(g => !allGates.has(g));
  if (uniqueCiGates.length > 0) {
    sections.push('### CI (inferred from workflow)');
    for (const gate of uniqueCiGates) {
      sections.push(`- ${gate}`);
    }
    sections.push('');
  }

  // Rust-specific gates
  if (analysis.stack.includes('rust')) {
    // Only added when no 'cargo test' gate was listed above.
    if (!allGates.has('cargo test')) {
      sections.push('### Rust');
      sections.push('- cargo test');
      sections.push('- cargo clippy -- -D warnings');
      sections.push('');
    }
  }

  // Go-specific gates
  if (analysis.stack.includes('go')) {
    if (!allGates.has('go test ./...')) {
      sections.push('### Go');
      sections.push('- go test ./...');
      sections.push('- go vet ./...');
      sections.push('');
    }
  }

  // Node.js syntax check for CLI projects
  // (frontend stacks get their syntax checking from their build step)
  if (analysis.stack.includes('node') && !analysis.stack.includes('next.js') && !analysis.stack.includes('react')) {
    try {
      const pkg = JSON.parse(fs.readFileSync(path.join(cwd, 'package.json'), 'utf-8'));
      if (pkg.bin) {
        // `bin` may be a single path or a name→path map.
        const binFiles = typeof pkg.bin === 'string' ? [pkg.bin] : Object.values(pkg.bin);
        sections.push('### Syntax');
        for (const bin of binFiles) {
          sections.push(`- node --check ${bin}`);
        }
        sections.push('');
      }
    } catch { /* skip */ }
  }

  // Branch Strategy
  sections.push('## Branch Strategy');
  sections.push(`- ${analysis.branchStrategy === 'feature-branches' ? 'Feature branches (feat/, fix/, docs/)' : 'Trunk-based development'}`);
  sections.push(`- ${analysis.commitConvention === 'conventional' ? 'Conventional commits' : 'Free-form commits'}`);
  sections.push('- Commit trailer: Co-Authored-By: Claude <noreply@anthropic.com>');
  sections.push('');

  // Security
  sections.push('## Security');
  sections.push('- No hardcoded secrets — grep for sk_live, AKIA, password= before commit');
  sections.push('');

  // Autonomy
  sections.push('## Autonomy');
  sections.push('- Auto-commit after gates pass');
  sections.push('');

  // Deployment
  if (analysis.deployment.length > 0) {
    sections.push('## Deployment');
    sections.push(`- Target: ${analysis.deployment.join(', ')}`);
    if (analysis.ci) sections.push(`- CI: ${analysis.ci}`);
    sections.push('');
  }

  return sections.join('\n') + '\n';
}
468
+
469
/**
 * Merge freshly generated governance into an existing file: keep the
 * existing content untouched and append only `## ` sections whose heading
 * (case-insensitive) is not already present.
 * @param {string} existing - Current governance.md content.
 * @param {string} generated - Newly generated governance.md content.
 * @returns {string} Existing content, optionally followed by an
 *   "Inferred additions" trailer; unchanged when nothing is new.
 */
function mergeWithExisting(existing, generated) {
  // Headings already present are never re-appended.
  const seenHeadings = new Set(
    [...existing.matchAll(/^## (.+)$/gm)].map((m) => m[1].trim().toLowerCase())
  );

  // Collect each new section as a block: its heading line plus every
  // following line until the next heading or EOF.
  const additions = [];
  let buffer = null; // lines of the in-progress new section; null while skipping

  const flush = () => {
    if (buffer !== null) additions.push(buffer.join('\n').trimEnd());
  };

  for (const line of generated.split('\n')) {
    const heading = line.match(/^## (.+)$/);
    if (heading) {
      flush();
      buffer = seenHeadings.has(heading[1].trim().toLowerCase()) ? null : [line];
    } else if (buffer !== null) {
      buffer.push(line);
    }
  }
  flush();

  if (additions.length === 0) return existing;
  return (
    existing.trimEnd() +
    '\n\n# --- Inferred additions (review) ---\n\n' +
    additions.join('\n\n') +
    '\n'
  );
}
512
+
513
+ module.exports = { analyze, analyzeProject, isGateCommand, mergeWithExisting };