@codexstar/bug-hunter 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +151 -0
- package/LICENSE +21 -0
- package/README.md +665 -0
- package/SKILL.md +624 -0
- package/bin/bug-hunter +222 -0
- package/evals/evals.json +362 -0
- package/modes/_dispatch.md +121 -0
- package/modes/extended.md +94 -0
- package/modes/fix-loop.md +115 -0
- package/modes/fix-pipeline.md +384 -0
- package/modes/large-codebase.md +212 -0
- package/modes/local-sequential.md +143 -0
- package/modes/loop.md +125 -0
- package/modes/parallel.md +113 -0
- package/modes/scaled.md +76 -0
- package/modes/single-file.md +38 -0
- package/modes/small.md +86 -0
- package/package.json +56 -0
- package/prompts/doc-lookup.md +44 -0
- package/prompts/examples/hunter-examples.md +131 -0
- package/prompts/examples/skeptic-examples.md +87 -0
- package/prompts/fixer.md +103 -0
- package/prompts/hunter.md +146 -0
- package/prompts/recon.md +159 -0
- package/prompts/referee.md +122 -0
- package/prompts/skeptic.md +143 -0
- package/prompts/threat-model.md +122 -0
- package/scripts/bug-hunter-state.cjs +537 -0
- package/scripts/code-index.cjs +541 -0
- package/scripts/context7-api.cjs +133 -0
- package/scripts/delta-mode.cjs +219 -0
- package/scripts/dep-scan.cjs +343 -0
- package/scripts/doc-lookup.cjs +316 -0
- package/scripts/fix-lock.cjs +167 -0
- package/scripts/init-test-fixture.sh +19 -0
- package/scripts/payload-guard.cjs +197 -0
- package/scripts/run-bug-hunter.cjs +892 -0
- package/scripts/tests/bug-hunter-state.test.cjs +87 -0
- package/scripts/tests/code-index.test.cjs +57 -0
- package/scripts/tests/delta-mode.test.cjs +47 -0
- package/scripts/tests/fix-lock.test.cjs +36 -0
- package/scripts/tests/fixtures/flaky-worker.cjs +63 -0
- package/scripts/tests/fixtures/low-confidence-worker.cjs +73 -0
- package/scripts/tests/fixtures/success-worker.cjs +42 -0
- package/scripts/tests/payload-guard.test.cjs +41 -0
- package/scripts/tests/run-bug-hunter.test.cjs +403 -0
- package/scripts/tests/test-utils.cjs +59 -0
- package/scripts/tests/worktree-harvest.test.cjs +297 -0
- package/scripts/triage.cjs +528 -0
- package/scripts/worktree-harvest.cjs +516 -0
- package/templates/subagent-wrapper.md +109 -0
|
const assert = require('node:assert/strict');
const fs = require('fs');
const path = require('path');
const test = require('node:test');

const {
  makeSandbox,
  readJson,
  resolveSkillScript,
  runJson,
  writeJson
} = require('./test-utils.cjs');

// End-to-end exercise of the bug-hunter-state CLI: initialise a two-file
// state, walk the chunk lifecycle, run the hash filter before and after an
// update, record findings, append a file, and attach a fact card.
test('bug-hunter-state init/mark/hash/filter/record works end-to-end', () => {
  const workDir = makeSandbox('bug-hunter-state-');
  const script = resolveSkillScript('bug-hunter-state.cjs');
  // Every step shells out to the same state script; wrap the invocation.
  const run = (...cliArgs) => runJson('node', [script, ...cliArgs]);

  const sourceA = path.join(workDir, 'a.ts');
  const sourceB = path.join(workDir, 'b.ts');
  fs.writeFileSync(sourceA, 'const a = 1;\n', 'utf8');
  fs.writeFileSync(sourceB, 'const b = 2;\n', 'utf8');

  const filesJson = path.join(workDir, 'files.json');
  writeJson(filesJson, [sourceA, sourceB]);
  const statePath = path.join(workDir, 'state.json');

  // init with chunk size 1: two files become two chunks.
  const init = run('init', statePath, 'extended', filesJson, '1');
  assert.equal(init.ok, true);
  assert.equal(init.summary.metrics.filesTotal, 2);
  assert.equal(init.summary.metrics.chunksTotal, 2);

  // The first pending chunk is chunk-1.
  const next = run('next-chunk', statePath);
  assert.equal(next.done, false);
  assert.equal(next.chunk.id, 'chunk-1');

  const marked = run('mark-chunk', statePath, 'chunk-1', 'in_progress');
  assert.equal(marked.chunk.status, 'in_progress');

  // Before any hash-update, both files are reported as needing a scan.
  const firstFilter = run('hash-filter', statePath, filesJson);
  assert.deepEqual(firstFilter.skip, []);
  assert.equal(firstFilter.scan.length, 2);

  const scanJson = path.join(workDir, 'scan.json');
  writeJson(scanJson, firstFilter.scan);
  run('hash-update', statePath, scanJson, 'scanned');

  // After hash-update, the same file list is skipped entirely.
  const secondFilter = run('hash-filter', statePath, filesJson);
  assert.equal(secondFilter.scan.length, 0);
  assert.equal(secondFilter.skip.length, 2);

  // Two findings sharing file/lines/claim: one inserted, one treated as an
  // update, leaving a single unique finding.
  const findingsJson = path.join(workDir, 'findings.json');
  writeJson(findingsJson, [
    { bugId: 'BUG-1', severity: 'Low', file: 'src/x.ts', lines: '1', claim: 'x' },
    { bugId: 'BUG-2', severity: 'Critical', file: 'src/x.ts', lines: '1', claim: 'x' }
  ]);
  const recorded = run('record-findings', statePath, findingsJson, 'test');
  assert.equal(recorded.inserted, 1);
  assert.equal(recorded.updated, 1);
  assert.equal(recorded.metrics.findingsUnique, 1);

  run('mark-chunk', statePath, 'chunk-1', 'done');
  const status = run('status', statePath);
  assert.equal(status.summary.metrics.chunksDone, 1);
  assert.equal(status.summary.chunkStatus.done, 1);

  // The surviving ledger entry carries the higher severity.
  const state = readJson(statePath);
  assert.equal(state.bugLedger[0].severity, 'Critical');
  assert.equal(state.metrics.lowConfidenceFindings, 1);

  // append-files: one new file yields one appended file and one new chunk.
  const sourceC = path.join(workDir, 'c.ts');
  fs.writeFileSync(sourceC, 'const c = 3;\n', 'utf8');
  const extraFilesJson = path.join(workDir, 'extra-files.json');
  writeJson(extraFilesJson, [sourceC]);
  const appended = run('append-files', statePath, extraFilesJson);
  assert.equal(appended.appended, 1);
  assert.equal(appended.chunksAdded, 1);

  // record-fact-card: the card is stored in state under its chunk id.
  const factCardJson = path.join(workDir, 'fact-card.json');
  writeJson(factCardJson, {
    apiContracts: ['src/x.ts contract'],
    authAssumptions: ['auth check required'],
    invariants: ['state transition remains atomic']
  });
  const factCard = run('record-fact-card', statePath, 'chunk-1', factCardJson);
  assert.equal(factCard.ok, true);
  const updatedState = readJson(statePath);
  assert.equal(updatedState.factCards['chunk-1'].apiContracts.length, 1);
});
const assert = require('node:assert/strict');
const fs = require('fs');
const path = require('path');
const test = require('node:test');

const {
  makeSandbox,
  writeJson,
  resolveSkillScript,
  runJson
} = require('./test-utils.cjs');

// Builds an index over a three-file chain (route -> service -> auth helper)
// and checks that symbols, call edges, trust boundaries, and query scoping
// are all captured.
test('code-index build captures symbols, call graph, boundaries, and query scope', () => {
  const sandbox = makeSandbox('code-index-');
  const codeIndex = resolveSkillScript('code-index.cjs');
  const filesJson = path.join(sandbox, 'files.json');
  const indexPath = path.join(sandbox, 'index.json');

  const routeFile = path.join(sandbox, 'src', 'routes', 'user-route.ts');
  const serviceFile = path.join(sandbox, 'src', 'routes', 'service.ts');
  const authFile = path.join(sandbox, 'src', 'lib', 'auth.ts');
  fs.mkdirSync(path.dirname(routeFile), { recursive: true });
  fs.mkdirSync(path.dirname(authFile), { recursive: true });
  fs.writeFileSync(routeFile, [
    "import { loadUser } from './service';",
    'export function handler(req) {',
    '  return loadUser(req);',
    '}'
  ].join('\n'), 'utf8');
  fs.writeFileSync(serviceFile, [
    "import { verifyToken } from '../lib/auth';",
    'export function loadUser(req) {',
    '  return verifyToken(req.token);',
    '}'
  ].join('\n'), 'utf8');
  fs.writeFileSync(authFile, [
    'export function verifyToken(token) {',
    '  return Boolean(token);',
    '}'
  ].join('\n'), 'utf8');

  writeJson(filesJson, [routeFile, serviceFile, authFile]);
  const buildResult = runJson('node', [codeIndex, 'build', indexPath, filesJson, sandbox]);
  assert.equal(buildResult.ok, true);
  assert.equal(buildResult.metrics.filesIndexed, 3);
  // assert.ok with a message reports the actual predicate on failure instead
  // of the uninformative `false !== true` from assert.equal(x > 0, true).
  assert.ok(buildResult.metrics.symbolsIndexed > 0, 'expected at least one indexed symbol');
  assert.ok(buildResult.metrics.callEdges > 0, 'expected at least one call edge');
  assert.ok(buildResult.metrics.trustBoundaryFiles >= 1, 'expected at least one trust-boundary file');

  // Querying from the route seed with depth 1 should pull in its direct
  // dependency and flag the route as a trust boundary.
  const seedJson = path.join(sandbox, 'seed.json');
  writeJson(seedJson, [routeFile]);
  const queryResult = runJson('node', [codeIndex, 'query', indexPath, seedJson, '1']);
  assert.equal(queryResult.ok, true);
  assert.ok(queryResult.selected.includes(routeFile), 'seed file must be selected');
  assert.ok(queryResult.selected.includes(serviceFile), 'direct dependency must be selected');
  assert.ok(queryResult.trustBoundaryFiles.includes(routeFile), 'route must be a trust-boundary file');
});
const assert = require('node:assert/strict');
const fs = require('fs');
const path = require('path');
const test = require('node:test');

const {
  makeSandbox,
  resolveSkillScript,
  runJson,
  writeJson
} = require('./test-utils.cjs');

// Builds an index over a changed file, its dependency, and an unrelated
// "critical" overlay file, then checks both delta selection and the
// expansion step.
test('delta-mode selects changed scope and returns expansion overlays', () => {
  const sandbox = makeSandbox('delta-mode-');
  const codeIndex = resolveSkillScript('code-index.cjs');
  const deltaMode = resolveSkillScript('delta-mode.cjs');
  const filesJson = path.join(sandbox, 'files.json');
  const indexPath = path.join(sandbox, 'index.json');
  const changedJson = path.join(sandbox, 'changed.json');

  const changedFile = path.join(sandbox, 'src', 'feature', 'changed.ts');
  const depFile = path.join(sandbox, 'src', 'feature', 'dep.ts');
  const criticalOverlay = path.join(sandbox, 'src', 'api', 'admin-route.ts');
  fs.mkdirSync(path.dirname(changedFile), { recursive: true });
  fs.mkdirSync(path.dirname(criticalOverlay), { recursive: true });
  fs.writeFileSync(changedFile, "import { dep } from './dep';\nexport const value = dep();\n", 'utf8');
  fs.writeFileSync(depFile, 'export function dep() { return 1; }\n', 'utf8');
  fs.writeFileSync(criticalOverlay, 'export function handler(req) { return req.body; }\n', 'utf8');

  writeJson(filesJson, [changedFile, depFile, criticalOverlay]);
  runJson('node', [codeIndex, 'build', indexPath, filesJson, sandbox]);
  writeJson(changedJson, [changedFile]);

  // select: the changed file plus its dependency are in scope; the overlay
  // file is offered as an expansion candidate rather than selected.
  const selected = runJson('node', [deltaMode, 'select', indexPath, changedJson, '1']);
  assert.equal(selected.ok, true);
  // assert.ok with a message beats assert.equal(arr.includes(x), true),
  // whose failure output is just `false !== true`.
  assert.ok(selected.selected.includes(changedFile), 'changed file must be selected');
  assert.ok(selected.selected.includes(depFile), 'dependency must be selected');
  assert.ok(selected.expansionCandidates.includes(criticalOverlay), 'overlay must be an expansion candidate');

  // expand: with the current selection already covered, only the overlay
  // file remains as overlay-only scope.
  const alreadySelectedJson = path.join(sandbox, 'selected.json');
  const lowFilesJson = path.join(sandbox, 'low-files.json');
  writeJson(alreadySelectedJson, selected.selected);
  writeJson(lowFilesJson, [changedFile]);
  const expansion = runJson('node', [deltaMode, 'expand', indexPath, lowFilesJson, alreadySelectedJson, '1']);
  assert.equal(expansion.ok, true);
  assert.ok(expansion.overlayOnly.includes(criticalOverlay), 'overlay must appear in overlayOnly');
});
const assert = require('node:assert/strict');
const path = require('path');
const test = require('node:test');

const {
  makeSandbox,
  resolveSkillScript,
  runJson,
  runRaw
} = require('./test-utils.cjs');

// Acquire/contend/status/release round-trip for the fix-lock script.
test('fix-lock enforces single writer and supports release', () => {
  const workDir = makeSandbox('fix-lock-');
  const script = resolveSkillScript('fix-lock.cjs');
  const lockPath = path.join(workDir, 'bug-hunter-fix.lock');

  // First writer acquires the lock successfully.
  const firstAcquire = runJson('node', [script, 'acquire', lockPath, '120']);
  assert.equal(firstAcquire.ok, true);
  assert.equal(firstAcquire.acquired, true);

  // A competing writer exits non-zero and mentions "lock-held".
  const secondAcquire = runRaw('node', [script, 'acquire', lockPath, '120']);
  assert.notEqual(secondAcquire.status, 0);
  const combinedOutput = `${secondAcquire.stdout || ''}${secondAcquire.stderr || ''}`;
  assert.match(combinedOutput, /lock-held/);

  // While held, status reports an existing, non-stale lock.
  const heldStatus = runJson('node', [script, 'status', lockPath, '120']);
  assert.equal(heldStatus.exists, true);
  assert.equal(heldStatus.stale, false);

  const release = runJson('node', [script, 'release', lockPath]);
  assert.equal(release.ok, true);
  assert.equal(release.released, true);

  // After release, the lock no longer exists.
  const releasedStatus = runJson('node', [script, 'status', lockPath, '120']);
  assert.equal(releasedStatus.exists, false);
});
#!/usr/bin/env node

// Test fixture: a worker that deliberately fails on the first attempt for a
// given chunk and succeeds on later attempts, optionally writing a single
// synthetic finding. Attempt counts persist in the --attempts-file JSON.

const fs = require('fs');
const path = require('path');

// Minimal CLI parser: `--key value` pairs, bare `--flag` becomes 'true',
// and tokens without a `--` prefix are skipped.
function parseArgs(argv) {
  const parsed = {};
  for (let cursor = 0; cursor < argv.length; ) {
    const token = argv[cursor];
    if (!token.startsWith('--')) {
      cursor += 1;
      continue;
    }
    const name = token.slice(2);
    const next = argv[cursor + 1];
    if (!next || next.startsWith('--')) {
      parsed[name] = 'true';
      cursor += 1;
    } else {
      parsed[name] = next;
      cursor += 2;
    }
  }
  return parsed;
}

const options = parseArgs(process.argv.slice(2));
const chunkId = options['chunk-id'] || 'chunk';
const attemptsFile = options['attempts-file'];
const findingsJson = options['findings-json'];

if (!attemptsFile) {
  console.error('attempts-file is required');
  process.exit(1);
}

// Load, bump, and persist the per-chunk attempt counter.
const attemptsPath = path.resolve(attemptsFile);
let attempts = {};
if (fs.existsSync(attemptsPath)) {
  attempts = JSON.parse(fs.readFileSync(attemptsPath, 'utf8'));
}
attempts[chunkId] = (attempts[chunkId] || 0) + 1;
fs.mkdirSync(path.dirname(attemptsPath), { recursive: true });
fs.writeFileSync(attemptsPath, `${JSON.stringify(attempts, null, 2)}\n`, 'utf8');

// Simulated flakiness: the very first attempt for a chunk always fails.
if (attempts[chunkId] === 1) {
  console.error(`intentional failure on first attempt for ${chunkId}`);
  process.exit(1);
}

// On a retry, optionally report one fixed Medium finding for this chunk.
if (findingsJson) {
  const payload = [
    {
      bugId: `BUG-${chunkId}`,
      severity: 'Medium',
      file: `src/retry-${chunkId}.ts`,
      lines: '10-11',
      claim: `retry-success-${chunkId}`
    }
  ];
  fs.writeFileSync(findingsJson, `${JSON.stringify(payload, null, 2)}\n`, 'utf8');
}
#!/usr/bin/env node

// Test fixture: a worker that records every file it is asked to scan,
// emits one Critical finding (with a configurable confidence value) for the
// first scan file, and writes a fact card for its chunk.

const fs = require('fs');
const path = require('path');

// Minimal CLI parser: `--key value` pairs, bare `--flag` becomes 'true',
// and tokens without a `--` prefix are skipped.
function parseArgs(argv) {
  const parsed = {};
  for (let cursor = 0; cursor < argv.length; ) {
    const token = argv[cursor];
    if (!token.startsWith('--')) {
      cursor += 1;
      continue;
    }
    const name = token.slice(2);
    const next = argv[cursor + 1];
    if (!next || next.startsWith('--')) {
      parsed[name] = 'true';
      cursor += 1;
    } else {
      parsed[name] = next;
      cursor += 2;
    }
  }
  return parsed;
}

// Read a JSON file into a value.
function readJson(filePath) {
  return JSON.parse(fs.readFileSync(filePath, 'utf8'));
}

// Write a value as pretty-printed JSON, creating parent directories.
function writeJson(filePath, value) {
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  fs.writeFileSync(filePath, `${JSON.stringify(value, null, 2)}\n`, 'utf8');
}

const options = parseArgs(process.argv.slice(2));
const scanFilesJson = options['scan-files-json'];
const findingsJson = options['findings-json'];
const factsJson = options['facts-json'];
const seenFilesPath = options['seen-files'];
const chunkId = options['chunk-id'] || 'chunk';
const confidence = Number.parseInt(String(options.confidence || '60'), 10);

const scanFiles = scanFilesJson ? readJson(scanFilesJson) : [];

// Maintain the deduplicated union of every file this worker has seen.
if (seenFilesPath) {
  const previouslySeen = fs.existsSync(seenFilesPath) ? readJson(seenFilesPath) : [];
  writeJson(seenFilesPath, [...new Set([...previouslySeen, ...scanFiles])]);
}

// Emit one synthetic finding against the first scan file, falling back to
// confidence 60 if the supplied value did not parse as an integer.
if (findingsJson && scanFiles.length > 0) {
  const [firstFile] = scanFiles;
  writeJson(findingsJson, [
    {
      bugId: `BUG-${chunkId}`,
      severity: 'Critical',
      confidence: Number.isInteger(confidence) ? confidence : 60,
      file: firstFile,
      lines: '1',
      claim: `Low-confidence risk in ${path.basename(firstFile)}`
    }
  ]);
}

// Fact card: one contract entry per scanned file plus fixed assumptions.
if (factsJson) {
  writeJson(factsJson, {
    apiContracts: scanFiles.map((filePath) => `${path.basename(filePath)} contract`),
    authAssumptions: ['Auth decisions must remain explicit'],
    invariants: [`Chunk ${chunkId} invariants captured`]
  });
}
#!/usr/bin/env node

// Test fixture: a worker that always succeeds and, when --findings-json is
// supplied, writes a single fixed Low-severity finding to that path.

const fs = require('fs');

// Minimal CLI parser: `--key value` pairs, bare `--flag` becomes 'true',
// and tokens without a `--` prefix are skipped.
function parseArgs(argv) {
  const parsed = {};
  for (let cursor = 0; cursor < argv.length; ) {
    const token = argv[cursor];
    if (!token.startsWith('--')) {
      cursor += 1;
      continue;
    }
    const name = token.slice(2);
    const next = argv[cursor + 1];
    if (!next || next.startsWith('--')) {
      parsed[name] = 'true';
      cursor += 1;
    } else {
      parsed[name] = next;
      cursor += 2;
    }
  }
  return parsed;
}

const options = parseArgs(process.argv.slice(2));
const findingsJson = options['findings-json'];
if (!findingsJson) {
  // No output requested; succeed silently.
  process.exit(0);
}

const payload = [
  {
    bugId: `BUG-${options['chunk-id'] || '0'}`,
    severity: 'Low',
    file: 'src/example.ts',
    lines: '1',
    claim: 'example'
  }
];
fs.writeFileSync(findingsJson, `${JSON.stringify(payload, null, 2)}\n`, 'utf8');
const assert = require('node:assert/strict');
const path = require('path');
const test = require('node:test');

const {
  makeSandbox,
  resolveSkillScript,
  runJson,
  runRaw,
  writeJson
} = require('./test-utils.cjs');

// Positive and negative validation paths for the hunter payload schema.
test('payload-guard accepts valid hunter payload and rejects malformed payload', () => {
  const workDir = makeSandbox('payload-guard-');
  const guard = resolveSkillScript('payload-guard.cjs');

  // A well-formed hunter payload validates cleanly with an empty error list.
  const validPath = path.join(workDir, 'valid.json');
  writeJson(validPath, {
    skillDir: '/Users/codex/.agents/skills/bug-hunter',
    targetFiles: ['src/a.ts'],
    riskMap: {},
    techStack: { framework: 'express' },
    outputSchema: { type: 'object' }
  });
  const accepted = runJson('node', [guard, 'validate', 'hunter', validPath]);
  assert.equal(accepted.ok, true);
  assert.deepEqual(accepted.errors, []);

  // A payload missing riskMap (among other problems) exits non-zero and
  // names the missing field somewhere in stdout/stderr.
  const invalidPath = path.join(workDir, 'invalid.json');
  writeJson(invalidPath, {
    skillDir: 'relative/path',
    targetFiles: [],
    outputSchema: null
  });
  const rejected = runRaw('node', [guard, 'validate', 'hunter', invalidPath]);
  assert.notEqual(rejected.status, 0);
  const combined = `${rejected.stdout || ''}\n${rejected.stderr || ''}`;
  assert.match(combined, /Missing required field: riskMap/);
});