configsentry 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +126 -0
- package/dist/baseline.js +50 -0
- package/dist/baseline.test.js +12 -0
- package/dist/cli.js +113 -0
- package/dist/compose.js +12 -0
- package/dist/rules.js +142 -0
- package/dist/rules.test.js +18 -0
- package/dist/sarif.js +67 -0
- package/dist/scan.js +46 -0
- package/dist/types.js +1 -0
- package/package.json +58 -0
- package/src/baseline.test.ts +15 -0
- package/src/baseline.ts +68 -0
- package/src/cli.ts +123 -0
- package/src/compose.ts +19 -0
- package/src/rules.test.ts +21 -0
- package/src/rules.ts +148 -0
- package/src/sarif.ts +74 -0
- package/src/scan.ts +47 -0
- package/src/types.ts +16 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Marius Morgenstern
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# ConfigSentry (MVP)
|
|
2
|
+
|
|
3
|
+
Developer-first guardrails for **docker-compose.yml** (security + ops footguns).
|
|
4
|
+
|
|
5
|
+
## What it does
|
|
6
|
+
ConfigSentry reads a Compose file and flags common **high-impact** mistakes:
|
|
7
|
+
- privileged containers (`privileged: true`)
|
|
8
|
+
- Docker socket mounts (`/var/run/docker.sock`)
|
|
9
|
+
- sensitive ports exposed publicly (e.g. `5432:5432` instead of `127.0.0.1:5432:5432`)
|
|
10
|
+
- missing `restart:` policy
|
|
11
|
+
- missing `healthcheck:`
|
|
12
|
+
- likely running as root (missing `user:`)
|
|
13
|
+
|
|
14
|
+
Designed to be **CI-friendly** (non-zero exit code when findings exist).
|
|
15
|
+
|
|
16
|
+
## Quickstart
|
|
17
|
+
|
|
18
|
+
### Run via npx (after npm publish)
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
npx configsentry ./docker-compose.yml
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
### Run from source
|
|
25
|
+
|
|
26
|
+
```bash
|
|
27
|
+
npm install
|
|
28
|
+
npm run build
|
|
29
|
+
node dist/cli.js ./docker-compose.yml
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
### JSON output (CI / tooling)
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
node dist/cli.js ./docker-compose.yml --json
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
### SARIF output (GitHub Code Scanning)
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
node dist/cli.js ./docker-compose.yml --sarif > configsentry.sarif.json
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Baselines (incremental adoption)
|
|
45
|
+
|
|
46
|
+
Generate a baseline (captures current findings):
|
|
47
|
+
|
|
48
|
+
```bash
|
|
49
|
+
node dist/cli.js ./docker-compose.yml --write-baseline .configsentry-baseline.json
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
Then suppress baseline findings in CI:
|
|
53
|
+
|
|
54
|
+
```bash
|
|
55
|
+
node dist/cli.js ./docker-compose.yml --baseline .configsentry-baseline.json
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
## Use in GitHub Actions (copy/paste)
|
|
59
|
+
|
|
60
|
+
More examples: [`docs/action-usage.md`](docs/action-usage.md)
|
|
61
|
+
|
|
62
|
+
### Option A: run from source
|
|
63
|
+
|
|
64
|
+
```yml
|
|
65
|
+
name: Compose lint
|
|
66
|
+
on: [push, pull_request]
|
|
67
|
+
|
|
68
|
+
jobs:
|
|
69
|
+
configsentry:
|
|
70
|
+
runs-on: ubuntu-latest
|
|
71
|
+
steps:
|
|
72
|
+
- uses: actions/checkout@v4
|
|
73
|
+
- uses: actions/setup-node@v4
|
|
74
|
+
with:
|
|
75
|
+
node-version: 22
|
|
76
|
+
|
|
77
|
+
- run: npm ci
|
|
78
|
+
- run: npm run build
|
|
79
|
+
- run: node dist/cli.js ./docker-compose.yml
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
### Option B: use the ConfigSentry composite action
|
|
83
|
+
|
|
84
|
+
```yml
|
|
85
|
+
name: Compose lint
|
|
86
|
+
on: [push, pull_request]
|
|
87
|
+
|
|
88
|
+
permissions:
|
|
89
|
+
contents: read
|
|
90
|
+
security-events: write # required if upload-sarif=true (Code Scanning)
|
|
91
|
+
|
|
92
|
+
jobs:
|
|
93
|
+
configsentry:
|
|
94
|
+
runs-on: ubuntu-latest
|
|
95
|
+
steps:
|
|
96
|
+
- uses: actions/checkout@v4
|
|
97
|
+
- uses: alfredMorgenstern/configsentry@v0.0.9
|
|
98
|
+
with:
|
|
99
|
+
target: .
|
|
100
|
+
# optional: baseline: .configsentry-baseline.json
|
|
101
|
+
sarif: true
|
|
102
|
+
upload-sarif: false
|
|
103
|
+
|
|
104
|
+
# If you set upload-sarif: true, also ensure the workflow has:
|
|
105
|
+
# permissions:
|
|
106
|
+
# security-events: write
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
> Tip: pin to a tag (like `v0.0.8`) for reproducible builds.
|
|
110
|
+
|
|
111
|
+
## Exit codes
|
|
112
|
+
- `0` no findings
|
|
113
|
+
- `2` findings present
|
|
114
|
+
- `1` error
|
|
115
|
+
|
|
116
|
+
## Example
|
|
117
|
+
|
|
118
|
+
```bash
|
|
119
|
+
node dist/cli.js ./example.docker-compose.yml
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
## Next steps
|
|
123
|
+
- publish as `configsentry` on npm
|
|
124
|
+
- GitHub Action wrapper
|
|
125
|
+
- SARIF output
|
|
126
|
+
- autofix mode (`--fix`) for safe transforms
|
package/dist/baseline.js
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import crypto from 'node:crypto';
|
|
3
|
+
export function fingerprintFinding(f) {
|
|
4
|
+
const h = crypto.createHash('sha256');
|
|
5
|
+
// Keep stable + minimal; avoid messages that could change wording.
|
|
6
|
+
h.update(String(f.id));
|
|
7
|
+
h.update('\n');
|
|
8
|
+
h.update(String(f.service ?? ''));
|
|
9
|
+
h.update('\n');
|
|
10
|
+
h.update(String(f.path ?? ''));
|
|
11
|
+
return h.digest('hex');
|
|
12
|
+
}
|
|
13
|
+
export async function writeBaseline(path, findings) {
|
|
14
|
+
const entries = findings.map((f) => ({
|
|
15
|
+
fingerprint: fingerprintFinding(f),
|
|
16
|
+
id: f.id,
|
|
17
|
+
service: f.service,
|
|
18
|
+
path: f.path,
|
|
19
|
+
}));
|
|
20
|
+
const file = {
|
|
21
|
+
version: 1,
|
|
22
|
+
generatedAt: new Date().toISOString(),
|
|
23
|
+
tool: 'ConfigSentry',
|
|
24
|
+
entries,
|
|
25
|
+
};
|
|
26
|
+
await fs.writeFile(path, JSON.stringify(file, null, 2) + '\n', 'utf8');
|
|
27
|
+
}
|
|
28
|
+
export async function loadBaseline(path) {
|
|
29
|
+
const raw = await fs.readFile(path, 'utf8');
|
|
30
|
+
const json = JSON.parse(raw);
|
|
31
|
+
const entries = Array.isArray(json?.entries) ? json.entries : [];
|
|
32
|
+
const set = new Set();
|
|
33
|
+
for (const e of entries) {
|
|
34
|
+
if (typeof e?.fingerprint === 'string')
|
|
35
|
+
set.add(e.fingerprint);
|
|
36
|
+
}
|
|
37
|
+
return set;
|
|
38
|
+
}
|
|
39
|
+
export function applyBaseline(findings, baselineFingerprints) {
|
|
40
|
+
const kept = [];
|
|
41
|
+
const suppressed = [];
|
|
42
|
+
for (const f of findings) {
|
|
43
|
+
const fp = fingerprintFinding(f);
|
|
44
|
+
if (baselineFingerprints.has(fp))
|
|
45
|
+
suppressed.push(f);
|
|
46
|
+
else
|
|
47
|
+
kept.push(f);
|
|
48
|
+
}
|
|
49
|
+
return { kept, suppressed };
|
|
50
|
+
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import test from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { fingerprintFinding, applyBaseline } from './baseline.js';
|
|
4
|
+
test('baseline suppression works', () => {
|
|
5
|
+
const f1 = { id: 'r1', title: 't', severity: 'low', message: 'm', service: 'svc', path: '/tmp/x#p' };
|
|
6
|
+
const f2 = { id: 'r2', title: 't', severity: 'low', message: 'm', service: 'svc', path: '/tmp/x#p2' };
|
|
7
|
+
const set = new Set([fingerprintFinding(f1)]);
|
|
8
|
+
const { kept, suppressed } = applyBaseline([f1, f2], set);
|
|
9
|
+
assert.equal(kept.length, 1);
|
|
10
|
+
assert.equal(suppressed.length, 1);
|
|
11
|
+
assert.equal(kept[0].id, 'r2');
|
|
12
|
+
});
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import process from 'node:process';
|
|
4
|
+
import fs from 'node:fs/promises';
|
|
5
|
+
import { fileURLToPath } from 'node:url';
|
|
6
|
+
import { loadCompose } from './compose.js';
|
|
7
|
+
import { runRules } from './rules.js';
|
|
8
|
+
import { findingsToSarif } from './sarif.js';
|
|
9
|
+
import { resolveTargets } from './scan.js';
|
|
10
|
+
import { applyBaseline, loadBaseline, writeBaseline } from './baseline.js';
|
|
11
|
+
function usage() {
|
|
12
|
+
console.log(`ConfigSentry (MVP)\n\nUsage:\n configsentry <file-or-dir> [--json|--sarif] [--baseline <file>] [--write-baseline <file>]\n\nOutput:\n --json machine-readable findings\n --sarif SARIF 2.1.0 (for GitHub code scanning)\n\nBaselines:\n --baseline <file> suppress findings present in a baseline file\n --write-baseline <file> write baseline file for current findings and exit 0\n\nExit codes:\n 0 = no findings (after baseline suppression)
|
|
13
|
+
2 = findings present
|
|
14
|
+
1 = error
|
|
15
|
+
`);
|
|
16
|
+
}
|
|
17
|
+
async function main() {
|
|
18
|
+
const args = process.argv.slice(2);
|
|
19
|
+
if (args.includes('-v') || args.includes('--version')) {
|
|
20
|
+
try {
|
|
21
|
+
const here = path.dirname(fileURLToPath(import.meta.url));
|
|
22
|
+
const pkgPath = path.resolve(here, '../package.json');
|
|
23
|
+
const raw = await fs.readFile(pkgPath, 'utf8');
|
|
24
|
+
const pkg = JSON.parse(raw);
|
|
25
|
+
console.log(pkg.version || 'unknown');
|
|
26
|
+
}
|
|
27
|
+
catch {
|
|
28
|
+
console.log('unknown');
|
|
29
|
+
}
|
|
30
|
+
process.exit(0);
|
|
31
|
+
}
|
|
32
|
+
if (args.length === 0 || args.includes('-h') || args.includes('--help')) {
|
|
33
|
+
usage();
|
|
34
|
+
process.exit(0);
|
|
35
|
+
}
|
|
36
|
+
const json = args.includes('--json');
|
|
37
|
+
const sarif = args.includes('--sarif');
|
|
38
|
+
if (json && sarif) {
|
|
39
|
+
console.error('Error: choose only one output mode: --json or --sarif');
|
|
40
|
+
process.exit(1);
|
|
41
|
+
}
|
|
42
|
+
const baselineIdx = args.indexOf('--baseline');
|
|
43
|
+
const baselinePath = baselineIdx >= 0 ? args[baselineIdx + 1] : undefined;
|
|
44
|
+
const writeBaselineIdx = args.indexOf('--write-baseline');
|
|
45
|
+
const writeBaselinePath = writeBaselineIdx >= 0 ? args[writeBaselineIdx + 1] : undefined;
|
|
46
|
+
const target = args.find((a) => !a.startsWith('-'));
|
|
47
|
+
if (!target) {
|
|
48
|
+
usage();
|
|
49
|
+
process.exit(1);
|
|
50
|
+
}
|
|
51
|
+
const targetPaths = await resolveTargets(target);
|
|
52
|
+
if (targetPaths.length === 0) {
|
|
53
|
+
console.error(`No compose files found in: ${target}`);
|
|
54
|
+
process.exit(1);
|
|
55
|
+
}
|
|
56
|
+
let allFindings = [];
|
|
57
|
+
for (const targetPath of targetPaths) {
|
|
58
|
+
const { compose } = await loadCompose(targetPath);
|
|
59
|
+
allFindings = allFindings.concat(runRules(compose, targetPath));
|
|
60
|
+
}
|
|
61
|
+
// Baseline suppression
|
|
62
|
+
let suppressed = [];
|
|
63
|
+
let findings = allFindings;
|
|
64
|
+
if (baselinePath) {
|
|
65
|
+
const set = await loadBaseline(path.resolve(baselinePath));
|
|
66
|
+
const res = applyBaseline(allFindings, set);
|
|
67
|
+
findings = res.kept;
|
|
68
|
+
suppressed = res.suppressed;
|
|
69
|
+
}
|
|
70
|
+
// Baseline generation mode
|
|
71
|
+
if (writeBaselinePath) {
|
|
72
|
+
await writeBaseline(path.resolve(writeBaselinePath), allFindings);
|
|
73
|
+
console.log(`Wrote baseline: ${path.resolve(writeBaselinePath)} (${allFindings.length} finding(s))`);
|
|
74
|
+
process.exit(0);
|
|
75
|
+
}
|
|
76
|
+
if (json) {
|
|
77
|
+
console.log(JSON.stringify({ targetPaths, findings, suppressedCount: suppressed.length }, null, 2));
|
|
78
|
+
}
|
|
79
|
+
else if (sarif) {
|
|
80
|
+
console.log(JSON.stringify(findingsToSarif(findings), null, 2));
|
|
81
|
+
}
|
|
82
|
+
else {
|
|
83
|
+
const scope = targetPaths.length === 1 ? targetPaths[0] : `${targetPaths.length} file(s)`;
|
|
84
|
+
if (findings.length === 0) {
|
|
85
|
+
console.log(`✅ No findings for ${scope}`);
|
|
86
|
+
if (suppressed.length > 0) {
|
|
87
|
+
console.log(`(suppressed by baseline: ${suppressed.length})`);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
else {
|
|
91
|
+
console.log(`❌ ${findings.length} finding(s) for ${scope}`);
|
|
92
|
+
if (suppressed.length > 0) {
|
|
93
|
+
console.log(`(suppressed by baseline: ${suppressed.length})`);
|
|
94
|
+
}
|
|
95
|
+
console.log('');
|
|
96
|
+
for (const f of findings) {
|
|
97
|
+
console.log(`[${f.severity.toUpperCase()}] ${f.title}`);
|
|
98
|
+
console.log(`- service: ${f.service ?? '-'}
|
|
99
|
+
- rule: ${f.id}
|
|
100
|
+
- where: ${f.path ?? '-'}
|
|
101
|
+
- msg: ${f.message}`);
|
|
102
|
+
if (f.suggestion)
|
|
103
|
+
console.log(`- fix: ${f.suggestion}`);
|
|
104
|
+
console.log('');
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
process.exit(findings.length === 0 ? 0 : 2);
|
|
109
|
+
}
|
|
110
|
+
main().catch((err) => {
|
|
111
|
+
console.error('Error:', err);
|
|
112
|
+
process.exit(1);
|
|
113
|
+
});
|
package/dist/compose.js
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import YAML from 'yaml';
|
|
4
|
+
export async function loadCompose(filePath) {
|
|
5
|
+
const abs = path.resolve(filePath);
|
|
6
|
+
const text = await fs.readFile(abs, 'utf8');
|
|
7
|
+
// Support multi-document YAML (---). If multiple docs exist, Compose content is typically the first.
|
|
8
|
+
const docs = YAML.parseAllDocuments(text);
|
|
9
|
+
const first = docs[0];
|
|
10
|
+
const doc = first ? first.toJSON() : YAML.parse(text);
|
|
11
|
+
return { compose: doc, raw: doc };
|
|
12
|
+
}
|
package/dist/rules.js
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
const SENSITIVE_PORTS = new Set([5432, 3306, 6379, 27017, 9200]);
|
|
2
|
+
function normalizePorts(ports) {
|
|
3
|
+
if (!Array.isArray(ports))
|
|
4
|
+
return [];
|
|
5
|
+
const res = [];
|
|
6
|
+
for (const p of ports) {
|
|
7
|
+
if (typeof p === 'number') {
|
|
8
|
+
res.push({ containerPort: p, raw: String(p) });
|
|
9
|
+
continue;
|
|
10
|
+
}
|
|
11
|
+
if (typeof p !== 'string')
|
|
12
|
+
continue;
|
|
13
|
+
// patterns:
|
|
14
|
+
// "8080:80"
|
|
15
|
+
// "127.0.0.1:8080:80"
|
|
16
|
+
// "5432:5432"
|
|
17
|
+
const parts = p.split(':');
|
|
18
|
+
if (parts.length === 2) {
|
|
19
|
+
const hostPort = Number(parts[0]);
|
|
20
|
+
const containerPort = Number(parts[1]);
|
|
21
|
+
res.push({ hostPort: Number.isFinite(hostPort) ? hostPort : undefined, containerPort: Number.isFinite(containerPort) ? containerPort : undefined, raw: p });
|
|
22
|
+
}
|
|
23
|
+
else if (parts.length === 3) {
|
|
24
|
+
const hostIp = parts[0];
|
|
25
|
+
const hostPort = Number(parts[1]);
|
|
26
|
+
const containerPort = Number(parts[2]);
|
|
27
|
+
res.push({ hostIp, hostPort: Number.isFinite(hostPort) ? hostPort : undefined, containerPort: Number.isFinite(containerPort) ? containerPort : undefined, raw: p });
|
|
28
|
+
}
|
|
29
|
+
else {
|
|
30
|
+
res.push({ raw: p });
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
return res;
|
|
34
|
+
}
|
|
35
|
+
export function runRules(compose, targetPath) {
|
|
36
|
+
const findings = [];
|
|
37
|
+
const services = compose?.services ?? {};
|
|
38
|
+
for (const [serviceName, svc] of Object.entries(services)) {
|
|
39
|
+
// Rule: privileged
|
|
40
|
+
if (svc?.privileged === true) {
|
|
41
|
+
findings.push({
|
|
42
|
+
id: 'compose.privileged',
|
|
43
|
+
title: 'Privileged container',
|
|
44
|
+
severity: 'high',
|
|
45
|
+
message: `Service '${serviceName}' runs with privileged: true.`,
|
|
46
|
+
service: serviceName,
|
|
47
|
+
path: `${targetPath}#services.${serviceName}.privileged`,
|
|
48
|
+
suggestion: 'Remove privileged: true unless absolutely required; prefer adding only the needed capabilities.'
|
|
49
|
+
});
|
|
50
|
+
}
|
|
51
|
+
// Rule: docker socket mount
|
|
52
|
+
const volumes = Array.isArray(svc?.volumes) ? svc.volumes : [];
|
|
53
|
+
for (const v of volumes) {
|
|
54
|
+
if (typeof v !== 'string')
|
|
55
|
+
continue;
|
|
56
|
+
if (v.includes('/var/run/docker.sock')) {
|
|
57
|
+
findings.push({
|
|
58
|
+
id: 'compose.docker-socket',
|
|
59
|
+
title: 'Docker socket mounted',
|
|
60
|
+
severity: 'high',
|
|
61
|
+
message: `Service '${serviceName}' mounts /var/run/docker.sock which effectively grants root-on-host.`,
|
|
62
|
+
service: serviceName,
|
|
63
|
+
path: `${targetPath}#services.${serviceName}.volumes`,
|
|
64
|
+
suggestion: 'Avoid mounting the docker socket. If you need it, isolate the runner and treat it as privileged infrastructure.'
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
if (v.startsWith('/:') || v.startsWith('/:/')) {
|
|
68
|
+
findings.push({
|
|
69
|
+
id: 'compose.host-root-mount',
|
|
70
|
+
title: 'Host root mounted',
|
|
71
|
+
severity: 'high',
|
|
72
|
+
message: `Service '${serviceName}' appears to mount the host root filesystem ('${v}').`,
|
|
73
|
+
service: serviceName,
|
|
74
|
+
path: `${targetPath}#services.${serviceName}.volumes`,
|
|
75
|
+
suggestion: 'Avoid mounting /. Mount only specific directories required by the app.'
|
|
76
|
+
});
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
// Rule: restart policy
|
|
80
|
+
if (svc?.restart == null) {
|
|
81
|
+
findings.push({
|
|
82
|
+
id: 'compose.missing-restart',
|
|
83
|
+
title: 'Missing restart policy',
|
|
84
|
+
severity: 'medium',
|
|
85
|
+
message: `Service '${serviceName}' has no restart policy.`,
|
|
86
|
+
service: serviceName,
|
|
87
|
+
path: `${targetPath}#services.${serviceName}.restart`,
|
|
88
|
+
suggestion: "Set restart: unless-stopped (or on-failure) to improve resilience."
|
|
89
|
+
});
|
|
90
|
+
}
|
|
91
|
+
// Rule: healthcheck
|
|
92
|
+
if (svc?.healthcheck == null) {
|
|
93
|
+
findings.push({
|
|
94
|
+
id: 'compose.missing-healthcheck',
|
|
95
|
+
title: 'Missing healthcheck',
|
|
96
|
+
severity: 'medium',
|
|
97
|
+
message: `Service '${serviceName}' has no healthcheck.`,
|
|
98
|
+
service: serviceName,
|
|
99
|
+
path: `${targetPath}#services.${serviceName}.healthcheck`,
|
|
100
|
+
suggestion: 'Add a healthcheck so orchestrators can detect broken containers (and dependent services can wait on healthy state).'
|
|
101
|
+
});
|
|
102
|
+
}
|
|
103
|
+
// Rule: runs as root
|
|
104
|
+
const user = svc?.user;
|
|
105
|
+
if (user == null || user === '0' || user === 0 || user === 'root') {
|
|
106
|
+
findings.push({
|
|
107
|
+
id: 'compose.runs-as-root',
|
|
108
|
+
title: 'Container likely runs as root',
|
|
109
|
+
severity: 'high',
|
|
110
|
+
message: `Service '${serviceName}' does not specify a non-root user (user:).`,
|
|
111
|
+
service: serviceName,
|
|
112
|
+
path: `${targetPath}#services.${serviceName}.user`,
|
|
113
|
+
suggestion: 'Set user: "1000:1000" (or a dedicated UID/GID) and ensure the image supports running unprivileged.'
|
|
114
|
+
});
|
|
115
|
+
}
|
|
116
|
+
// Rule: exposed sensitive ports
|
|
117
|
+
const ports = normalizePorts(svc?.ports);
|
|
118
|
+
for (const p of ports) {
|
|
119
|
+
const hostIp = p.hostIp;
|
|
120
|
+
const hostPort = p.hostPort;
|
|
121
|
+
const containerPort = p.containerPort;
|
|
122
|
+
const checkPort = containerPort ?? hostPort;
|
|
123
|
+
if (checkPort == null)
|
|
124
|
+
continue;
|
|
125
|
+
if (!SENSITIVE_PORTS.has(checkPort))
|
|
126
|
+
continue;
|
|
127
|
+
const bindsAll = hostIp == null || hostIp === '0.0.0.0' || hostIp === '';
|
|
128
|
+
if (bindsAll) {
|
|
129
|
+
findings.push({
|
|
130
|
+
id: 'compose.exposed-sensitive-port',
|
|
131
|
+
title: 'Sensitive port exposed publicly',
|
|
132
|
+
severity: 'high',
|
|
133
|
+
message: `Service '${serviceName}' exposes a commonly sensitive port (${checkPort}) on all interfaces (ports: '${p.raw}').`,
|
|
134
|
+
service: serviceName,
|
|
135
|
+
path: `${targetPath}#services.${serviceName}.ports`,
|
|
136
|
+
suggestion: `Bind to 127.0.0.1 (e.g. '127.0.0.1:${hostPort ?? checkPort}:${containerPort ?? checkPort}') or remove the port and use an internal network.`
|
|
137
|
+
});
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
return findings;
|
|
142
|
+
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import test from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { runRules } from './rules.js';
|
|
4
|
+
test('detects privileged container', () => {
|
|
5
|
+
const compose = { services: { app: { privileged: true } } };
|
|
6
|
+
const findings = runRules(compose, 'docker-compose.yml');
|
|
7
|
+
assert.ok(findings.some((f) => f.id === 'compose.privileged' && f.service === 'app'));
|
|
8
|
+
});
|
|
9
|
+
test('detects sensitive port exposed', () => {
|
|
10
|
+
const compose = { services: { db: { ports: ['5432:5432'] } } };
|
|
11
|
+
const findings = runRules(compose, 'docker-compose.yml');
|
|
12
|
+
assert.ok(findings.some((f) => f.id === 'compose.exposed-sensitive-port' && f.service === 'db'));
|
|
13
|
+
});
|
|
14
|
+
test('detects docker socket mount', () => {
|
|
15
|
+
const compose = { services: { runner: { volumes: ['/var/run/docker.sock:/var/run/docker.sock'] } } };
|
|
16
|
+
const findings = runRules(compose, 'docker-compose.yml');
|
|
17
|
+
assert.ok(findings.some((f) => f.id === 'compose.docker-socket' && f.service === 'runner'));
|
|
18
|
+
});
|
package/dist/sarif.js
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
// Minimal SARIF 2.1.0 generator for GitHub code scanning.
|
|
2
|
+
// Docs: https://docs.oasis-open.org/sarif/sarif/v2.1.0/sarif-v2.1.0.html
|
|
3
|
+
function level(sev) {
|
|
4
|
+
const s = String(sev || '').toLowerCase();
|
|
5
|
+
if (s === 'high' || s === 'critical' || s === 'error')
|
|
6
|
+
return 'error';
|
|
7
|
+
if (s === 'medium' || s === 'warn' || s === 'warning')
|
|
8
|
+
return 'warning';
|
|
9
|
+
return 'note';
|
|
10
|
+
}
|
|
11
|
+
export function findingsToSarif(findings, opts = {}) {
|
|
12
|
+
const toolName = opts.toolName || 'ConfigSentry';
|
|
13
|
+
const rulesById = new Map();
|
|
14
|
+
for (const f of findings) {
|
|
15
|
+
if (!rulesById.has(f.id)) {
|
|
16
|
+
rulesById.set(f.id, {
|
|
17
|
+
id: f.id,
|
|
18
|
+
name: f.id,
|
|
19
|
+
shortDescription: { text: f.title },
|
|
20
|
+
fullDescription: { text: f.message },
|
|
21
|
+
help: { text: f.suggestion ? `${f.message}\n\nFix: ${f.suggestion}` : f.message },
|
|
22
|
+
defaultConfiguration: { level: level(f.severity) },
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
const results = findings.map((f) => {
|
|
27
|
+
const res = {
|
|
28
|
+
ruleId: f.id,
|
|
29
|
+
level: level(f.severity),
|
|
30
|
+
message: { text: f.suggestion ? `${f.message} Fix: ${f.suggestion}` : f.message },
|
|
31
|
+
properties: {
|
|
32
|
+
severity: f.severity,
|
|
33
|
+
service: f.service ?? undefined,
|
|
34
|
+
},
|
|
35
|
+
};
|
|
36
|
+
// Best-effort location: we store a pseudo "where" path today.
|
|
37
|
+
// If it contains "file#pointer", split it; else treat it as a file uri.
|
|
38
|
+
if (f.path) {
|
|
39
|
+
const [file, fragment] = String(f.path).split('#');
|
|
40
|
+
res.locations = [
|
|
41
|
+
{
|
|
42
|
+
physicalLocation: {
|
|
43
|
+
artifactLocation: { uri: file },
|
|
44
|
+
region: fragment ? { snippet: { text: fragment } } : undefined,
|
|
45
|
+
},
|
|
46
|
+
},
|
|
47
|
+
];
|
|
48
|
+
}
|
|
49
|
+
return res;
|
|
50
|
+
});
|
|
51
|
+
return {
|
|
52
|
+
version: '2.1.0',
|
|
53
|
+
$schema: 'https://json.schemastore.org/sarif-2.1.0.json',
|
|
54
|
+
runs: [
|
|
55
|
+
{
|
|
56
|
+
tool: {
|
|
57
|
+
driver: {
|
|
58
|
+
name: toolName,
|
|
59
|
+
informationUri: 'https://github.com/alfredMorgenstern/configsentry',
|
|
60
|
+
rules: Array.from(rulesById.values()),
|
|
61
|
+
},
|
|
62
|
+
},
|
|
63
|
+
results,
|
|
64
|
+
},
|
|
65
|
+
],
|
|
66
|
+
};
|
|
67
|
+
}
|
package/dist/scan.js
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
const COMPOSE_FILENAMES = new Set([
|
|
4
|
+
'docker-compose.yml',
|
|
5
|
+
'docker-compose.yaml',
|
|
6
|
+
'compose.yml',
|
|
7
|
+
'compose.yaml',
|
|
8
|
+
]);
|
|
9
|
+
async function isFile(p) {
|
|
10
|
+
try {
|
|
11
|
+
return (await fs.stat(p)).isFile();
|
|
12
|
+
}
|
|
13
|
+
catch {
|
|
14
|
+
return false;
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
async function isDir(p) {
|
|
18
|
+
try {
|
|
19
|
+
return (await fs.stat(p)).isDirectory();
|
|
20
|
+
}
|
|
21
|
+
catch {
|
|
22
|
+
return false;
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
export async function resolveTargets(input) {
|
|
26
|
+
const abs = path.resolve(input);
|
|
27
|
+
if (await isFile(abs))
|
|
28
|
+
return [abs];
|
|
29
|
+
if (await isDir(abs)) {
|
|
30
|
+
const entries = await fs.readdir(abs);
|
|
31
|
+
const hits = [];
|
|
32
|
+
for (const e of entries) {
|
|
33
|
+
if (COMPOSE_FILENAMES.has(e))
|
|
34
|
+
hits.push(path.join(abs, e));
|
|
35
|
+
// Common pattern: docker-compose.prod.yml etc.
|
|
36
|
+
if (/^docker-compose\..+\.ya?ml$/i.test(e))
|
|
37
|
+
hits.push(path.join(abs, e));
|
|
38
|
+
if (/^compose\..+\.ya?ml$/i.test(e))
|
|
39
|
+
hits.push(path.join(abs, e));
|
|
40
|
+
}
|
|
41
|
+
// de-dupe
|
|
42
|
+
return Array.from(new Set(hits)).sort();
|
|
43
|
+
}
|
|
44
|
+
// Not a file/dir: treat as a path anyway (will fail later with a nice error)
|
|
45
|
+
return [abs];
|
|
46
|
+
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/package.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "configsentry",
|
|
3
|
+
"version": "0.0.9",
|
|
4
|
+
"description": "Developer-first guardrails for docker-compose.yml (security + ops footguns).",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"author": "Marius Morgenstern",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/alfredMorgenstern/configsentry.git"
|
|
11
|
+
},
|
|
12
|
+
"homepage": "https://configsentry.morgenstern.work",
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/alfredMorgenstern/configsentry/issues"
|
|
15
|
+
},
|
|
16
|
+
"bin": {
|
|
17
|
+
"configsentry": "dist/cli.js"
|
|
18
|
+
},
|
|
19
|
+
"files": [
|
|
20
|
+
"dist/",
|
|
21
|
+
"src/",
|
|
22
|
+
"README.md",
|
|
23
|
+
"LICENSE"
|
|
24
|
+
],
|
|
25
|
+
"engines": {
|
|
26
|
+
"node": ">=18"
|
|
27
|
+
},
|
|
28
|
+
"scripts": {
|
|
29
|
+
"test": "node --test dist/**/*.test.js",
|
|
30
|
+
"build": "tsc -p tsconfig.json",
|
|
31
|
+
"prepack": "npm run build",
|
|
32
|
+
"start": "node dist/cli.js",
|
|
33
|
+
"dev": "node --loader ts-node/esm src/cli.ts",
|
|
34
|
+
"lint:example": "node dist/cli.js ./example.docker-compose.yml"
|
|
35
|
+
},
|
|
36
|
+
"keywords": [
|
|
37
|
+
"docker",
|
|
38
|
+
"docker-compose",
|
|
39
|
+
"compose",
|
|
40
|
+
"security",
|
|
41
|
+
"devops",
|
|
42
|
+
"lint",
|
|
43
|
+
"yaml"
|
|
44
|
+
],
|
|
45
|
+
"private": false,
|
|
46
|
+
"publishConfig": {
|
|
47
|
+
"access": "public"
|
|
48
|
+
},
|
|
49
|
+
"dependencies": {
|
|
50
|
+
"yaml": "^2.8.2",
|
|
51
|
+
"zod": "^4.3.6"
|
|
52
|
+
},
|
|
53
|
+
"devDependencies": {
|
|
54
|
+
"@types/node": "^25.2.3",
|
|
55
|
+
"ts-node": "^10.9.2",
|
|
56
|
+
"typescript": "^5.9.3"
|
|
57
|
+
}
|
|
58
|
+
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import test from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { fingerprintFinding, applyBaseline } from './baseline.js';
|
|
4
|
+
|
|
5
|
+
test('baseline suppression works', () => {
|
|
6
|
+
const f1 = { id: 'r1', title: 't', severity: 'low', message: 'm', service: 'svc', path: '/tmp/x#p' };
|
|
7
|
+
const f2 = { id: 'r2', title: 't', severity: 'low', message: 'm', service: 'svc', path: '/tmp/x#p2' };
|
|
8
|
+
|
|
9
|
+
const set = new Set([fingerprintFinding(f1 as any)]);
|
|
10
|
+
const { kept, suppressed } = applyBaseline([f1 as any, f2 as any], set);
|
|
11
|
+
|
|
12
|
+
assert.equal(kept.length, 1);
|
|
13
|
+
assert.equal(suppressed.length, 1);
|
|
14
|
+
assert.equal(kept[0].id, 'r2');
|
|
15
|
+
});
|
package/src/baseline.ts
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import crypto from 'node:crypto';
|
|
3
|
+
import type { Finding } from './types.js';
|
|
4
|
+
|
|
5
|
+
export type BaselineEntry = {
|
|
6
|
+
fingerprint: string;
|
|
7
|
+
id: string;
|
|
8
|
+
service?: string;
|
|
9
|
+
path?: string;
|
|
10
|
+
};
|
|
11
|
+
|
|
12
|
+
export type BaselineFile = {
|
|
13
|
+
version: 1;
|
|
14
|
+
generatedAt: string;
|
|
15
|
+
tool: string;
|
|
16
|
+
entries: BaselineEntry[];
|
|
17
|
+
};
|
|
18
|
+
|
|
19
|
+
export function fingerprintFinding(f: Finding): string {
|
|
20
|
+
const h = crypto.createHash('sha256');
|
|
21
|
+
// Keep stable + minimal; avoid messages that could change wording.
|
|
22
|
+
h.update(String(f.id));
|
|
23
|
+
h.update('\n');
|
|
24
|
+
h.update(String(f.service ?? ''));
|
|
25
|
+
h.update('\n');
|
|
26
|
+
h.update(String(f.path ?? ''));
|
|
27
|
+
return h.digest('hex');
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
export async function writeBaseline(path: string, findings: Finding[]) {
|
|
31
|
+
const entries: BaselineEntry[] = findings.map((f) => ({
|
|
32
|
+
fingerprint: fingerprintFinding(f),
|
|
33
|
+
id: f.id,
|
|
34
|
+
service: f.service,
|
|
35
|
+
path: f.path,
|
|
36
|
+
}));
|
|
37
|
+
|
|
38
|
+
const file: BaselineFile = {
|
|
39
|
+
version: 1,
|
|
40
|
+
generatedAt: new Date().toISOString(),
|
|
41
|
+
tool: 'ConfigSentry',
|
|
42
|
+
entries,
|
|
43
|
+
};
|
|
44
|
+
|
|
45
|
+
await fs.writeFile(path, JSON.stringify(file, null, 2) + '\n', 'utf8');
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export async function loadBaseline(path: string): Promise<Set<string>> {
|
|
49
|
+
const raw = await fs.readFile(path, 'utf8');
|
|
50
|
+
const json = JSON.parse(raw);
|
|
51
|
+
const entries: any[] = Array.isArray(json?.entries) ? json.entries : [];
|
|
52
|
+
const set = new Set<string>();
|
|
53
|
+
for (const e of entries) {
|
|
54
|
+
if (typeof e?.fingerprint === 'string') set.add(e.fingerprint);
|
|
55
|
+
}
|
|
56
|
+
return set;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
export function applyBaseline(findings: Finding[], baselineFingerprints: Set<string>) {
|
|
60
|
+
const kept: Finding[] = [];
|
|
61
|
+
const suppressed: Finding[] = [];
|
|
62
|
+
for (const f of findings) {
|
|
63
|
+
const fp = fingerprintFinding(f);
|
|
64
|
+
if (baselineFingerprints.has(fp)) suppressed.push(f);
|
|
65
|
+
else kept.push(f);
|
|
66
|
+
}
|
|
67
|
+
return { kept, suppressed };
|
|
68
|
+
}
|
package/src/cli.ts
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import path from 'node:path';
import process from 'node:process';
import fs from 'node:fs/promises';
import { fileURLToPath } from 'node:url';
import type { Finding } from './types.js';
import { applyBaseline, loadBaseline, writeBaseline } from './baseline.js';
import { loadCompose } from './compose.js';
import { runRules } from './rules.js';
import { findingsToSarif } from './sarif.js';
import { resolveTargets } from './scan.js';
|
|
11
|
+
|
|
12
|
+
// Print CLI help (usage line, output modes, baseline flags, exit codes) to stdout.
// NOTE(review): exact inner spacing of this help string is best-effort; the
// literal spans real newlines for the last lines and `\n` escapes elsewhere.
function usage() {
  console.log(`ConfigSentry (MVP)\n\nUsage:\n configsentry <file-or-dir> [--json|--sarif] [--baseline <file>] [--write-baseline <file>]\n\nOutput:\n --json machine-readable findings\n --sarif SARIF 2.1.0 (for GitHub code scanning)\n\nBaselines:\n --baseline <file> suppress findings present in a baseline file\n --write-baseline <file> write baseline file for current findings and exit 0\n\nExit codes:\n 0 = no findings (after baseline suppression)
 2 = findings present
 1 = error
`);
}
|
|
18
|
+
|
|
19
|
+
async function main() {
|
|
20
|
+
const args = process.argv.slice(2);
|
|
21
|
+
|
|
22
|
+
if (args.includes('-v') || args.includes('--version')) {
|
|
23
|
+
try {
|
|
24
|
+
const here = path.dirname(fileURLToPath(import.meta.url));
|
|
25
|
+
const pkgPath = path.resolve(here, '../package.json');
|
|
26
|
+
const raw = await fs.readFile(pkgPath, 'utf8');
|
|
27
|
+
const pkg = JSON.parse(raw);
|
|
28
|
+
console.log(pkg.version || 'unknown');
|
|
29
|
+
} catch {
|
|
30
|
+
console.log('unknown');
|
|
31
|
+
}
|
|
32
|
+
process.exit(0);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
if (args.length === 0 || args.includes('-h') || args.includes('--help')) {
|
|
36
|
+
usage();
|
|
37
|
+
process.exit(0);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
const json = args.includes('--json');
|
|
41
|
+
const sarif = args.includes('--sarif');
|
|
42
|
+
if (json && sarif) {
|
|
43
|
+
console.error('Error: choose only one output mode: --json or --sarif');
|
|
44
|
+
process.exit(1);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
const baselineIdx = args.indexOf('--baseline');
|
|
48
|
+
const baselinePath = baselineIdx >= 0 ? args[baselineIdx + 1] : undefined;
|
|
49
|
+
const writeBaselineIdx = args.indexOf('--write-baseline');
|
|
50
|
+
const writeBaselinePath = writeBaselineIdx >= 0 ? args[writeBaselineIdx + 1] : undefined;
|
|
51
|
+
|
|
52
|
+
const target = args.find((a) => !a.startsWith('-'));
|
|
53
|
+
if (!target) {
|
|
54
|
+
usage();
|
|
55
|
+
process.exit(1);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const targetPaths = await resolveTargets(target);
|
|
59
|
+
if (targetPaths.length === 0) {
|
|
60
|
+
console.error(`No compose files found in: ${target}`);
|
|
61
|
+
process.exit(1);
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
let allFindings = [] as any[];
|
|
65
|
+
for (const targetPath of targetPaths) {
|
|
66
|
+
const { compose } = await loadCompose(targetPath);
|
|
67
|
+
allFindings = allFindings.concat(runRules(compose, targetPath));
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
// Baseline suppression
|
|
71
|
+
let suppressed: any[] = [];
|
|
72
|
+
let findings = allFindings;
|
|
73
|
+
if (baselinePath) {
|
|
74
|
+
const set = await loadBaseline(path.resolve(baselinePath));
|
|
75
|
+
const res = applyBaseline(allFindings, set);
|
|
76
|
+
findings = res.kept;
|
|
77
|
+
suppressed = res.suppressed;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Baseline generation mode
|
|
81
|
+
if (writeBaselinePath) {
|
|
82
|
+
await writeBaseline(path.resolve(writeBaselinePath), allFindings);
|
|
83
|
+
console.log(`Wrote baseline: ${path.resolve(writeBaselinePath)} (${allFindings.length} finding(s))`);
|
|
84
|
+
process.exit(0);
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
if (json) {
|
|
88
|
+
console.log(JSON.stringify({ targetPaths, findings, suppressedCount: suppressed.length }, null, 2));
|
|
89
|
+
} else if (sarif) {
|
|
90
|
+
console.log(JSON.stringify(findingsToSarif(findings), null, 2));
|
|
91
|
+
} else {
|
|
92
|
+
const scope = targetPaths.length === 1 ? targetPaths[0] : `${targetPaths.length} file(s)`;
|
|
93
|
+
|
|
94
|
+
if (findings.length === 0) {
|
|
95
|
+
console.log(`✅ No findings for ${scope}`);
|
|
96
|
+
if (suppressed.length > 0) {
|
|
97
|
+
console.log(`(suppressed by baseline: ${suppressed.length})`);
|
|
98
|
+
}
|
|
99
|
+
} else {
|
|
100
|
+
console.log(`❌ ${findings.length} finding(s) for ${scope}`);
|
|
101
|
+
if (suppressed.length > 0) {
|
|
102
|
+
console.log(`(suppressed by baseline: ${suppressed.length})`);
|
|
103
|
+
}
|
|
104
|
+
console.log('');
|
|
105
|
+
for (const f of findings) {
|
|
106
|
+
console.log(`[${f.severity.toUpperCase()}] ${f.title}`);
|
|
107
|
+
console.log(`- service: ${f.service ?? '-'}
|
|
108
|
+
- rule: ${f.id}
|
|
109
|
+
- where: ${f.path ?? '-'}
|
|
110
|
+
- msg: ${f.message}`);
|
|
111
|
+
if (f.suggestion) console.log(`- fix: ${f.suggestion}`);
|
|
112
|
+
console.log('');
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
process.exit(findings.length === 0 ? 0 : 2);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// Top-level entry: any unhandled rejection from main() (unreadable file,
// invalid JSON baseline, etc.) is reported and mapped to exit code 1.
main().catch((err) => {
  console.error('Error:', err);
  process.exit(1);
});
|
package/src/compose.ts
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import YAML from 'yaml';
|
|
4
|
+
|
|
5
|
+
export type ComposeFile = {
|
|
6
|
+
services?: Record<string, any>;
|
|
7
|
+
};
|
|
8
|
+
|
|
9
|
+
export async function loadCompose(filePath: string): Promise<{ compose: ComposeFile; raw: any }> {
|
|
10
|
+
const abs = path.resolve(filePath);
|
|
11
|
+
const text = await fs.readFile(abs, 'utf8');
|
|
12
|
+
|
|
13
|
+
// Support multi-document YAML (---). If multiple docs exist, Compose content is typically the first.
|
|
14
|
+
const docs = YAML.parseAllDocuments(text);
|
|
15
|
+
const first = docs[0];
|
|
16
|
+
const doc = first ? first.toJSON() : YAML.parse(text);
|
|
17
|
+
|
|
18
|
+
return { compose: doc as ComposeFile, raw: doc };
|
|
19
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import test from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import { runRules } from './rules.js';
|
|
4
|
+
|
|
5
|
+
// Each rule test feeds a minimal in-memory compose object through runRules and
// asserts that the expected rule id fires for the expected service name.

test('detects privileged container', () => {
  const compose = { services: { app: { privileged: true } } };
  const findings = runRules(compose, 'docker-compose.yml');
  assert.ok(findings.some((f) => f.id === 'compose.privileged' && f.service === 'app'));
});

test('detects sensitive port exposed', () => {
  // 5432 (Postgres) published without a host IP => bound on all interfaces.
  const compose = { services: { db: { ports: ['5432:5432'] } } };
  const findings = runRules(compose, 'docker-compose.yml');
  assert.ok(findings.some((f) => f.id === 'compose.exposed-sensitive-port' && f.service === 'db'));
});

test('detects docker socket mount', () => {
  const compose = { services: { runner: { volumes: ['/var/run/docker.sock:/var/run/docker.sock'] } } };
  const findings = runRules(compose, 'docker-compose.yml');
  assert.ok(findings.some((f) => f.id === 'compose.docker-socket' && f.service === 'runner'));
});
|
package/src/rules.ts
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import type { Finding } from './types.js';
|
|
2
|
+
|
|
3
|
+
const SENSITIVE_PORTS = new Set([5432, 3306, 6379, 27017, 9200]);
|
|
4
|
+
|
|
5
|
+
function normalizePorts(ports: any): Array<{ hostIp?: string; hostPort?: number; containerPort?: number; raw: string }> {
|
|
6
|
+
if (!Array.isArray(ports)) return [];
|
|
7
|
+
const res: Array<{ hostIp?: string; hostPort?: number; containerPort?: number; raw: string }> = [];
|
|
8
|
+
for (const p of ports) {
|
|
9
|
+
if (typeof p === 'number') {
|
|
10
|
+
res.push({ containerPort: p, raw: String(p) });
|
|
11
|
+
continue;
|
|
12
|
+
}
|
|
13
|
+
if (typeof p !== 'string') continue;
|
|
14
|
+
// patterns:
|
|
15
|
+
// "8080:80"
|
|
16
|
+
// "127.0.0.1:8080:80"
|
|
17
|
+
// "5432:5432"
|
|
18
|
+
const parts = p.split(':');
|
|
19
|
+
if (parts.length === 2) {
|
|
20
|
+
const hostPort = Number(parts[0]);
|
|
21
|
+
const containerPort = Number(parts[1]);
|
|
22
|
+
res.push({ hostPort: Number.isFinite(hostPort) ? hostPort : undefined, containerPort: Number.isFinite(containerPort) ? containerPort : undefined, raw: p });
|
|
23
|
+
} else if (parts.length === 3) {
|
|
24
|
+
const hostIp = parts[0];
|
|
25
|
+
const hostPort = Number(parts[1]);
|
|
26
|
+
const containerPort = Number(parts[2]);
|
|
27
|
+
res.push({ hostIp, hostPort: Number.isFinite(hostPort) ? hostPort : undefined, containerPort: Number.isFinite(containerPort) ? containerPort : undefined, raw: p });
|
|
28
|
+
} else {
|
|
29
|
+
res.push({ raw: p });
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
return res;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
 * Evaluate every built-in rule against a parsed compose document.
 *
 * @param compose    parsed compose file; only an optional `services` map is read
 * @param targetPath path of the scanned file, used only to build the
 *                   human-readable `file#pointer` pseudo-path on each finding
 * @returns one Finding per violation, grouped per service in rule order
 */
export function runRules(compose: any, targetPath: string): Finding[] {
  const findings: Finding[] = [];
  // Tolerate a missing or malformed top level: no services -> no findings.
  const services: Record<string, any> = compose?.services ?? {};

  for (const [serviceName, svc] of Object.entries(services)) {
    // Rule: privileged — privileged: true disables most container isolation.
    if (svc?.privileged === true) {
      findings.push({
        id: 'compose.privileged',
        title: 'Privileged container',
        severity: 'high',
        message: `Service '${serviceName}' runs with privileged: true.`,
        service: serviceName,
        path: `${targetPath}#services.${serviceName}.privileged`,
        suggestion: 'Remove privileged: true unless absolutely required; prefer adding only the needed capabilities.'
      });
    }

    // Rule: docker socket mount. Only short (string) volume syntax is
    // inspected — NOTE(review): a long-form (object) bind of docker.sock
    // would currently go undetected; confirm whether that matters here.
    const volumes: any[] = Array.isArray(svc?.volumes) ? svc.volumes : [];
    for (const v of volumes) {
      if (typeof v !== 'string') continue;
      if (v.includes('/var/run/docker.sock')) {
        findings.push({
          id: 'compose.docker-socket',
          title: 'Docker socket mounted',
          severity: 'high',
          message: `Service '${serviceName}' mounts /var/run/docker.sock which effectively grants root-on-host.`,
          service: serviceName,
          path: `${targetPath}#services.${serviceName}.volumes`,
          suggestion: 'Avoid mounting the docker socket. If you need it, isolate the runner and treat it as privileged infrastructure.'
        });
      }
      // A mount source of '/' ("/:container-path") exposes the whole host FS.
      // (The second startsWith is redundant: '/:/' already begins with '/:'.)
      if (v.startsWith('/:') || v.startsWith('/:/')) {
        findings.push({
          id: 'compose.host-root-mount',
          title: 'Host root mounted',
          severity: 'high',
          message: `Service '${serviceName}' appears to mount the host root filesystem ('${v}').`,
          service: serviceName,
          path: `${targetPath}#services.${serviceName}.volumes`,
          suggestion: 'Avoid mounting /. Mount only specific directories required by the app.'
        });
      }
    }

    // Rule: restart policy ('== null' deliberately matches null and undefined).
    if (svc?.restart == null) {
      findings.push({
        id: 'compose.missing-restart',
        title: 'Missing restart policy',
        severity: 'medium',
        message: `Service '${serviceName}' has no restart policy.`,
        service: serviceName,
        path: `${targetPath}#services.${serviceName}.restart`,
        suggestion: "Set restart: unless-stopped (or on-failure) to improve resilience."
      });
    }

    // Rule: healthcheck presence.
    if (svc?.healthcheck == null) {
      findings.push({
        id: 'compose.missing-healthcheck',
        title: 'Missing healthcheck',
        severity: 'medium',
        message: `Service '${serviceName}' has no healthcheck.`,
        service: serviceName,
        path: `${targetPath}#services.${serviceName}.healthcheck`,
        suggestion: 'Add a healthcheck so orchestrators can detect broken containers (and dependent services can wait on healthy state).'
      });
    }

    // Rule: runs as root — no user:, or an explicit root / UID-0 user.
    // NOTE(review): heuristic only; the image's USER directive may already be
    // non-root, which this rule cannot see.
    const user = svc?.user;
    if (user == null || user === '0' || user === 0 || user === 'root') {
      findings.push({
        id: 'compose.runs-as-root',
        title: 'Container likely runs as root',
        severity: 'high',
        message: `Service '${serviceName}' does not specify a non-root user (user:).`,
        service: serviceName,
        path: `${targetPath}#services.${serviceName}.user`,
        suggestion: 'Set user: "1000:1000" (or a dedicated UID/GID) and ensure the image supports running unprivileged.'
      });
    }

    // Rule: sensitive ports published on all interfaces.
    const ports = normalizePorts(svc?.ports);
    for (const p of ports) {
      const hostIp = p.hostIp;
      const hostPort = p.hostPort;
      const containerPort = p.containerPort;

      // Classify by the container port; fall back to the host port.
      const checkPort = containerPort ?? hostPort;
      if (checkPort == null) continue;
      if (!SENSITIVE_PORTS.has(checkPort)) continue;

      // No host IP (or 0.0.0.0 / empty) means the port binds every interface.
      const bindsAll = hostIp == null || hostIp === '0.0.0.0' || hostIp === '';
      if (bindsAll) {
        findings.push({
          id: 'compose.exposed-sensitive-port',
          title: 'Sensitive port exposed publicly',
          severity: 'high',
          message: `Service '${serviceName}' exposes a commonly sensitive port (${checkPort}) on all interfaces (ports: '${p.raw}').`,
          service: serviceName,
          path: `${targetPath}#services.${serviceName}.ports`,
          suggestion: `Bind to 127.0.0.1 (e.g. '127.0.0.1:${hostPort ?? checkPort}:${containerPort ?? checkPort}') or remove the port and use an internal network.`
        });
      }
    }
  }

  return findings;
}
|
package/src/sarif.ts
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import type { Finding } from './types.js';
|
|
2
|
+
|
|
3
|
+
// Minimal SARIF 2.1.0 generator for GitHub code scanning.
|
|
4
|
+
// Docs: https://docs.oasis-open.org/sarif/sarif/v2.1.0/sarif-v2.1.0.html
|
|
5
|
+
|
|
6
|
+
function level(sev: string): 'error' | 'warning' | 'note' {
|
|
7
|
+
const s = String(sev || '').toLowerCase();
|
|
8
|
+
if (s === 'high' || s === 'critical' || s === 'error') return 'error';
|
|
9
|
+
if (s === 'medium' || s === 'warn' || s === 'warning') return 'warning';
|
|
10
|
+
return 'note';
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
/**
 * Convert findings into a minimal SARIF 2.1.0 log (one run, one driver).
 *
 * The rule catalogue is deduplicated by finding id; the first finding seen for
 * an id supplies that rule's descriptions and default level.
 *
 * @param findings findings to report
 * @param opts     toolName overrides the driver name.
 *                 NOTE(review): opts.repoRoot is accepted but currently unused.
 */
export function findingsToSarif(findings: Finding[], opts: { toolName?: string; repoRoot?: string } = {}) {
  const toolName = opts.toolName || 'ConfigSentry';

  // Build the per-run rule metadata (SARIF tool.driver.rules), one per rule id.
  const rulesById = new Map<string, any>();
  for (const f of findings) {
    if (!rulesById.has(f.id)) {
      rulesById.set(f.id, {
        id: f.id,
        name: f.id,
        shortDescription: { text: f.title },
        fullDescription: { text: f.message },
        help: { text: f.suggestion ? `${f.message}\n\nFix: ${f.suggestion}` : f.message },
        defaultConfiguration: { level: level(f.severity) },
      });
    }
  }

  // One SARIF result per finding, with the original severity/service kept in
  // the (tool-defined) properties bag.
  const results = findings.map((f) => {
    const res: any = {
      ruleId: f.id,
      level: level(f.severity),
      message: { text: f.suggestion ? `${f.message} Fix: ${f.suggestion}` : f.message },
      properties: {
        severity: f.severity,
        service: f.service ?? undefined,
      },
    };

    // Best-effort location: we store a pseudo "where" path today.
    // If it contains "file#pointer", split it; else treat it as a file uri.
    // The pointer (e.g. "services.db.ports") is surfaced as a region snippet
    // since no line/column information is tracked.
    if (f.path) {
      const [file, fragment] = String(f.path).split('#');
      res.locations = [
        {
          physicalLocation: {
            artifactLocation: { uri: file },
            region: fragment ? { snippet: { text: fragment } } : undefined,
          },
        },
      ];
    }

    return res;
  });

  return {
    version: '2.1.0',
    $schema: 'https://json.schemastore.org/sarif-2.1.0.json',
    runs: [
      {
        tool: {
          driver: {
            name: toolName,
            informationUri: 'https://github.com/alfredMorgenstern/configsentry',
            rules: Array.from(rulesById.values()),
          },
        },
        results,
      },
    ],
  };
}
|
package/src/scan.ts
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
|
|
4
|
+
// Canonical compose filenames matched exactly when scanning a directory
// (both the classic docker-compose.* and the newer compose.* spellings);
// variants like docker-compose.prod.yml are handled by regex in resolveTargets.
const COMPOSE_FILENAMES = new Set([
  'docker-compose.yml',
  'docker-compose.yaml',
  'compose.yml',
  'compose.yaml',
]);
|
|
10
|
+
|
|
11
|
+
async function isFile(p: string) {
|
|
12
|
+
try {
|
|
13
|
+
return (await fs.stat(p)).isFile();
|
|
14
|
+
} catch {
|
|
15
|
+
return false;
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
async function isDir(p: string) {
|
|
20
|
+
try {
|
|
21
|
+
return (await fs.stat(p)).isDirectory();
|
|
22
|
+
} catch {
|
|
23
|
+
return false;
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export async function resolveTargets(input: string): Promise<string[]> {
|
|
28
|
+
const abs = path.resolve(input);
|
|
29
|
+
|
|
30
|
+
if (await isFile(abs)) return [abs];
|
|
31
|
+
|
|
32
|
+
if (await isDir(abs)) {
|
|
33
|
+
const entries = await fs.readdir(abs);
|
|
34
|
+
const hits: string[] = [];
|
|
35
|
+
for (const e of entries) {
|
|
36
|
+
if (COMPOSE_FILENAMES.has(e)) hits.push(path.join(abs, e));
|
|
37
|
+
// Common pattern: docker-compose.prod.yml etc.
|
|
38
|
+
if (/^docker-compose\..+\.ya?ml$/i.test(e)) hits.push(path.join(abs, e));
|
|
39
|
+
if (/^compose\..+\.ya?ml$/i.test(e)) hits.push(path.join(abs, e));
|
|
40
|
+
}
|
|
41
|
+
// de-dupe
|
|
42
|
+
return Array.from(new Set(hits)).sort();
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// Not a file/dir: treat as a path anyway (will fail later with a nice error)
|
|
46
|
+
return [abs];
|
|
47
|
+
}
|
package/src/types.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/** Severity scale used by every rule; mapped to SARIF levels in sarif.ts. */
export type Severity = 'low' | 'medium' | 'high';

/** A single rule violation discovered in a compose file. */
export type Finding = {
  // Stable rule identifier, e.g. 'compose.privileged'.
  id: string;
  // Short human-readable rule title.
  title: string;
  severity: Severity;
  // Full description of the violation for this specific service.
  message: string;
  // Offending service name, when the finding is service-scoped.
  service?: string;
  // Pseudo-pointer of the form "file#services.<name>.<key>".
  path?: string;
  // Optional remediation hint shown in text and SARIF output.
  suggestion?: string;
};

/** Scan result for a single compose file. */
export type Report = {
  targetPath: string;
  findings: Finding[];
};
|