@scantrix/cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +219 -0
- package/dist/astConfigParser.js +308 -0
- package/dist/astRuleHelpers.js +1451 -0
- package/dist/auditConfig.js +81 -0
- package/dist/ciExtractor.js +327 -0
- package/dist/cli.js +156 -0
- package/dist/configExtractor.js +261 -0
- package/dist/cypressExtractor.js +217 -0
- package/dist/diffTracker.js +310 -0
- package/dist/report.js +1904 -0
- package/dist/sarifFormatter.js +88 -0
- package/dist/scanResult.js +45 -0
- package/dist/scanner.js +3519 -0
- package/dist/scoring.js +206 -0
- package/dist/sinks/index.js +29 -0
- package/dist/sinks/jsonSink.js +28 -0
- package/dist/sinks/types.js +2 -0
- package/docs/high-res-icon.svg +26 -0
- package/docs/scantrix-logo-light.svg +64 -0
- package/docs/scantrix-logo.svg +64 -0
- package/package.json +55 -0
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.loadBaseline = loadBaseline;
|
|
7
|
+
exports.getGitInfo = getGitInfo;
|
|
8
|
+
exports.saveSnapshot = saveSnapshot;
|
|
9
|
+
exports.listSnapshots = listSnapshots;
|
|
10
|
+
exports.findLatestBaseline = findLatestBaseline;
|
|
11
|
+
exports.loadSnapshotHistory = loadSnapshotHistory;
|
|
12
|
+
exports.computeDiff = computeDiff;
|
|
13
|
+
exports.formatDiffMarkdown = formatDiffMarkdown;
|
|
14
|
+
exports.formatDiffConsoleSummary = formatDiffConsoleSummary;
|
|
15
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
16
|
+
const path_1 = __importDefault(require("path"));
|
|
17
|
+
const child_process_1 = require("child_process");
|
|
18
|
+
/**
 * Load a previous findings.json as the diff baseline.
 *
 * @param {string} baselinePath - Path to a findings.json from an earlier scan.
 * @returns {Promise<Array>} Parsed findings, or `[]` when the file is missing
 *   or unreadable (a missing baseline simply means "no prior findings").
 */
async function loadBaseline(baselinePath) {
    try {
        const contents = await promises_1.default.readFile(baselinePath, "utf8");
        return JSON.parse(contents);
    }
    catch {
        // Missing, unreadable, or corrupt baseline — treat as an empty one.
        return [];
    }
}
|
|
30
|
+
/**
 * Attempt to read git branch and short commit SHA from a repo path.
 *
 * @param {string} [repoPath] - Directory to run git in; defaults to cwd.
 * @returns {{branch?: string, commit?: string}} Empty object when git fails
 *   (not a repo, git missing, etc.).
 */
function getGitInfo(repoPath) {
    const execOpts = repoPath ? { cwd: repoPath } : undefined;
    // Pipe all stdio so git errors never leak to the user's console.
    const run = (cmd) => (0, child_process_1.execSync)(cmd, { ...execOpts, stdio: ["pipe", "pipe", "pipe"] })
        .toString()
        .trim();
    try {
        return {
            branch: run("git rev-parse --abbrev-ref HEAD"),
            commit: run("git rev-parse --short HEAD"),
        };
    }
    catch {
        return {};
    }
}
|
|
46
|
+
/**
 * Save a timestamped snapshot of findings to the output directory.
 * Creates `<outDir>/snapshots/<ISO-date>/findings.json` + `meta.json`.
 *
 * @param {string} outDir - Root output directory of the scan.
 * @param {Array} findings - Findings persisted verbatim as findings.json.
 * @param {number|object} [optionsOrRiskScore] - Legacy bare riskScore number,
 *   or an options object `{ riskScore, repoPath, inventory }`.
 * @returns {Promise<string>} Path of the created snapshot directory.
 */
async function saveSnapshot(outDir, findings, optionsOrRiskScore) {
    // Backwards-compatible: accept bare riskScore number or options object
    const opts = typeof optionsOrRiskScore === "number"
        ? { riskScore: optionsOrRiskScore }
        : optionsOrRiskScore ?? {};
    // Capture a single instant so the directory name and meta.timestamp agree
    // (previously two separate `new Date()` calls could differ by a few ms).
    const timestamp = new Date().toISOString();
    const snapshotDir = path_1.default.join(outDir, "snapshots", timestamp.replace(/[:.]/g, "-"));
    await promises_1.default.mkdir(snapshotDir, { recursive: true });
    await promises_1.default.writeFile(path_1.default.join(snapshotDir, "findings.json"), JSON.stringify(findings, null, 2), "utf8");
    const git = getGitInfo(opts.repoPath);
    // Count severities in a single pass instead of three filter() sweeps.
    let highCount = 0;
    let mediumCount = 0;
    let lowCount = 0;
    for (const f of findings) {
        if (f.severity === "high")
            highCount += 1;
        else if (f.severity === "medium")
            mediumCount += 1;
        else if (f.severity === "low")
            lowCount += 1;
    }
    const meta = {
        timestamp,
        findingsCount: findings.length,
        highCount,
        mediumCount,
        lowCount,
        riskScore: opts.riskScore,
        totalFiles: opts.inventory?.totalFiles,
        testFiles: opts.inventory?.testFiles,
        gitBranch: git.branch,
        gitCommit: git.commit,
    };
    await promises_1.default.writeFile(path_1.default.join(snapshotDir, "meta.json"), JSON.stringify(meta, null, 2), "utf8");
    return snapshotDir;
}
|
|
75
|
+
/**
 * List all snapshots in chronological order.
 *
 * @param {string} outDir - Root output directory of the scan.
 * @returns {Promise<string[]>} Full paths of snapshot directories, oldest
 *   first; `[]` when the snapshots directory does not exist.
 */
async function listSnapshots(outDir) {
    const snapshotsDir = path_1.default.join(outDir, "snapshots");
    let entries;
    try {
        entries = await promises_1.default.readdir(snapshotsDir, { withFileTypes: true });
    }
    catch {
        // No snapshots directory yet — nothing to list.
        return [];
    }
    const names = entries
        .filter((entry) => entry.isDirectory())
        .map((entry) => entry.name);
    names.sort(); // ISO timestamp dirs sort chronologically as strings
    return names.map((name) => path_1.default.join(snapshotsDir, name));
}
|
|
92
|
+
/**
 * Find the latest snapshot directory (most recent previous scan).
 * Returns the path to the snapshot's findings.json, or undefined if none exist.
 *
 * @param {string} outDir - Root output directory of the scan.
 * @returns {Promise<string|undefined>}
 */
async function findLatestBaseline(outDir) {
    const snapshots = await listSnapshots(outDir);
    const latest = snapshots[snapshots.length - 1];
    if (!latest)
        return undefined;
    const findingsPath = path_1.default.join(latest, "findings.json");
    try {
        // access() throws when the file is missing or unreadable.
        await promises_1.default.access(findingsPath);
    }
    catch {
        return undefined;
    }
    return findingsPath;
}
|
|
110
|
+
/**
 * Load the snapshot history — severity counts over time for diff comparison.
 *
 * @param {string} outDir - Root output directory of the scan.
 * @returns {Promise<Array>} Parsed meta.json objects, oldest first; corrupt
 *   or incomplete snapshots are silently skipped.
 */
async function loadSnapshotHistory(outDir) {
    const history = [];
    for (const dir of await listSnapshots(outDir)) {
        const metaPath = path_1.default.join(dir, "meta.json");
        try {
            const raw = await promises_1.default.readFile(metaPath, "utf8");
            history.push(JSON.parse(raw));
        }
        catch {
            // skip corrupt snapshots
        }
    }
    return history;
}
|
|
127
|
+
/**
 * Normalise an evidence key for comparison.
 * On Windows the drive-letter casing can vary between runs,
 * so we lower-case the whole key to avoid phantom diffs.
 *
 * @param {string} file - Evidence file path (any separator style).
 * @param {number} line - Line number of the evidence.
 * @returns {string} Canonical `<lowercased/forward-slashed path>:<line>` key.
 */
function normalizeEvidenceKey(file, line) {
    const normalizedPath = file.replace(/\\/g, "/").toLowerCase();
    return `${normalizedPath}:${line}`;
}
|
|
135
|
+
/**
 * Compare two sets of findings by stable findingId.
 * Each findingId appears at most once per scan, so direct set comparison works.
 * Also computes evidence-level deltas for persistent findings.
 *
 * @param {Array} previous - Baseline findings.
 * @param {Array} current - Findings from the current scan.
 * @param {object} [options] - Optional `{ baselineInventory, currentInventory }`
 *   with numeric `totalFiles`/`testFiles` counts.
 * @returns {object} Diff result: new/resolved/persistent findings, a summary
 *   string, evidence changes, and (when available) an inventory delta.
 */
function computeDiff(previous, current, options) {
    const previousById = new Map(previous.map((f) => [f.findingId, f]));
    const currentIds = new Set(current.map((f) => f.findingId));
    const newFindings = current.filter((f) => !previousById.has(f.findingId));
    const persistentFindings = current.filter((f) => previousById.has(f.findingId));
    const resolvedFindings = previous.filter((f) => !currentIds.has(f.findingId));
    // Evidence-level churn within findings present on both sides.
    const evidenceChanges = [];
    for (const finding of persistentFindings) {
        const baseline = previousById.get(finding.findingId);
        if (!baseline)
            continue;
        const before = new Set(baseline.evidence.map((e) => normalizeEvidenceKey(e.file, e.line)));
        const after = new Set(finding.evidence.map((e) => normalizeEvidenceKey(e.file, e.line)));
        let addedEvidence = 0;
        for (const key of after) {
            if (!before.has(key))
                addedEvidence += 1;
        }
        let removedEvidence = 0;
        for (const key of before) {
            if (!after.has(key))
                removedEvidence += 1;
        }
        if (addedEvidence > 0 || removedEvidence > 0) {
            evidenceChanges.push({
                findingId: finding.findingId,
                title: finding.title,
                addedEvidence,
                removedEvidence,
            });
        }
    }
    // Inventory delta, only when both snapshots carry numeric file counts.
    let inventoryDelta;
    const baseInv = options?.baselineInventory;
    const currInv = options?.currentInventory;
    const isNum = (v) => typeof v === "number";
    if (baseInv && currInv &&
        isNum(baseInv.totalFiles) && isNum(currInv.totalFiles) &&
        isNum(baseInv.testFiles) && isNum(currInv.testFiles)) {
        inventoryDelta = {
            totalFiles: { prev: baseInv.totalFiles, curr: currInv.totalFiles },
            testFiles: { prev: baseInv.testFiles, curr: currInv.testFiles },
        };
    }
    const pieces = [];
    if (newFindings.length > 0)
        pieces.push(`${newFindings.length} new`);
    if (resolvedFindings.length > 0)
        pieces.push(`${resolvedFindings.length} resolved`);
    if (persistentFindings.length > 0)
        pieces.push(`${persistentFindings.length} persistent`);
    return {
        newFindings,
        resolvedFindings,
        persistentFindings,
        summary: pieces.length ? pieces.join(", ") : "No changes",
        currentTimestamp: new Date().toISOString(),
        evidenceChanges,
        inventoryDelta,
    };
}
|
|
196
|
+
/**
 * Render the diff as a Markdown section suitable for inclusion in the report.
 *
 * @param {object} diff - Result of `computeDiff` (optionally with a
 *   `baselineTimestamp` attached by the caller).
 * @returns {string} Markdown text, lines joined with "\n".
 */
function formatDiffMarkdown(diff) {
    // Severity → display label; `withText` appends the word after the dot.
    // (Previously this ternary chain was duplicated in three places.)
    const sevLabel = (severity, withText) => {
        if (severity === "high")
            return withText ? "🔴 High" : "🔴";
        if (severity === "medium")
            return withText ? "🟠 Medium" : "🟠";
        return withText ? "🟡 Low" : "🟡";
    };
    // Shared table renderer for the New and Resolved sections (identical shape).
    const pushFindingsTable = (heading, findings) => {
        md.push(heading);
        md.push(``);
        md.push(`| ID | Title | Severity |`);
        md.push(`|----|-------|----------|`);
        for (const f of findings) {
            md.push(`| \`${f.findingId}\` | ${f.title} | ${sevLabel(f.severity, true)} |`);
        }
        md.push(``);
    };
    const md = [];
    md.push(`## Change Analysis (Compared to Baseline)`);
    md.push(``);
    if (diff.baselineTimestamp) {
        md.push(`> Baseline: ${diff.baselineTimestamp} → Current: ${diff.currentTimestamp}`);
        md.push(``);
    }
    // Inventory delta (file counts)
    if (diff.inventoryDelta) {
        const { totalFiles, testFiles } = diff.inventoryDelta;
        const fmtDelta = (prev, curr) => {
            const d = curr - prev;
            if (d === 0)
                return `${curr} (no change)`;
            return `${prev} → ${curr} (${d > 0 ? "+" : ""}${d})`;
        };
        md.push(`| Metric | Change |`);
        md.push(`|--------|--------|`);
        md.push(`| Total files scanned | ${fmtDelta(totalFiles.prev, totalFiles.curr)} |`);
        md.push(`| Test files | ${fmtDelta(testFiles.prev, testFiles.curr)} |`);
        md.push(``);
    }
    md.push(`**Summary:** ${diff.summary}`);
    md.push(``);
    if (diff.newFindings.length > 0) {
        pushFindingsTable(`### 🆕 New Issues`, diff.newFindings);
    }
    if (diff.resolvedFindings.length > 0) {
        pushFindingsTable(`### ✅ Resolved Issues`, diff.resolvedFindings);
    }
    if (diff.persistentFindings.length > 0) {
        md.push(`### 🔄 Persistent Issues (${diff.persistentFindings.length})`);
        md.push(``);
        for (const f of diff.persistentFindings) {
            md.push(`- ${sevLabel(f.severity, false)} **${f.findingId}**: ${f.title}`);
        }
        md.push(``);
    }
    if (diff.evidenceChanges.length > 0) {
        md.push(`### 📊 Evidence Changes (within persistent findings)`);
        md.push(``);
        md.push(`| ID | Title | New Evidence | Removed Evidence |`);
        md.push(`|----|-------|:-----------:|:---------------:|`);
        for (const c of diff.evidenceChanges) {
            md.push(`| \`${c.findingId}\` | ${c.title} | +${c.addedEvidence} | -${c.removedEvidence} |`);
        }
        md.push(``);
    }
    return md.join("\n");
}
|
|
279
|
+
/**
 * Format a one-line console summary for the diff.
 *
 * @param {object} diff - Result of `computeDiff`.
 * @returns {string} Single summary line prefixed with "[diff]".
 */
function formatDiffConsoleSummary(diff) {
    const segments = [];
    const newCount = diff.newFindings.length;
    const resolvedCount = diff.resolvedFindings.length;
    const persistentCount = diff.persistentFindings.length;
    if (newCount > 0) {
        segments.push(`${newCount} new issue${newCount > 1 ? "s" : ""}`);
    }
    if (resolvedCount > 0) {
        segments.push(`${resolvedCount} resolved`);
    }
    if (persistentCount > 0) {
        segments.push(`${persistentCount} unchanged`);
    }
    if (segments.length === 0) {
        return "[diff] First scan — no baseline to compare.";
    }
    let line = `[diff] Delta since last audit: ${segments.join(", ")}.`;
    if (diff.inventoryDelta) {
        const { totalFiles, testFiles } = diff.inventoryDelta;
        const codebaseDeltas = [];
        if (totalFiles.curr !== totalFiles.prev) {
            codebaseDeltas.push(`files ${totalFiles.prev}→${totalFiles.curr}`);
        }
        if (testFiles.curr !== testFiles.prev) {
            codebaseDeltas.push(`tests ${testFiles.prev}→${testFiles.curr}`);
        }
        if (codebaseDeltas.length > 0) {
            line += ` (codebase: ${codebaseDeltas.join(", ")})`;
        }
    }
    return line;
}
|