@scantrix/cli 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,81 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.loadAuditConfig = loadAuditConfig;
7
+ exports.isRuleDisabled = isRuleDisabled;
8
+ exports.getEffectiveSeverity = getEffectiveSeverity;
9
+ exports.generateDefaultConfig = generateDefaultConfig;
10
+ const promises_1 = __importDefault(require("fs/promises"));
11
+ const path_1 = __importDefault(require("path"));
12
// Baseline configuration used when no .auditrc.json exists (or it fails to load).
const DEFAULT_CONFIG = {
    rules: {
        disabled: [],
        severityOverrides: {},
    },
    exclude: [],
    thresholds: {
        gradeA: 80,
        gradeB: 60,
        gradeC: 40,
    },
};
17
/**
 * Load the audit configuration for a repository.
 *
 * @param {string} repoPath - Repository root; `.auditrc.json` is looked up here
 *   when no explicit path is given.
 * @param {string} [explicitPath] - Optional path to a config file (resolved to
 *   an absolute path).
 * @returns {Promise<object>} Merged config (user values over DEFAULT_CONFIG).
 *   Falls back to a copy of DEFAULT_CONFIG when the file is missing or invalid.
 */
async function loadAuditConfig(repoPath, explicitPath) {
    const configPath = explicitPath
        ? path_1.default.resolve(explicitPath)
        : path_1.default.join(repoPath, ".auditrc.json");
    let raw;
    try {
        raw = await promises_1.default.readFile(configPath, "utf8");
    }
    catch {
        // No config file (or unreadable) — defaults apply silently.
        return { ...DEFAULT_CONFIG };
    }
    try {
        return mergeConfig(DEFAULT_CONFIG, JSON.parse(raw));
    }
    catch (err) {
        // The file exists but is malformed. Previously this was swallowed
        // silently, so a typo in a user's config was ignored without any
        // signal. Warn, then still fall back to defaults (backward compatible).
        console.warn(`[config] Ignoring malformed config at ${configPath}: ${String(err)}`);
        return { ...DEFAULT_CONFIG };
    }
}
30
/**
 * Deep-merge a user config over the defaults. Scalar/array fields in
 * `overrides` win outright; the severityOverrides map is merged key-by-key
 * (override entries win on collision).
 */
function mergeConfig(defaults, overrides) {
    const disabled = overrides.rules?.disabled ?? defaults.rules?.disabled ?? [];
    const severityOverrides = {
        ...(defaults.rules?.severityOverrides ?? {}),
        ...(overrides.rules?.severityOverrides ?? {}),
    };
    const exclude = overrides.exclude ?? defaults.exclude ?? [];
    const thresholds = {
        gradeA: overrides.thresholds?.gradeA ?? defaults.thresholds?.gradeA ?? 80,
        gradeB: overrides.thresholds?.gradeB ?? defaults.thresholds?.gradeB ?? 60,
        gradeC: overrides.thresholds?.gradeC ?? defaults.thresholds?.gradeC ?? 40,
    };
    return {
        rules: { disabled, severityOverrides },
        exclude,
        thresholds,
    };
}
47
/** True when `ruleId` appears in the config's disabled-rules list. */
function isRuleDisabled(config, ruleId) {
    const disabled = config.rules?.disabled;
    if (!disabled) {
        return false;
    }
    return disabled.includes(ruleId);
}
50
/** Severity for `ruleId`, honoring any per-rule override in the config. */
function getEffectiveSeverity(config, ruleId, defaultSeverity) {
    const override = config.rules?.severityOverrides?.[ruleId];
    return override ?? defaultSeverity;
}
53
/**
 * Build the contents of a starter .auditrc.json (pretty-printed, with a
 * trailing newline). JSON has no comment syntax, so each option is documented
 * via the "_comments" map inside the file itself.
 */
function generateDefaultConfig() {
    const starter = {
        "$schema": "https://scantrix.com/schemas/auditrc.json",
        rules: {
            disabled: [],
            severityOverrides: {},
        },
        exclude: [
            'node_modules/**',
            'dist/**',
            'coverage/**',
        ],
        thresholds: {
            gradeA: 80,
            gradeB: 60,
            gradeC: 40,
        },
        _comments: {
            disabled: 'Add finding IDs to suppress entirely, e.g. ["PW-LOC-001", "ARCH-006"]',
            severityOverrides: 'Override severity for specific findings, e.g. { "PW-FLAKE-001": "medium" }',
            exclude: 'Glob patterns to exclude from file scanning',
            thresholds: 'Health score thresholds for letter grades (0-100, higher=better). gradeA=min score for A, gradeB=min for B, gradeC=min for C, below C = D/F',
        },
    };
    return JSON.stringify(starter, null, 2) + "\n";
}
@@ -0,0 +1,327 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.extractCiSummary = extractCiSummary;
7
+ const fast_glob_1 = __importDefault(require("fast-glob"));
8
+ const promises_1 = __importDefault(require("fs/promises"));
9
+ const path_1 = __importDefault(require("path"));
10
/** Convert Windows path separators to forward slashes. */
function normalize(p) {
    return p.split("\\").join("/");
}
13
/**
 * Best-effort classification of a CI YAML file by its path alone.
 * Returns "github_actions", "azure_pipelines", or "unknown".
 */
function classifyCiFile(file) {
    const lower = normalize(file).toLowerCase();
    const isYaml = lower.endsWith(".yml") || lower.endsWith(".yaml");
    // GitHub Actions workflows live under .github/workflows/.
    if (lower.includes("/.github/workflows/") && isYaml) {
        return "github_actions";
    }
    // Canonical Azure Pipelines file name at any depth.
    if (lower.endsWith("/azure-pipelines.yml") || lower.endsWith("/azure-pipelines.yaml")) {
        return "azure_pipelines";
    }
    // Repo conventions: a /pipelines/ folder, or azure/ado/pipeline in the name.
    const base = path_1.default.basename(lower);
    const looksAzure = lower.includes("/pipelines/") ||
        base.includes("pipeline") ||
        base.includes("azure") ||
        base.includes("ado");
    return looksAzure ? "azure_pipelines" : "unknown";
}
31
/** True when at least one regex in `patterns` matches `content`. */
function contentHasAny(content, patterns) {
    for (const pattern of patterns) {
        if (pattern.test(content)) {
            return true;
        }
    }
    return false;
}
34
/** Path/name heuristic: does this YAML file live where CI configs usually do? */
function looksLikeCiByPath(file) {
    const lower = normalize(file).toLowerCase();
    if (lower.includes("/pipelines/") || lower.includes("/.github/workflows/")) {
        return true;
    }
    const base = path_1.default.basename(lower);
    if (base === "azure-pipelines.yml" || base === "azure-pipelines.yaml") {
        return true;
    }
    return base.includes("pipeline") || base.includes("azure") || base.includes("ado");
}
45
/**
 * Lightweight content heuristic to tell pipeline YAML apart from other YAML
 * (docker-compose, k8s manifests, etc.) and avoid false positives.
 */
function looksLikePipelineYamlContent(c) {
    const pipelineMarkers = [
        /^\s*trigger\s*:/mi,
        /^\s*schedules\s*:/mi,
        /^\s*stages\s*:/mi,
        /^\s*jobs\s*:/mi,
        /^\s*steps\s*:/mi,
        /^\s*pool\s*:/mi,
        /^\s*-+\s*task\s*:/mi,
        /PublishPipelineArtifact/i,
        /PublishTestResults/i,
        /actions\/upload-artifact/i,
    ];
    return contentHasAny(c, pipelineMarkers);
}
60
/**
 * Scan a repository for CI pipeline YAML (Azure Pipelines / GitHub Actions)
 * and summarize what the pipelines appear to do: artifact publishing, caching,
 * sharding, env wiring, timeouts, reporters, etc.
 *
 * All detection is regex-heuristic over raw file text; no YAML parsing is
 * performed, so matches are file-wide rather than step-scoped.
 *
 * @param {string} repoPath - Repository root to scan.
 * @returns {Promise<object>} Summary with per-file classifications and flags.
 */
async function extractCiSummary(repoPath) {
    // Discover every YAML file first, then narrow down to CI candidates.
    const yamlFiles = await (0, fast_glob_1.default)(["**/*.yml", "**/*.yaml"], {
        cwd: repoPath,
        absolute: true,
        dot: true, // .github/workflows is under a dot-directory
        onlyFiles: true,
        followSymbolicLinks: false,
        ignore: ["**/node_modules/**", "**/dist/**", "**/build/**", "**/.git/**"],
    });
    // Pass 1: path/name conventions.
    let candidates = yamlFiles.filter(looksLikeCiByPath);
    // Pass 2: nothing matched by path — sniff file contents for CI-ish keys.
    if (!candidates.length && yamlFiles.length) {
        const byContent = [];
        // Cap the number of files read to stay fast on huge repos.
        for (const file of yamlFiles.slice(0, 300)) {
            try {
                const text = await promises_1.default.readFile(file, "utf8");
                if (looksLikePipelineYamlContent(text)) {
                    byContent.push(file);
                }
            }
            catch {
                // unreadable — skip
            }
        }
        candidates = byContent;
    }
    const files = candidates
        .map((file) => ({ file: normalize(file), kind: classifyCiFile(file) }))
        .filter((entry) => !entry.file.toLowerCase().includes("/node_modules/"));
    const summary = {
        files,
        detectedAzurePipelines: files.some((entry) => entry.kind === "azure_pipelines"),
        detectedGitHubActions: files.some((entry) => entry.kind === "github_actions"),
        publishesPlaywrightReport: false,
        publishesJUnit: false,
        publishesTracesOrTestResultsDir: false,
        publishesArtifactsOnFailure: false,
        usesCache: false,
        usesSharding: false,
        shardingFiles: [],
        setsWorkersEnv: false,
        workersEnvName: undefined,
        workersEnvFiles: [],
        setsHeadlessEnv: false,
        headlessEnvName: undefined,
        mentionsNodeSetup: false,
        mentionsNpmInstall: false,
        mentionsPlaywrightInstall: false,
        playwrightInstallMethod: undefined,
        usesSelfHostedPool: false,
        usesMicrosoftHostedPool: false,
        mentionsBrowsersPreinstalled: false,
        hasFailureHandling: false,
        failureHandlingMethods: [],
        setsCiEnvTrue: false,
        ciEnvTrueFiles: [],
        hasPipelineTimeout: false,
        pipelineTimeoutMinutes: undefined,
        hasReporterInPlaywrightCommand: false,
        notes: [],
    };
    if (!files.length) {
        summary.notes.push(`No CI pipeline YAML detected under repoPath: ${normalize(repoPath)}`);
        summary.notes.push(`YAML files scanned: ${yamlFiles.length}`);
        return summary;
    }
    // Steps that upload/publish artifacts (ADO tasks + GHA upload-artifact).
    const uploadStepPatterns = [
        /\bPublishPipelineArtifact\b/i,
        /\bPublishBuildArtifacts\b/i,
        /\bactions\/upload-artifact\b/i,
        /\bupload-artifact\b/i,
    ];
    for (const entry of files) {
        let text;
        try {
            text = await promises_1.default.readFile(entry.file, "utf8");
        }
        catch {
            continue; // unreadable — skip this file
        }
        // Pool type signals (named pool => self-hosted; vmImage => MS-hosted).
        if (contentHasAny(text, [
            /\bpool\s*:\s*\n\s*name\s*:/i,
            /\bpool\s*:\s*{[^}]*\bname\s*:/i,
        ])) {
            summary.usesSelfHostedPool = true;
        }
        if (contentHasAny(text, [/\bvmImage\s*:/i])) {
            summary.usesMicrosoftHostedPool = true;
        }
        if (contentHasAny(text, [
            /already\s+has\s+browsers?/i,
            /browsers?.*already\s+installed/i,
            /playwright.*already\s+has\s+browsers/i,
        ])) {
            summary.mentionsBrowsersPreinstalled = true;
        }
        // HTML report publishing: report keyword + an upload/publish step.
        if (contentHasAny(text, [/playwright-report/i, /\bhtml-report\b/i]) &&
            contentHasAny(text, [...uploadStepPatterns, /\bpublish:\s*/i])) {
            summary.publishesPlaywrightReport = true;
        }
        // JUnit publishing: explicit task, or junit/xml keyword + upload step.
        if (contentHasAny(text, [/PublishTestResults@/i, /\bPublishTestResults\b/i])) {
            summary.publishesJUnit = true;
        }
        else if (contentHasAny(text, [/test-results\.xml/i, /junit/i]) &&
            contentHasAny(text, uploadStepPatterns)) {
            summary.publishesJUnit = true;
        }
        // Traces / test-results directory publishing.
        if (contentHasAny(text, [/test-results/i, /trace\.zip/i, /traces?/i]) &&
            contentHasAny(text, uploadStepPatterns)) {
            summary.publishesTracesOrTestResultsDir = true;
        }
        // Artifact publishing configured to run even on failure?
        // ADO: condition: always()/failed()/succeededOrFailed()
        // GHA: if: always()/failure()/${{ always() }}/${{ failure() }}
        // NOTE: the condition and the artifact keyword may match anywhere in
        // the file — they are not required to be on the same step.
        const failureConditions = [
            /condition\s*:\s*always\s*\(\)/i,
            /condition\s*:\s*failed\s*\(\)/i,
            /condition\s*:\s*succeededOrFailed\s*\(\)/i,
            /if\s*:\s*always\s*\(\)/i,
            /if\s*:\s*failure\s*\(\)/i,
            /if\s*:\s*\$\{\{\s*always\s*\(\)/i,
            /if\s*:\s*\$\{\{\s*failure\s*\(\)/i,
        ];
        if (contentHasAny(text, failureConditions) &&
            contentHasAny(text, [
                /test-results/i,
                /playwright-report/i,
                /trace/i,
                /upload-artifact/i,
                /PublishPipelineArtifact/i,
                /PublishBuildArtifacts/i,
            ])) {
            summary.publishesArtifactsOnFailure = true;
        }
        // Cache usage — only explicit CI cache tasks/actions count,
        // not arbitrary `cache:` keys in unrelated YAML.
        if (contentHasAny(text, [
            /\bCache@2\b/i,
            /\bactions\/cache\b/i,
            /\bactions\/setup-node[\s\S]{0,200}cache\s*:/i,
            /\bnpm\s+ci\s+--cache/i,
        ])) {
            summary.usesCache = true;
        }
        // Env wiring.
        if (contentHasAny(text, [/\bWORKERS\b/i])) {
            summary.setsWorkersEnv = true;
            summary.workersEnvName = "WORKERS";
            summary.workersEnvFiles.push(normalize(entry.file));
        }
        if (contentHasAny(text, [/\bHEADLESS_MODE\b/i])) {
            summary.setsHeadlessEnv = true;
            summary.headlessEnvName = "HEADLESS_MODE";
        }
        // Tooling steps.
        if (contentHasAny(text, [
            /\bNodeTool@0\b/i,
            /actions\/setup-node/i,
            /\bsetup-node\b/i,
        ])) {
            summary.mentionsNodeSetup = true;
        }
        if (contentHasAny(text, [
            /\bnpm ci\b/i,
            /\bnpm install\b/i,
            /\byarn install\b/i,
            /\bpnpm install\b/i,
        ])) {
            summary.mentionsNpmInstall = true;
        }
        if (contentHasAny(text, [/\bnpx playwright install\b/i, /playwright install\b/i])) {
            summary.mentionsPlaywrightInstall = true;
            // Classify how browsers are installed.
            if (contentHasAny(text, [/npx playwright install --with-deps/i])) {
                summary.playwrightInstallMethod = "npx playwright install --with-deps";
            }
            else if (contentHasAny(text, [/npx playwright install(?!\s+--with-deps)/i])) {
                summary.playwrightInstallMethod = "npx playwright install";
            }
            else {
                summary.playwrightInstallMethod = "unknown";
            }
        }
        // Sharding detection (--shard=N/M or "shard" near an N/M fraction).
        if (contentHasAny(text, [/--shard=/i, /\bshard\b.*\d+\/\d+/i])) {
            summary.usesSharding = true;
            summary.shardingFiles.push(normalize(entry.file));
        }
        // Failure handling detection.
        if (contentHasAny(text, [/continueOnError\s*:\s*true/i])) {
            summary.hasFailureHandling = true;
            if (!summary.failureHandlingMethods.includes("continueOnError")) {
                summary.failureHandlingMethods.push("continueOnError");
            }
        }
        if (contentHasAny(text, [/condition\s*:.*failed\(\)/i, /if\s*:.*failure\(\)/i])) {
            summary.hasFailureHandling = true;
            if (!summary.failureHandlingMethods.includes("conditional steps on failure")) {
                summary.failureHandlingMethods.push("conditional steps on failure");
            }
        }
        // CI-012: explicit CI=true. GitHub Actions sets CI=true automatically;
        // Azure DevOps does not.
        if (contentHasAny(text, [
            /\bCI\s*:\s*['"]?true['"]?/i, // map-style: CI: true / CI: 'true'
            /\bCI=true\b/i, // script: CI=true
            /\benv\s*:\s*\n[^]*?\bCI\s*:\s/i, // env block with CI key
            /- name:\s*['"]?CI['"]?\s*\n\s*value:\s*['"]?true['"]?/i, // Azure list-style variables
        ])) {
            summary.setsCiEnvTrue = true;
            summary.ciEnvTrueFiles.push(normalize(entry.file));
        }
        // GitHub Actions implicitly sets CI=true.
        if (entry.kind === "github_actions") {
            summary.setsCiEnvTrue = true;
            summary.ciEnvTrueFiles.push(normalize(entry.file));
        }
        // CI-013: pipeline timeout. ADO uses timeoutInMinutes, GHA uses
        // timeout-minutes; only the first match per file is considered, with
        // the ADO form taking precedence. The largest value seen wins overall.
        const timeoutMatch = text.match(/\btimeoutInMinutes\s*:\s*(\d+)/i) ||
            text.match(/\btimeout-minutes\s*:\s*(\d+)/i);
        if (timeoutMatch) {
            summary.hasPipelineTimeout = true;
            const minutes = Number(timeoutMatch[1]);
            if (Number.isFinite(minutes)) {
                summary.pipelineTimeoutMinutes = summary.pipelineTimeoutMinutes
                    ? Math.max(summary.pipelineTimeoutMinutes, minutes)
                    : minutes;
            }
        }
        // CI-014: does the `playwright test` command include --reporter?
        if (contentHasAny(text, [/playwright\s+test\b[^\n]*--reporter/i])) {
            summary.hasReporterInPlaywrightCommand = true;
        }
    }
    // Dedupe file lists so downstream checks are deterministic.
    summary.workersEnvFiles = [...new Set(summary.workersEnvFiles)];
    summary.shardingFiles = [...new Set(summary.shardingFiles)];
    summary.ciEnvTrueFiles = [...new Set(summary.ciEnvTrueFiles)];
    return summary;
}
package/dist/cli.js ADDED
@@ -0,0 +1,156 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ var __importDefault = (this && this.__importDefault) || function (mod) {
4
+ return (mod && mod.__esModule) ? mod : { "default": mod };
5
+ };
6
+ Object.defineProperty(exports, "__esModule", { value: true });
7
+ const minimist_1 = __importDefault(require("minimist"));
8
+ const path_1 = __importDefault(require("path"));
9
+ const fs_1 = require("fs");
10
+ const scanner_1 = require("./scanner");
11
+ const report_1 = require("./report");
12
+ const auditConfig_1 = require("./auditConfig");
13
+ const diffTracker_1 = require("./diffTracker");
14
+ const scanResult_1 = require("./scanResult");
15
+ const sinks_1 = require("./sinks");
16
+ const scoring_1 = require("./scoring");
17
+ const promises_1 = __importDefault(require("fs/promises"));
18
/**
 * CLI entry point.
 *
 * Subcommands:
 *   scantrix init [dir]       Create a starter .auditrc.json
 *   scantrix <repoPath> ...   Run a scan and write report artifacts
 *
 * Exits with code 1 on usage errors. On scan failure, writes a minimal crash
 * report (so audit_summary.md is never empty) and rethrows.
 */
async function main() {
    const args = (0, minimist_1.default)(process.argv.slice(2));
    const subcommand = args._[0];
    // ── Subcommand: init ─────────────────────────────────────────────
    if (subcommand === "init") {
        const targetDir = args._[1] || ".";
        const configPath = path_1.default.join(path_1.default.resolve(targetDir), ".auditrc.json");
        try {
            await promises_1.default.access(configPath);
            console.error(`[init] .auditrc.json already exists at ${configPath}`);
            process.exit(1);
        }
        catch {
            // File doesn't exist — good, we can create it.
        }
        await promises_1.default.writeFile(configPath, (0, auditConfig_1.generateDefaultConfig)(), "utf8");
        console.log(`[init] Created .auditrc.json at ${configPath}`);
        return;
    }
    // ── Standard scan ────────────────────────────────────────────────
    const repoPath = args.repo || args._[0];
    const outDir = args.out || "./audit-out";
    const checkUpdates = Boolean(args.updates || args.update || args.checkUpdates);
    const configPath = args.config; // --config .auditrc.json
    const diffPath = args.diff; // --diff path/to/baseline/findings.json
    const formatArg = args.format; // --format md,html,json,sarif,email
    const noTrend = Boolean(args["no-trend"]); // --no-trend to disable auto-snapshot
    const jsonPath = args["json-path"]; // --json-path path/to/results.json
    const repoName = args["repo-name"]; // --repo-name display name override
    const branchOverride = args["branch"]; // --branch override detected branch
    if (!repoPath) {
        console.error("Usage:\n" +
            "  scantrix <repoPath> [--out <dir>] [--updates] [--config <path>]\n" +
            "           [--diff <baseline.json>] [--format md,html,json,sarif,email]\n" +
            "           [--no-trend] [--json-path <path>]\n" +
            "           [--repo-name <name>] [--branch <branch>]\n" +
            "\n" +
            "  scantrix init [dir]   Create a starter .auditrc.json\n" +
            "\n" +
            "Environment variables:\n" +
            "  SCANTRIX_JSON_PATH    Write canonical results JSON to this path\n");
        process.exit(1);
    }
    const absRepo = path_1.default.resolve(repoPath);
    // Read tool version from package.json (dist/cli.js → ../package.json)
    const pkg = JSON.parse((0, fs_1.readFileSync)(path_1.default.resolve(__dirname, "..", "package.json"), "utf8"));
    const toolVersion = pkg.version ?? "0.0.0";
    // ── Load optional .auditrc.json ────────────────────────────────────
    // BUG FIX: this previously called loadAuditConfig(configPath ?? repoPath),
    // which passed the --config file path as the *repo* argument — the config
    // was then looked up at <configFile>/.auditrc.json, so --config never
    // worked. The explicit config path is the second parameter.
    const auditConfig = await (0, auditConfig_1.loadAuditConfig)(repoPath, configPath);
    // ── Parse --format flag ────────────────────────────────────────────
    const validFormats = ["md", "html", "json", "sarif", "email"];
    let formats;
    if (formatArg) {
        formats = formatArg.split(",").map(s => s.trim().toLowerCase()).filter((s) => validFormats.includes(s));
        if (formats.length === 0)
            formats = undefined; // fall back to all
    }
    // ── Resolve baseline for diff tracking ───────────────────────────────
    let resolvedBaseline = diffPath;
    let baselineMeta;
    if (!resolvedBaseline && !noTrend) {
        resolvedBaseline = await (0, diffTracker_1.findLatestBaseline)(outDir) ?? undefined;
        // Load the baseline snapshot's meta.json for inventory comparison.
        if (resolvedBaseline) {
            try {
                const metaPath = path_1.default.join(path_1.default.dirname(resolvedBaseline), "meta.json");
                const metaRaw = await promises_1.default.readFile(metaPath, "utf8");
                baselineMeta = JSON.parse(metaRaw);
            }
            catch { /* no meta available — first scan or corrupt snapshot */ }
        }
    }
    const startedAt = new Date().toISOString();
    try {
        const results = await (0, scanner_1.scanRepo)(repoPath, {
            checkOutdatedDependencies: checkUpdates,
            auditConfig,
            repoName,
            branch: branchOverride,
        });
        // ── Prevalence-based severity escalation ──────────────────────────
        // Escalation considers all test files, Playwright + Cypress + Selenium.
        const effectiveTestFiles = results.inventory.testFiles +
            (results.inventory.cypressTestFiles ?? 0) +
            (results.inventory.seleniumTestFiles ?? 0);
        results.findings = (0, scoring_1.applyEscalation)(results.findings, effectiveTestFiles);
        await (0, report_1.writeReportArtifacts)(outDir, results, {
            baselinePath: resolvedBaseline,
            baselineMeta,
            formats,
        });
        console.log(`[audit] wrote report to: ${outDir}`);
        // ── Auto-snapshot ─────────────────────────────────────────────────
        if (!noTrend) {
            await (0, diffTracker_1.saveSnapshot)(outDir, results.findings, {
                inventory: results.inventory,
                repoPath: absRepo,
            });
            console.log(`[snapshot] Saved to ${outDir}/snapshots/`);
        }
        // ── Console diff summary ───────────────────────────────────────────
        if (resolvedBaseline) {
            const baseline = await (0, diffTracker_1.loadBaseline)(resolvedBaseline);
            if (baseline.length > 0) {
                const diff = (0, diffTracker_1.computeDiff)(baseline, results.findings, {
                    baselineInventory: baselineMeta,
                    currentInventory: results.inventory,
                });
                console.log((0, diffTracker_1.formatDiffConsoleSummary)(diff));
            }
        }
        else if (!noTrend) {
            console.log("[diff] First scan — no baseline to compare.");
        }
        // ── Write canonical ScanResult JSON ────────────────────────────────
        const effectiveJsonPath = jsonPath ?? process.env.SCANTRIX_JSON_PATH;
        if (effectiveJsonPath) {
            const scanResult = (0, scanResult_1.buildScanResult)(results, {
                version: toolVersion,
                startedAt,
                repoPath: absRepo,
            });
            const sink = new sinks_1.JsonSink(effectiveJsonPath);
            await sink.write(scanResult);
            console.log(`[sink:json] Results written to ${effectiveJsonPath}`);
        }
    }
    catch (err) {
        console.error("[audit] FAILED:", err);
        // Write a minimal crash report so audit_summary.md is never empty again
        await promises_1.default.mkdir(outDir, { recursive: true });
        await promises_1.default.writeFile(path_1.default.join(outDir, "audit_summary.md"), `# Playwright Reliability Audit (Crash)\n\nRepo: \`${repoPath}\`\n\nError:\n\n\`\`\`\n${String(err)}\n\`\`\`\n`, "utf8");
        throw err;
    }
}
153
// Top-level runner: surface any unhandled failure and exit non-zero.
main().catch((err) => {
    console.error(" Audit failed:", err);
    process.exit(1);
});