ship18ion 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,34 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.checkGit = checkGit;
4
+ const child_process_1 = require("child_process");
5
+ async function checkGit(ctx) {
6
+ const results = [];
7
+ try {
8
+ // Check for uncommitted changes
9
+ const status = (0, child_process_1.execSync)('git status --porcelain', { cwd: ctx.cwd, encoding: 'utf-8' });
10
+ if (status.trim().length > 0) {
11
+ results.push({
12
+ status: 'warn',
13
+ message: 'Git working directory is dirty (uncommitted changes). Verify before shipping.',
14
+ ruleId: 'git-dirty',
15
+ });
16
+ }
17
+ // Check current branch
18
+ const branch = (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD', { cwd: ctx.cwd, encoding: 'utf-8' }).trim();
19
+ const allowedBranches = ['main', 'master', 'staging', 'production', 'prod'];
20
+ if (!allowedBranches.includes(branch)) {
21
+ results.push({
22
+ status: 'warn',
23
+ message: `You are on branch '${branch}'. Production builds typically come from main/master.`,
24
+ ruleId: 'git-branch',
25
+ });
26
+ }
27
+ }
28
+ catch (e) {
29
+ // Not a git repo or git not found
30
+ // Silently fail or warn?
31
+ // results.push({ status: 'warn', message: 'Not a git repository or git command failed.', ruleId: 'git-error' });
32
+ }
33
+ return results;
34
+ }
@@ -0,0 +1,53 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.checkSecrets = checkSecrets;
7
+ const fs_1 = __importDefault(require("fs"));
8
+ const secrets_1 = require("../engine/secrets");
9
+ async function checkSecrets(ctx) {
10
+ const results = [];
11
+ // Skip binary files, lock files, node_modules (already ignored by scanner but specific check here)
12
+ const filesToCheck = ctx.files.filter(f => !f.match(/\.(png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot|lock|pdf)$/));
13
+ for (const file of filesToCheck) {
14
+ try {
15
+ const content = fs_1.default.readFileSync(file, 'utf-8');
16
+ const lines = content.split('\n');
17
+ lines.forEach((line, index) => {
18
+ // Check Regex Patterns
19
+ for (const pattern of secrets_1.SECRET_PATTERNS) {
20
+ if (pattern.regex.test(line)) {
21
+ results.push({
22
+ status: 'fail',
23
+ message: `Potential secret found: ${pattern.name}`,
24
+ ruleId: 'secret-pattern',
25
+ file,
26
+ line: index + 1
27
+ });
28
+ }
29
+ }
30
+ // Check Heuristics for assignments
31
+ // matches "key = '...'"
32
+ const genericMsg = line.match(/(api_?key|secret|token|password|auth)[\s]*[:=][\s]*['"]([a-zA-Z0-9_\-]{8,})['"]/i);
33
+ if (genericMsg) {
34
+ const match = genericMsg[2];
35
+ // Heuristic: Must be > 8 chars and not contain 'process.env' or template placeholders
36
+ if (match && match.length > 8 && !line.includes('process.env') && !match.includes('${')) {
37
+ results.push({
38
+ status: 'warn',
39
+ message: `Possible hardcoded secret (heuristic): ${genericMsg[1]}`,
40
+ ruleId: 'secret-heuristic',
41
+ file,
42
+ line: index + 1
43
+ });
44
+ }
45
+ }
46
+ });
47
+ }
48
+ catch (e) {
49
+ // Ignore read errors
50
+ }
51
+ }
52
+ return results;
53
+ }
@@ -0,0 +1,52 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.checkSecurity = checkSecurity;
7
+ const fs_1 = __importDefault(require("fs"));
8
+ async function checkSecurity(ctx) {
9
+ const results = [];
10
+ const codeFiles = ctx.files.filter(f => f.match(/\.(js|ts|jsx|tsx|json)$/));
11
+ for (const file of codeFiles) {
12
+ const content = fs_1.default.readFileSync(file, 'utf-8');
13
+ // 1. Check for Hardcoded NODE_ENV not being production?
14
+ // Actually we want to verify we are NOT hardcoding 'development' in prod context?
15
+ // Or "Debug / dev configs enabled" -> debug: true
16
+ if (content.match(/debug:\s*true/)) {
17
+ results.push({
18
+ status: 'warn',
19
+ message: 'Debug mode enabled (debug: true) found',
20
+ ruleId: 'security-debug-enabled',
21
+ file
22
+ });
23
+ }
24
+ // 2. CORS Wildcard
25
+ // "origin: '*'" or "origin: *"
26
+ if (content.match(/origin:\s*['"]?\*['"]?/)) {
27
+ // Default: Enabled (fail on wildcard) unless explicitly set to false
28
+ if (ctx.config.security?.noCorsWildcard !== false) {
29
+ results.push({
30
+ status: 'fail',
31
+ message: 'CORS wildcard origin (*) detected',
32
+ ruleId: 'security-cors-wildcard',
33
+ file
34
+ });
35
+ }
36
+ }
37
+ // 3. Hardcoded credentials (simple db keywords)
38
+ // postgres://user:pass@...
39
+ if (content.match(/:\/\/[a-zA-Z0-9]+:[a-zA-Z0-9]+@/)) {
40
+ // Exclude localhost
41
+ if (!content.includes('localhost') && !content.includes('127.0.0.1')) {
42
+ results.push({
43
+ status: 'fail',
44
+ message: 'Hardcoded database credentials in connection string',
45
+ ruleId: 'security-db-creds',
46
+ file
47
+ });
48
+ }
49
+ }
50
+ }
51
+ return results;
52
+ }
package/package.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "name": "ship18ion",
3
+ "version": "1.0.0",
4
+ "description": "",
5
+ "main": "dist/cli/index.js",
6
+ "bin": {
7
+ "ship18ion": "./dist/cli/index.js"
8
+ },
9
+ "scripts": {
10
+ "build": "tsc",
11
+ "prepublishOnly": "npm run build",
12
+ "start": "node dist/cli/index.js",
13
+ "test": "echo \"Error: no test specified\" && exit 1"
14
+ },
15
+ "keywords": [
16
+ "cli",
17
+ "security",
18
+ "production",
19
+ "env",
20
+ "secrets",
21
+ "deployment",
22
+ "linter"
23
+ ],
24
+ "author": "TRIPLE HASH",
25
+ "license": "ISC",
26
+ "dependencies": {
27
+ "@babel/parser": "^7.28.5",
28
+ "@babel/traverse": "^7.28.5",
29
+ "@types/babel__traverse": "^7.28.0",
30
+ "@types/node": "^25.0.3",
31
+ "chalk": "^4.1.2",
32
+ "commander": "^14.0.2",
33
+ "dotenv": "^17.2.3",
34
+ "glob": "^13.0.0",
35
+ "typescript": "^5.9.3"
36
+ }
37
+ }
@@ -0,0 +1,34 @@
1
+ #!/usr/bin/env node
2
+ import { Command } from 'commander';
3
+ import chalk from 'chalk';
4
+ import { loadConfig } from '../engine/config';
5
+ import { runChecks } from '../engine/runner';
6
+ import { reportConsole } from '../reporters/console';
7
+
8
+ const program = new Command();
9
+
10
+ program
11
+ .name('ship18ion')
12
+ .description('Production Readiness Inspector')
13
+ .version('0.1.0');
14
+
15
+ program
16
+ .command('check', { isDefault: true })
17
+ .description('Run production readiness checks')
18
+ .option('--ci', 'Run in CI mode (minimal output, exit codes)')
19
+ .action(async (options) => {
20
+ // console.log(chalk.blue('Starting ship18ion checks...'));
21
+ const cwd = process.cwd();
22
+ const config = await loadConfig(cwd);
23
+
24
+ try {
25
+ const results = await runChecks(config, cwd);
26
+ // Uses console reporter for both normal and CI for now (it handles exit codes)
27
+ reportConsole(results, cwd);
28
+ } catch (e) {
29
+ console.error(chalk.red('Error running checks:'), e);
30
+ process.exit(1);
31
+ }
32
+ });
33
+
34
+ program.parse(process.argv);
@@ -0,0 +1,84 @@
1
+ import fs from 'fs';
2
+ import * as parser from '@babel/parser';
3
+ import traverse from '@babel/traverse';
4
+ import * as t from '@babel/types';
5
+
6
+ export interface EnvUsage {
7
+ name: string;
8
+ line: number;
9
+ file: string;
10
+ }
11
+
12
+ export function findEnvUsages(filePath: string): EnvUsage[] {
13
+ if (!fs.existsSync(filePath)) return [];
14
+
15
+ const code = fs.readFileSync(filePath, 'utf-8');
16
+ const usages: EnvUsage[] = [];
17
+
18
+ // Only parse JS/TS files
19
+ if (!/\.(js|ts|jsx|tsx)$/.test(filePath)) {
20
+ return [];
21
+ }
22
+
23
+ try {
24
+ const ast = parser.parse(code, {
25
+ sourceType: 'module',
26
+ plugins: ['typescript', 'jsx'],
27
+ });
28
+
29
+ traverse(ast, {
30
+ MemberExpression(path) {
31
+ // 1. Check for process.env.VAR
32
+ if (
33
+ t.isMemberExpression(path.node.object) &&
34
+ t.isIdentifier(path.node.object.object) &&
35
+ path.node.object.object.name === 'process' &&
36
+ t.isIdentifier(path.node.object.property) &&
37
+ path.node.object.property.name === 'env'
38
+ ) {
39
+ if (t.isIdentifier(path.node.property)) {
40
+ usages.push({
41
+ name: path.node.property.name,
42
+ line: path.node.loc?.start.line || 0,
43
+ file: filePath
44
+ });
45
+ } else if (t.isStringLiteral(path.node.property)) {
46
+ usages.push({
47
+ name: path.node.property.value,
48
+ line: path.node.loc?.start.line || 0,
49
+ file: filePath
50
+ });
51
+ }
52
+ }
53
+
54
+ // 2. Check for import.meta.env.VAR (Vite)
55
+ // AST structure: MemberExpression
56
+ // object: MemberExpression
57
+ // object: MetaProperty (import.meta)
58
+ // property: Identifier (env)
59
+ // property: Identifier (VAR)
60
+
61
+ if (
62
+ t.isMemberExpression(path.node.object) &&
63
+ t.isMetaProperty(path.node.object.object) &&
64
+ path.node.object.object.meta.name === 'import' &&
65
+ path.node.object.object.property.name === 'meta' &&
66
+ t.isIdentifier(path.node.object.property) &&
67
+ path.node.object.property.name === 'env'
68
+ ) {
69
+ if (t.isIdentifier(path.node.property)) {
70
+ usages.push({
71
+ name: path.node.property.name,
72
+ line: path.node.loc?.start.line || 0,
73
+ file: filePath
74
+ });
75
+ }
76
+ }
77
+ }
78
+ });
79
+
80
+ } catch (e) {
81
+ // console.warn(`Failed to parse ${filePath}:`, e);
82
+ }
83
+ return usages;
84
+ }
@@ -0,0 +1,28 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+
4
+ export interface Ship18ionConfig {
5
+ env?: {
6
+ required?: string[];
7
+ disallowed?: string[];
8
+ };
9
+ security?: {
10
+ noCorsWildcard?: boolean;
11
+ requireRateLimit?: boolean;
12
+ };
13
+ ignore?: string[];
14
+ }
15
+
16
+ export async function loadConfig(cwd: string = process.cwd()): Promise<Ship18ionConfig> {
17
+ const configPath = path.join(cwd, 'ship18ion.config.json');
18
+ if (fs.existsSync(configPath)) {
19
+ const content = fs.readFileSync(configPath, 'utf-8');
20
+ try {
21
+ return JSON.parse(content);
22
+ } catch (e) {
23
+ console.error('Failed to parse config file:', e);
24
+ return {};
25
+ }
26
+ }
27
+ return {};
28
+ }
@@ -0,0 +1,27 @@
1
+ import { Ship18ionConfig } from './config';
2
+ import { scanFiles } from './scanner';
3
+ import { RuleContext, RuleResult } from './types';
4
+ import { checkEnvVars } from '../rules/env';
5
+ import { checkSecrets } from '../rules/secrets';
6
+ import { checkSecurity } from '../rules/security';
7
+ import { checkDependencies, checkBuild } from '../rules/build';
8
+ import { checkNextJs } from '../rules/frameworks/nextjs';
9
+ import { checkGit } from '../rules/git';
10
+
11
+ export async function runChecks(config: Ship18ionConfig, cwd: string): Promise<RuleResult[]> {
12
+ const files = await scanFiles(cwd, config.ignore);
13
+ const ctx: RuleContext = { config, files, cwd };
14
+
15
+ const results: RuleResult[] = [];
16
+
17
+ // Run all checks
18
+ results.push(...await checkEnvVars(ctx));
19
+ results.push(...await checkSecrets(ctx));
20
+ results.push(...await checkSecurity(ctx));
21
+ results.push(...await checkDependencies(ctx));
22
+ results.push(...await checkBuild(ctx));
23
+ results.push(...await checkNextJs(ctx));
24
+ results.push(...await checkGit(ctx));
25
+
26
+ return results;
27
+ }
@@ -0,0 +1,13 @@
1
+ import { glob } from 'glob';
2
+ import path from 'path';
3
+
4
+ export async function scanFiles(cwd: string, ignore: string[] = []): Promise<string[]> {
5
+ const defaultIgnore = ['**/node_modules/**', '**/dist/**', '**/.git/**'];
6
+ // Scan for relevant files: JS/TS code, Configs (JSON/YAML), Env files
7
+ return glob('**/*.{js,ts,jsx,tsx,json,yaml,yml,env,env.*}', {
8
+ cwd,
9
+ ignore: [...defaultIgnore, ...ignore],
10
+ absolute: true,
11
+ dot: true, // Include .env files
12
+ });
13
+ }
@@ -0,0 +1,26 @@
1
+ export const SECRET_PATTERNS = [
2
+ { name: 'AWS Access Key', regex: /AKIA[0-9A-Z]{16}/ },
3
+ { name: 'Google API Key', regex: /AIza[0-9A-Za-z\\-_]{35}/ },
4
+ { name: 'Stripe Secret Key', regex: /sk_live_[0-9a-zA-Z]{24}/ },
5
+ { name: 'GitHub Personal Access Token', regex: /ghp_[0-9a-zA-Z]{36}/ },
6
+ { name: 'Generic Private Key', regex: /-----BEGIN .* PRIVATE KEY-----/ },
7
+ { name: 'Slack Bot Token', regex: /xoxb-[0-9]{11}-[0-9]{12}-[0-9a-zA-Z]{24}/ },
8
+ { name: 'OpenAI API Key', regex: /sk-[a-zA-Z0-9]{48}/ }
9
+ ];
10
+
11
+ export function calculateEntropy(str: string): number {
12
+ const len = str.length;
13
+ const frequencies = Array.from(str).reduce((freq, char) => {
14
+ freq[char] = (freq[char] || 0) + 1;
15
+ return freq;
16
+ }, {} as Record<string, number>);
17
+
18
+ return Object.values(frequencies).reduce((sum, f) => {
19
+ const p = f / len;
20
+ return sum - (p * Math.log2(p));
21
+ }, 0);
22
+ }
23
+
24
+ export function isHighEntropy(str: string, threshold = 4.5): boolean {
25
+ return calculateEntropy(str) > threshold;
26
+ }
@@ -0,0 +1,15 @@
1
+ import { Ship18ionConfig } from './config';
2
+
3
+ export interface RuleResult {
4
+ status: 'pass' | 'fail' | 'warn';
5
+ message: string;
6
+ file?: string;
7
+ line?: number;
8
+ ruleId: string;
9
+ }
10
+
11
+ export interface RuleContext {
12
+ config: Ship18ionConfig;
13
+ files: string[];
14
+ cwd: string;
15
+ }
@@ -0,0 +1,62 @@
1
+ import chalk from 'chalk';
2
+ import path from 'path';
3
+ import { RuleResult } from '../engine/types';
4
+
5
+ const CATEGORIES: Record<string, { icon: string; label: string }> = {
6
+ 'env': { icon: '🌱', label: 'Environment' },
7
+ 'secret': { icon: '🔐', label: 'Secrets' },
8
+ 'security': { icon: '⚠️', label: 'Security' },
9
+ 'dep': { icon: '📦', label: 'Dependency & Build' },
10
+ 'build': { icon: '📦', label: 'Dependency & Build' },
11
+ };
12
+
13
+ function getCategory(ruleId: string) {
14
+ const prefix = ruleId.split('-')[0];
15
+ return CATEGORIES[prefix] || { icon: '❓', label: 'Other' };
16
+ }
17
+
18
+ export function reportConsole(results: RuleResult[], cwd: string) {
19
+ if (results.length === 0) {
20
+ console.log(chalk.green('\n✅ Production Readiness Check Passed!\n'));
21
+ return;
22
+ }
23
+
24
+ const fails = results.filter(r => r.status === 'fail');
25
+ const warns = results.filter(r => r.status === 'warn');
26
+
27
+ if (fails.length > 0) {
28
+ console.log(chalk.red('\n❌ Production Readiness Check Failed\n'));
29
+ } else {
30
+ console.log(chalk.yellow('\n⚠️ Production Readiness Check Passed with Warnings\n'));
31
+ }
32
+
33
+ // Group by category
34
+ const grouped: Record<string, RuleResult[]> = {};
35
+ results.forEach(r => {
36
+ const cat = getCategory(r.ruleId);
37
+ const key = `${cat.icon} ${cat.label}`;
38
+ if (!grouped[key]) grouped[key] = [];
39
+ grouped[key].push(r);
40
+ });
41
+
42
+ for (const [category, items] of Object.entries(grouped)) {
43
+ console.log(chalk.bold(category));
44
+ for (const item of items) {
45
+ const sym = item.status === 'fail' ? chalk.red('✖') : chalk.yellow('!');
46
+ const location = item.file ? `${path.relative(cwd, item.file)}${item.line ? `:${item.line}` : ''}` : '';
47
+ console.log(` ${sym} ${item.message} ${chalk.gray(location)}`);
48
+ }
49
+ console.log('');
50
+ }
51
+
52
+ const summary = [];
53
+ if (fails.length > 0) summary.push(chalk.red(`${fails.length} errors`));
54
+ if (warns.length > 0) summary.push(chalk.yellow(`${warns.length} warnings`));
55
+
56
+ console.log(`Summary: ${summary.join(', ')}`);
57
+ console.log('');
58
+
59
+ if (fails.length > 0) {
60
+ process.exit(1);
61
+ }
62
+ }
@@ -0,0 +1,74 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import { RuleContext, RuleResult } from '../engine/types';
4
+
5
+ export async function checkDependencies(ctx: RuleContext): Promise<RuleResult[]> {
6
+ const results: RuleResult[] = [];
7
+
8
+ const packageJsons = ctx.files.filter(f => f.endsWith('package.json') && !f.includes('node_modules'));
9
+
10
+ const devToolsInProd = ['eslint', 'jest', 'mocha', 'nodemon', 'ts-node', 'typescript', 'webpack', 'babel-loader'];
11
+
12
+ for (const pkgFile of packageJsons) {
13
+ try {
14
+ const content = JSON.parse(fs.readFileSync(pkgFile, 'utf-8'));
15
+ const deps = content.dependencies || {};
16
+
17
+ for (const tool of devToolsInProd) {
18
+ if (deps[tool]) {
19
+ results.push({
20
+ status: 'warn',
21
+ message: `Dev dependency found in 'dependencies': ${tool}. Should be in 'devDependencies'?`,
22
+ ruleId: 'dep-dev-in-prod',
23
+ file: pkgFile
24
+ });
25
+ }
26
+ }
27
+ } catch (e) {
28
+ // ignore
29
+ }
30
+ }
31
+ return results;
32
+ }
33
+
34
+ export async function checkBuild(ctx: RuleContext): Promise<RuleResult[]> {
35
+ const results: RuleResult[] = [];
36
+
37
+ // Check for source maps in potential build dirs (dist, build, out, .next)
38
+ // Scanner ignores dist by default, but if we want to check build artifacts we might need to scan explicitly OR assumes user runs this in root.
39
+ // If the scanner ignores 'dist', we won't see them.
40
+ // So this check is effective only if scanner INCLUDES build dirs or we explicitly look for them.
41
+
42
+ // Let's explicitly check common build folders in CWD if they exist, ignoring scanner's ignore list for this specific check?
43
+ // Or just warn if we find .map files in the file list (meaning they were NOT ignored/cleaned).
44
+
45
+ const mapFiles = ctx.files.filter(f => f.endsWith('.map'));
46
+ for (const file of mapFiles) {
47
+ // Only if it looks like a build artifact
48
+ if (file.includes('/dist/') || file.includes('/build/') || file.includes('/.next/')) {
49
+ results.push({
50
+ status: 'warn',
51
+ message: 'Source map found in build output (information leak)',
52
+ ruleId: 'build-source-map',
53
+ file
54
+ });
55
+ }
56
+ }
57
+
58
+ // Check for .env in build folders
59
+ const envInBuild = ctx.files.filter(f =>
60
+ (f.endsWith('.env') || f.includes('.env.')) &&
61
+ (f.includes('/dist/') || f.includes('/build/') || f.includes('/.next/'))
62
+ );
63
+
64
+ for (const file of envInBuild) {
65
+ results.push({
66
+ status: 'fail',
67
+ message: 'Environment file found in build output!',
68
+ ruleId: 'build-env-leak',
69
+ file
70
+ });
71
+ }
72
+
73
+ return results;
74
+ }
@@ -0,0 +1,99 @@
1
+ import fs from 'fs';
2
+ import dotenv from 'dotenv';
3
+ import { RuleContext, RuleResult } from '../engine/types';
4
+ import { findEnvUsages } from '../engine/ast';
5
+
6
+ export async function checkEnvVars(ctx: RuleContext): Promise<RuleResult[]> {
7
+ const results: RuleResult[] = [];
8
+ const declaredEnvs = new Set<string>();
9
+ const usedEnvs = new Map<string, { file: string, line: number }[]>();
10
+
11
+ // 1. Find and parse .env files (definition detection)
12
+ const envFiles = ctx.files.filter(f => f.match(/\.env(\..+)?$/));
13
+ for (const file of envFiles) {
14
+ const content = fs.readFileSync(file, 'utf-8');
15
+ try {
16
+ const parsed = dotenv.parse(content);
17
+ Object.keys(parsed).forEach(k => declaredEnvs.add(k));
18
+ } catch (e) {
19
+ results.push({
20
+ status: 'warn',
21
+ message: `Failed to parse env file: ${file}`,
22
+ ruleId: 'env-parse-error',
23
+ file: file
24
+ });
25
+ }
26
+ }
27
+
28
+ // 2. Scan for usages
29
+ const codeFiles = ctx.files.filter(f => f.match(/\.(js|ts|jsx|tsx)$/));
30
+ for (const file of codeFiles) {
31
+ const usages = findEnvUsages(file);
32
+ for (const u of usages) {
33
+ if (!usedEnvs.has(u.name)) {
34
+ usedEnvs.set(u.name, []);
35
+ }
36
+ usedEnvs.get(u.name)?.push({ file, line: u.line });
37
+ }
38
+ }
39
+
40
+ // 3. Rule: Unused env vars
41
+ // Fail if Env vars exist but never used
42
+ for (const env of declaredEnvs) {
43
+ if (!usedEnvs.has(env)) {
44
+ // Ignore some common framework vars if needed, but strict mode says unused is bad.
45
+ results.push({
46
+ status: 'warn', // Warn for now, maybe fail? User said "Fail if Env vars exist but never used"
47
+ message: `Unused environment variable: ${env}`,
48
+ ruleId: 'env-unused',
49
+ file: envFiles[0] // Just point to first env file for now
50
+ });
51
+ }
52
+ }
53
+
54
+ // 4. Rule: Missing required env vars
55
+ // "App references process.env.X But it’s not defined anywhere"
56
+ // Also check strict list from config
57
+ const required = ctx.config.env?.required || [];
58
+
59
+ // Check missing from strict config
60
+ for (const req of required) {
61
+ if (!declaredEnvs.has(req)) {
62
+ results.push({
63
+ status: 'fail',
64
+ message: `Missing required environment variable (configured): ${req}`,
65
+ ruleId: 'env-missing-config',
66
+ });
67
+ }
68
+ }
69
+
70
+ // Check usage without definition
71
+ const commonSystemVars = ['NODE_ENV', 'PORT', 'CI'];
72
+ for (const [env, locs] of usedEnvs) {
73
+ if (!declaredEnvs.has(env) && !commonSystemVars.includes(env)) {
74
+ // Check if it is in disallowed list?
75
+ if (ctx.config.env?.disallowed?.includes(env)) {
76
+ results.push({
77
+ status: 'fail',
78
+ message: `Disallowed environment variable used: ${env}`,
79
+ ruleId: 'env-disallowed',
80
+ file: locs[0].file,
81
+ line: locs[0].line
82
+ });
83
+ } else {
84
+ // It's used but not in .env.
85
+ // We should probably warn unless we are in strict mode.
86
+ // User said: "Fail if Required env var is missing" -> checking usage implies requirement.
87
+ results.push({
88
+ status: 'warn',
89
+ message: `Environment variable used but not defined in .env: ${env}`,
90
+ ruleId: 'env-missing-definition',
91
+ file: locs[0].file,
92
+ line: locs[0].line
93
+ });
94
+ }
95
+ }
96
+ }
97
+
98
+ return results;
99
+ }