ship18ion 1.2.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/engine/ast.d.ts +6 -0
- package/dist/engine/config.d.ts +12 -0
- package/dist/engine/detector.d.ts +2 -0
- package/dist/engine/runner.d.ts +3 -0
- package/dist/engine/scanner.d.ts +1 -0
- package/dist/engine/secrets.d.ts +6 -0
- package/{src/engine/types.ts → dist/engine/types.d.ts} +15 -17
- package/dist/reporters/console.d.ts +2 -0
- package/dist/rules/build.d.ts +3 -0
- package/dist/rules/env.d.ts +2 -0
- package/dist/rules/frameworks/nextjs.d.ts +2 -0
- package/dist/rules/git.d.ts +2 -0
- package/dist/rules/hygiene.d.ts +2 -0
- package/dist/rules/packages.d.ts +2 -0
- package/dist/rules/secrets.d.ts +2 -0
- package/dist/rules/security.d.ts +2 -0
- package/package.json +9 -3
- package/src/cli/index.ts +0 -56
- package/src/engine/ast.ts +0 -84
- package/src/engine/config.ts +0 -28
- package/src/engine/detector.ts +0 -27
- package/src/engine/runner.ts +0 -62
- package/src/engine/scanner.ts +0 -22
- package/src/engine/secrets.ts +0 -26
- package/src/reporters/console.ts +0 -69
- package/src/rules/build.ts +0 -77
- package/src/rules/env.ts +0 -99
- package/src/rules/frameworks/nextjs.ts +0 -33
- package/src/rules/git.ts +0 -95
- package/src/rules/hygiene.ts +0 -52
- package/src/rules/packages.ts +0 -33
- package/src/rules/secrets.ts +0 -53
- package/src/rules/security.ts +0 -55
- package/tests/fixtures/leaky-app/.env +0 -3
- package/tests/fixtures/leaky-app/package.json +0 -7
- package/tests/fixtures/leaky-app/src/index.js +0 -21
- package/tsconfig.json +0 -15
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2026 champ18ion
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/dist/engine/config.d.ts
ADDED
@@ -0,0 +1,12 @@
+export interface Ship18ionConfig {
+  env?: {
+    required?: string[];
+    disallowed?: string[];
+  };
+  security?: {
+    noCorsWildcard?: boolean;
+    requireRateLimit?: boolean;
+  };
+  ignore?: string[];
+}
+export declare function loadConfig(cwd?: string): Promise<Ship18ionConfig>;
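The Ship18ionConfig shape above is what loadConfig reads from a ship18ion.config.json in the project root (see src/engine/config.ts later in this diff). A minimal sketch of such a config file; the specific variable names and ignore glob are made-up examples, not defaults shipped with the package:

{
  "env": {
    "required": ["DATABASE_URL"],
    "disallowed": ["DEBUG_TOKEN"]
  },
  "security": {
    "noCorsWildcard": true,
    "requireRateLimit": false
  },
  "ignore": ["**/legacy/**"]
}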
package/dist/engine/scanner.d.ts
ADDED
@@ -0,0 +1 @@
+export declare function scanFiles(cwd: string, ignore?: string[]): Promise<string[]>;
package/{src/engine/types.ts → dist/engine/types.d.ts}
RENAMED
@@ -1,17 +1,15 @@
-import { Ship18ionConfig } from './config';
-import { FrameworkType } from './detector';
-
-
-
-
-
-
-
-
-
-
-
-
-  framework: FrameworkType;
-}
+import { Ship18ionConfig } from './config';
+import { FrameworkType } from './detector';
+export interface RuleResult {
+  status: 'pass' | 'fail' | 'warn';
+  message: string;
+  file?: string;
+  line?: number;
+  ruleId: string;
+}
+export interface RuleContext {
+  config: Ship18ionConfig;
+  files: string[];
+  cwd: string;
+  framework: FrameworkType;
+}
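These declarations define the contract every rule module in this package follows: an async function receives a RuleContext and returns RuleResult[]. A minimal sketch of a rule written against these types; checkReadme and its ruleId are hypothetical, shown only to illustrate the shape, and are not part of the package:

import { RuleContext, RuleResult } from '../engine/types';

// Hypothetical rule: warn when no README.md appears among the scanned files.
export async function checkReadme(ctx: RuleContext): Promise<RuleResult[]> {
  const hasReadme = ctx.files.some(f => f.toLowerCase().endsWith('readme.md'));
  if (hasReadme) return [];
  return [{
    status: 'warn',
    message: 'No README.md found in the project.',
    ruleId: 'hygiene-readme-missing',
    file: ctx.cwd
  }];
}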
package/package.json
CHANGED
@@ -1,11 +1,17 @@
 {
   "name": "ship18ion",
-  "version": "1.2.0",
+  "version": "1.2.1",
   "description": "",
   "main": "dist/cli/index.js",
   "bin": {
     "ship18ion": "./dist/cli/index.js"
   },
+  "files": [
+    "dist",
+    "README.md",
+    "CONTRIBUTING.md",
+    "LICENSE"
+  ],
   "scripts": {
     "build": "tsc",
     "prepublishOnly": "npm run build",
@@ -21,8 +27,8 @@
     "deployment",
     "linter"
   ],
-  "author": "
-  "license": "
+  "author": "champ18ion",
+  "license": "MIT",
   "dependencies": {
     "@babel/parser": "^7.28.5",
     "@babel/traverse": "^7.28.5",
package/src/cli/index.ts
DELETED
@@ -1,56 +0,0 @@
-#!/usr/bin/env node
-import { Command } from 'commander';
-import chalk from 'chalk';
-import { loadConfig } from '../engine/config';
-import { runChecks } from '../engine/runner';
-import { reportConsole } from '../reporters/console';
-
-const program = new Command();
-
-import figlet from 'figlet';
-import gradient from 'gradient-string';
-import ora from 'ora';
-import { detectFramework } from '../engine/detector';
-
-program
-  .command('check', { isDefault: true })
-  .description('Run production readiness checks')
-  .option('--ci', 'Run in CI mode (minimal output, exit codes)')
-  .action(async (options) => {
-    if (!options.ci) {
-      console.log(gradient.pastel.multiline(figlet.textSync('SHIP18ION')));
-      console.log(chalk.dim('Production Readiness Inspector\n'));
-    }
-
-    const cwd = process.cwd();
-    const config = await loadConfig(cwd);
-    const spinner = ora('Initializing...').start();
-
-    try {
-      let framework: string = 'unknown';
-      if (!options.ci) {
-        framework = await detectFramework(cwd);
-        spinner.text = `Detected Framework: ${chalk.cyan(framework.toUpperCase())}`;
-        await new Promise(r => setTimeout(r, 800)); // Brief pause to show framework
-      } else {
-        // Even in CI, simple detection is useful for reporting if needed, or we just skip
-        framework = await detectFramework(cwd);
-      }
-
-      const results = await runChecks(config, cwd, (stage) => {
-        if (!options.ci) spinner.text = stage;
-      });
-
-      spinner.succeed(chalk.green('Checks completed!'));
-      console.log('');
-
-      // Uses console reporter for both normal and CI for now (it handles exit codes)
-      reportConsole(results, cwd, framework);
-    } catch (e) {
-      spinner.fail(chalk.red('Error running checks'));
-      console.error(e);
-      process.exit(1);
-    }
-  });
-
-program.parse(process.argv);
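Given the commander setup above ('check' is the default command and --ci suppresses the banner and spinner text), invoking the published binary would look roughly like this; an illustrative usage sketch, not taken from the package's documentation:

npx ship18ion            # interactive run with banner, spinner and framework detection
npx ship18ion check --ci # CI mode: minimal output, non-zero exit code when any check fails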
package/src/engine/ast.ts
DELETED
@@ -1,84 +0,0 @@
-import fs from 'fs';
-import * as parser from '@babel/parser';
-import traverse from '@babel/traverse';
-import * as t from '@babel/types';
-
-export interface EnvUsage {
-  name: string;
-  line: number;
-  file: string;
-}
-
-export function findEnvUsages(filePath: string): EnvUsage[] {
-  if (!fs.existsSync(filePath)) return [];
-
-  const code = fs.readFileSync(filePath, 'utf-8');
-  const usages: EnvUsage[] = [];
-
-  // Only parse JS/TS files
-  if (!/\.(js|ts|jsx|tsx)$/.test(filePath)) {
-    return [];
-  }
-
-  try {
-    const ast = parser.parse(code, {
-      sourceType: 'module',
-      plugins: ['typescript', 'jsx'],
-    });
-
-    traverse(ast, {
-      MemberExpression(path) {
-        // 1. Check for process.env.VAR
-        if (
-          t.isMemberExpression(path.node.object) &&
-          t.isIdentifier(path.node.object.object) &&
-          path.node.object.object.name === 'process' &&
-          t.isIdentifier(path.node.object.property) &&
-          path.node.object.property.name === 'env'
-        ) {
-          if (t.isIdentifier(path.node.property)) {
-            usages.push({
-              name: path.node.property.name,
-              line: path.node.loc?.start.line || 0,
-              file: filePath
-            });
-          } else if (t.isStringLiteral(path.node.property)) {
-            usages.push({
-              name: path.node.property.value,
-              line: path.node.loc?.start.line || 0,
-              file: filePath
-            });
-          }
-        }
-
-        // 2. Check for import.meta.env.VAR (Vite)
-        // AST structure: MemberExpression
-        //   object: MemberExpression
-        //     object: MetaProperty (import.meta)
-        //     property: Identifier (env)
-        //   property: Identifier (VAR)
-
-        if (
-          t.isMemberExpression(path.node.object) &&
-          t.isMetaProperty(path.node.object.object) &&
-          path.node.object.object.meta.name === 'import' &&
-          path.node.object.object.property.name === 'meta' &&
-          t.isIdentifier(path.node.object.property) &&
-          path.node.object.property.name === 'env'
-        ) {
-          if (t.isIdentifier(path.node.property)) {
-            usages.push({
-              name: path.node.property.name,
-              line: path.node.loc?.start.line || 0,
-              file: filePath
-            });
-          }
-        }
-      }
-    });
-
-  } catch (e) {
-    // console.warn(`Failed to parse ${filePath}:`, e);
-  }
-  return usages;
-}
package/src/engine/config.ts
DELETED
@@ -1,28 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-
-export interface Ship18ionConfig {
-  env?: {
-    required?: string[];
-    disallowed?: string[];
-  };
-  security?: {
-    noCorsWildcard?: boolean;
-    requireRateLimit?: boolean;
-  };
-  ignore?: string[];
-}
-
-export async function loadConfig(cwd: string = process.cwd()): Promise<Ship18ionConfig> {
-  const configPath = path.join(cwd, 'ship18ion.config.json');
-  if (fs.existsSync(configPath)) {
-    const content = fs.readFileSync(configPath, 'utf-8');
-    try {
-      return JSON.parse(content);
-    } catch (e) {
-      console.error('Failed to parse config file:', e);
-      return {};
-    }
-  }
-  return {};
-}
package/src/engine/detector.ts
DELETED
@@ -1,27 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-
-export type FrameworkType = 'nextjs' | 'remix' | 'vite' | 'nestjs' | 'express' | 'fastify' | 'Node.js / Generic' | 'unknown';
-
-export async function detectFramework(cwd: string): Promise<FrameworkType> {
-  const pkgPath = path.join(cwd, 'package.json');
-  if (!fs.existsSync(pkgPath)) {
-    return 'unknown';
-  }
-
-  try {
-    const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
-    const deps = { ...pkg.dependencies, ...pkg.devDependencies };
-
-    if (deps['next']) return 'nextjs';
-    if (deps['@remix-run/react']) return 'remix';
-    if (deps['vite']) return 'vite';
-    if (deps['@nestjs/core']) return 'nestjs';
-    if (deps['express']) return 'express';
-    if (deps['fastify']) return 'fastify';
-
-    return 'Node.js / Generic';
-  } catch (e) {
-    return 'unknown';
-  }
-}
package/src/engine/runner.ts
DELETED
@@ -1,62 +0,0 @@
-import { Ship18ionConfig } from './config';
-import { scanFiles } from './scanner';
-import { RuleContext, RuleResult } from './types';
-import { checkEnvVars } from '../rules/env';
-import { checkSecrets } from '../rules/secrets';
-import { checkSecurity } from '../rules/security';
-import { checkDependencies, checkBuild } from '../rules/build';
-import { checkHygiene } from '../rules/hygiene';
-import { checkPackages } from '../rules/packages';
-import { checkNextJs } from '../rules/frameworks/nextjs';
-import { checkGit } from '../rules/git';
-
-import { detectFramework } from './detector';
-
-export async function runChecks(
-  config: Ship18ionConfig,
-  cwd: string,
-  onProgress?: (stage: string) => void
-): Promise<RuleResult[]> {
-  if (onProgress) onProgress('Scanning files...');
-  const files = await scanFiles(cwd, config.ignore);
-  // Framework detection
-  const framework = await detectFramework(cwd);
-
-  const ctx: RuleContext = { config, files, cwd, framework };
-
-  const results: RuleResult[] = [];
-
-  // Run all checks
-  if (onProgress) onProgress('Checking environment variables...');
-  results.push(...await checkEnvVars(ctx));
-
-  if (onProgress) onProgress('Scanning for secrets...');
-  results.push(...await checkSecrets(ctx));
-
-  if (onProgress) onProgress('Analyzing security configurations...');
-  results.push(...await checkSecurity(ctx));
-
-  if (onProgress) onProgress('Verifying dependencies...');
-  results.push(...await checkDependencies(ctx));
-
-  if (onProgress) onProgress('Inspecting build artifacts...');
-  results.push(...await checkBuild(ctx));
-
-  // New Rules
-  if (onProgress) onProgress('Checking code hygiene...');
-  results.push(...await checkHygiene(ctx));
-
-  if (onProgress) onProgress('Validating packages...');
-  results.push(...await checkPackages(ctx));
-
-  // Framework specific checks
-  if (framework === 'nextjs') {
-    if (onProgress) onProgress('Running Next.js specific checks...');
-    results.push(...await checkNextJs(ctx));
-  }
-
-  if (onProgress) onProgress('Checking git status...');
-  results.push(...await checkGit(ctx));
-
-  return results;
-}
package/src/engine/scanner.ts
DELETED
@@ -1,22 +0,0 @@
-import { glob } from 'glob';
-import path from 'path';
-
-export async function scanFiles(cwd: string, ignore: string[] = []): Promise<string[]> {
-  // Ignore build artifacts, node_modules, and git
-  const defaultIgnore = [
-    '**/node_modules/**',
-    '**/.git/**',
-    '**/dist/**',
-    '**/build/**',
-    '**/.next/**',
-    '**/.turbo/**',
-    '**/coverage/**'
-  ];
-  // Scan for relevant files: JS/TS code, Configs (JSON/YAML), Env files
-  return glob('**/*.{js,ts,jsx,tsx,json,yaml,yml,env,env.*}', {
-    cwd,
-    ignore: [...defaultIgnore, ...ignore],
-    absolute: true,
-    dot: true, // Include .env files
-  });
-}
package/src/engine/secrets.ts
DELETED
@@ -1,26 +0,0 @@
-export const SECRET_PATTERNS = [
-  { name: 'AWS Access Key', regex: /AKIA[0-9A-Z]{16}/ },
-  { name: 'Google API Key', regex: /AIza[0-9A-Za-z\\-_]{35}/ },
-  { name: 'Stripe Secret Key', regex: /sk_live_[0-9a-zA-Z]{24}/ },
-  { name: 'GitHub Personal Access Token', regex: /ghp_[0-9a-zA-Z]{36}/ },
-  { name: 'Generic Private Key', regex: /-----BEGIN .* PRIVATE KEY-----/ },
-  { name: 'Slack Bot Token', regex: /xoxb-[0-9]{11}-[0-9]{12}-[0-9a-zA-Z]{24}/ },
-  { name: 'OpenAI API Key', regex: /sk-[a-zA-Z0-9]{48}/ }
-];
-
-export function calculateEntropy(str: string): number {
-  const len = str.length;
-  const frequencies = Array.from(str).reduce((freq, char) => {
-    freq[char] = (freq[char] || 0) + 1;
-    return freq;
-  }, {} as Record<string, number>);
-
-  return Object.values(frequencies).reduce((sum, f) => {
-    const p = f / len;
-    return sum - (p * Math.log2(p));
-  }, 0);
-}
-
-export function isHighEntropy(str: string, threshold = 4.5): boolean {
-  return calculateEntropy(str) > threshold;
-}
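calculateEntropy above computes Shannon entropy over character frequencies, H = -Σ p·log2(p), and isHighEntropy compares the result against a threshold of 4.5 bits per character by default. A small usage sketch against those exports; the sample strings are illustrative:

import { calculateEntropy, isHighEntropy } from './secrets';

// 'password' is 8 characters with one repeated letter, so entropy is 2.75 bits/char.
console.log(calculateEntropy('password'));   // 2.75
console.log(isHighEntropy('password'));      // false (below the 4.5 default)

// A 24-character string with no repeats scores log2(24) ≈ 4.58 bits/char.
console.log(isHighEntropy('q9X2vL0pZrT8bKw3mN6yHsJd')); // true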
package/src/reporters/console.ts
DELETED
@@ -1,69 +0,0 @@
-import chalk from 'chalk';
-import path from 'path';
-import { RuleResult } from '../engine/types';
-
-const CATEGORIES: Record<string, { icon: string; label: string }> = {
-  'env': { icon: '🌱', label: 'Environment' },
-  'secret': { icon: '🔐', label: 'Secrets' },
-  'security': { icon: '⚠️', label: 'Security' },
-  'dep': { icon: '📦', label: 'Dependency & Build' },
-  'build': { icon: '📦', label: 'Dependency & Build' },
-  'git': { icon: '🐙', label: 'Git & Repo' },
-  'hygiene': { icon: '🧹', label: 'Code Hygiene' },
-  'package': { icon: '📦', label: 'Packages' },
-};
-
-function getCategory(ruleId: string) {
-  const prefix = ruleId.split('-')[0];
-  return CATEGORIES[prefix] || { icon: '❓', label: 'Other' };
-}
-
-export function reportConsole(results: RuleResult[], cwd: string, framework?: string) {
-  if (framework) {
-    console.log(chalk.blue(`ℹ️ Framework: ${framework.toUpperCase()}`));
-  }
-
-  if (results.length === 0) {
-    console.log(chalk.green('\n✅ Production Readiness Check Passed!\n'));
-    return;
-  }
-
-  const fails = results.filter(r => r.status === 'fail');
-  const warns = results.filter(r => r.status === 'warn');
-
-  if (fails.length > 0) {
-    console.log(chalk.red('\n❌ Production Readiness Check Failed\n'));
-  } else {
-    console.log(chalk.yellow('\n⚠️ Production Readiness Check Passed with Warnings\n'));
-  }
-
-  // Group by category
-  const grouped: Record<string, RuleResult[]> = {};
-  results.forEach(r => {
-    const cat = getCategory(r.ruleId);
-    const key = `${cat.icon} ${cat.label}`;
-    if (!grouped[key]) grouped[key] = [];
-    grouped[key].push(r);
-  });
-
-  for (const [category, items] of Object.entries(grouped)) {
-    console.log(chalk.bold(category));
-    for (const item of items) {
-      const sym = item.status === 'fail' ? chalk.red('✖') : chalk.yellow('!');
-      const location = item.file ? `${path.relative(cwd, item.file)}${item.line ? `:${item.line}` : ''}` : '';
-      console.log(` ${sym} ${item.message} ${chalk.gray(location)}`);
-    }
-    console.log('');
-  }
-
-  const summary = [];
-  if (fails.length > 0) summary.push(chalk.red(`${fails.length} errors`));
-  if (warns.length > 0) summary.push(chalk.yellow(`${warns.length} warnings`));
-
-  console.log(`Summary: ${summary.join(', ')}`);
-  console.log('');
-
-  if (fails.length > 0) {
-    process.exit(1);
-  }
-}
package/src/rules/build.ts
DELETED
@@ -1,77 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-import { RuleContext, RuleResult } from '../engine/types';
-
-export async function checkDependencies(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  const packageJsons = ctx.files.filter(f => f.endsWith('package.json') && !f.includes('node_modules'));
-
-  const devToolsInProd = ['eslint', 'jest', 'mocha', 'nodemon', 'ts-node', 'typescript', 'webpack', 'babel-loader'];
-
-  for (const pkgFile of packageJsons) {
-    try {
-      const content = JSON.parse(fs.readFileSync(pkgFile, 'utf-8'));
-      const deps = content.dependencies || {};
-
-      for (const tool of devToolsInProd) {
-        if (deps[tool]) {
-          results.push({
-            status: 'warn',
-            message: `Dev dependency found in 'dependencies': ${tool}. Should be in 'devDependencies'?`,
-            ruleId: 'dep-dev-in-prod',
-            file: pkgFile
-          });
-        }
-      }
-    } catch (e) {
-      // ignore
-    }
-  }
-  return results;
-}
-
-import { glob } from 'glob';
-
-export async function checkBuild(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  // Explicitly scan build folders (dist, build, .next, .output) for dangerous files
-  // The main scanner ignores these, so we check them separately here.
-
-  const buildDirs = ['dist', 'build', '.next', '.output'];
-  const foundBuildDirs = buildDirs.filter(d => fs.existsSync(path.join(ctx.cwd, d)));
-
-  if (foundBuildDirs.length === 0) {
-    return results;
-  }
-
-  // 1. Check for Source Maps (.map)
-  // We search inside the found build directories
-  for (const dir of foundBuildDirs) {
-    const mapFiles = await glob(`${dir}/**/*.map`, { cwd: ctx.cwd, absolute: true });
-
-    if (mapFiles.length > 0) {
-      results.push({
-        status: 'warn',
-        message: `Found ${mapFiles.length} source map files in '${dir}' (e.g. ${path.basename(mapFiles[0])}). Ensure these are not exposed publicly.`,
-        ruleId: 'build-source-map',
-        file: dir // Point to the directory itself
-      });
-    }
-
-    // 2. Check for .env files in build output
-    // We look for .env* files inside the build dir
-    const envFiles = await glob(`${dir}/**/*.env*`, { cwd: ctx.cwd, absolute: true });
-    for (const file of envFiles) {
-      results.push({
-        status: 'fail',
-        message: `Environment file found in build output (${dir})!`,
-        ruleId: 'build-env-leak',
-        file
-      });
-    }
-  }
-
-  return results;
-}
package/src/rules/env.ts
DELETED
@@ -1,99 +0,0 @@
-import fs from 'fs';
-import dotenv from 'dotenv';
-import { RuleContext, RuleResult } from '../engine/types';
-import { findEnvUsages } from '../engine/ast';
-
-export async function checkEnvVars(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-  const declaredEnvs = new Set<string>();
-  const usedEnvs = new Map<string, { file: string, line: number }[]>();
-
-  // 1. Find and parse .env files (definition detection)
-  const envFiles = ctx.files.filter(f => f.match(/\.env(\..+)?$/));
-  for (const file of envFiles) {
-    const content = fs.readFileSync(file, 'utf-8');
-    try {
-      const parsed = dotenv.parse(content);
-      Object.keys(parsed).forEach(k => declaredEnvs.add(k));
-    } catch (e) {
-      results.push({
-        status: 'warn',
-        message: `Failed to parse env file: ${file}`,
-        ruleId: 'env-parse-error',
-        file: file
-      });
-    }
-  }
-
-  // 2. Scan for usages
-  const codeFiles = ctx.files.filter(f => f.match(/\.(js|ts|jsx|tsx)$/));
-  for (const file of codeFiles) {
-    const usages = findEnvUsages(file);
-    for (const u of usages) {
-      if (!usedEnvs.has(u.name)) {
-        usedEnvs.set(u.name, []);
-      }
-      usedEnvs.get(u.name)?.push({ file, line: u.line });
-    }
-  }
-
-  // 3. Rule: Unused env vars
-  // Fail if Env vars exist but never used
-  for (const env of declaredEnvs) {
-    if (!usedEnvs.has(env)) {
-      // Ignore some common framework vars if needed, but strict mode says unused is bad.
-      results.push({
-        status: 'warn', // Warn for now, maybe fail? User said "Fail if Env vars exist but never used"
-        message: `Unused environment variable: ${env}`,
-        ruleId: 'env-unused',
-        file: envFiles[0] // Just point to first env file for now
-      });
-    }
-  }
-
-  // 4. Rule: Missing required env vars
-  // "App references process.env.X But it’s not defined anywhere"
-  // Also check strict list from config
-  const required = ctx.config.env?.required || [];
-
-  // Check missing from strict config
-  for (const req of required) {
-    if (!declaredEnvs.has(req)) {
-      results.push({
-        status: 'fail',
-        message: `Missing required environment variable (configured): ${req}`,
-        ruleId: 'env-missing-config',
-      });
-    }
-  }
-
-  // Check usage without definition
-  const commonSystemVars = ['NODE_ENV', 'PORT', 'CI'];
-  for (const [env, locs] of usedEnvs) {
-    if (!declaredEnvs.has(env) && !commonSystemVars.includes(env)) {
-      // Check if it is in disallowed list?
-      if (ctx.config.env?.disallowed?.includes(env)) {
-        results.push({
-          status: 'fail',
-          message: `Disallowed environment variable used: ${env}`,
-          ruleId: 'env-disallowed',
-          file: locs[0].file,
-          line: locs[0].line
-        });
-      } else {
-        // It's used but not in .env.
-        // We should probably warn unless we are in strict mode.
-        // User said: "Fail if Required env var is missing" -> checking usage implies requirement.
-        results.push({
-          status: 'warn',
-          message: `Environment variable used but not defined in .env: ${env}`,
-          ruleId: 'env-missing-definition',
-          file: locs[0].file,
-          line: locs[0].line
-        });
-      }
-    }
-  }
-
-  return results;
-}
package/src/rules/frameworks/nextjs.ts
DELETED
@@ -1,33 +0,0 @@
-import { RuleContext, RuleResult } from '../../engine/types';
-import { findEnvUsages } from '../../engine/ast';
-
-export async function checkNextJs(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  // 1. Check for NEXT_PUBLIC_ secrets
-  const codeFiles = ctx.files.filter(f => f.match(/\.(js|ts|jsx|tsx)$/));
-
-  for (const file of codeFiles) {
-    const usages = findEnvUsages(file);
-    for (const usage of usages) {
-      if (usage.name.startsWith('NEXT_PUBLIC_')) {
-        // Heuristic: Does it look like a secret?
-        // e.g. NEXT_PUBLIC_SECRET_KEY, NEXT_PUBLIC_API_SECRET
-        if (usage.name.match(/SECRET|PASSWORD|TOKEN|KEY|AUTH/i)) {
-          // Exception: PUBLIC_KEY is often safe
-          if (!usage.name.match(/PUBLIC_KEY/i)) {
-            results.push({
-              status: 'warn',
-              message: `Potential secret exposed via NEXT_PUBLIC_ variable: ${usage.name}`,
-              ruleId: 'nextjs-public-secret',
-              file: file,
-              line: usage.line
-            });
-          }
-        }
-      }
-    }
-  }
-
-  return results;
-}
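The check above flags NEXT_PUBLIC_ variables whose names look secret-like (SECRET, PASSWORD, TOKEN, KEY, AUTH) unless the name matches PUBLIC_KEY. For illustration only, with hypothetical variable names, the first line below would trigger the nextjs-public-secret warning and the second would not:

// Would warn: name contains TOKEN and NEXT_PUBLIC_ values end up in the client bundle.
const token = process.env.NEXT_PUBLIC_API_TOKEN;

// Would not warn: matches the PUBLIC_KEY exception.
const pk = process.env.NEXT_PUBLIC_STRIPE_PUBLIC_KEY;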
package/src/rules/git.ts
DELETED
@@ -1,95 +0,0 @@
-import { execSync } from 'child_process';
-import { RuleContext, RuleResult } from '../engine/types';
-
-import fs from 'fs';
-import path from 'path';
-
-export async function checkGit(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  try {
-    // ... (Existing git checks) ...
-    // Check for uncommitted changes
-    const status = execSync('git status --porcelain', { cwd: ctx.cwd, encoding: 'utf-8' });
-    if (status.trim().length > 0) {
-      results.push({
-        status: 'warn',
-        message: 'Git working directory is dirty (uncommitted changes). Verify before shipping.',
-        ruleId: 'git-dirty',
-      });
-    }
-
-    // Check current branch
-    const branch = execSync('git rev-parse --abbrev-ref HEAD', { cwd: ctx.cwd, encoding: 'utf-8' }).trim();
-    const allowedBranches = ['main', 'master', 'staging', 'production', 'prod'];
-    if (!allowedBranches.includes(branch)) {
-      // Warn, but maybe less aggressively? Keeping as warn.
-      results.push({
-        status: 'warn',
-        message: `You are on branch '${branch}'. Production builds typically come from main/master.`,
-        ruleId: 'git-branch',
-      });
-    }
-
-    // --- New: .gitignore Check ---
-    const gitignorePath = path.join(ctx.cwd, '.gitignore');
-    if (fs.existsSync(gitignorePath)) {
-      const content = fs.readFileSync(gitignorePath, 'utf-8');
-      const lines = content.split('\n').map(l => l.trim()).filter(l => l && !l.startsWith('#'));
-
-      // Helper to check if item is ignored (naive grep)
-      const isIgnored = (item: string) => lines.some(l => l.includes(item));
-
-      const requiredIgnores = ['node_modules', '.env'];
-      if (ctx.framework === 'nextjs') {
-        requiredIgnores.push('.next');
-      } else if (ctx.framework !== 'unknown') {
-        // For other frameworks, maybe 'dist' or 'build'
-        if (!isIgnored('dist') && !isIgnored('build')) {
-          // We can't strictly require one, but warn if NEITHER is found?
-          // Let's stick to safe defaults.
-        }
-      }
-
-      for (const item of requiredIgnores) {
-        if (!isIgnored(item)) {
-          results.push({
-            status: 'warn',
-            message: `.gitignore is missing '${item}'. This is critical for security and repo size.`,
-            ruleId: 'git-ignore-missing',
-            file: gitignorePath
-          });
-        }
-      }
-
-      // Check for specific dangerous files not being ignored
-      const dangerousPatterns = ['firebase.json', 'serviceAccountKey.json', '*.pem', '*.key'];
-      // This is tricky because firebase.json CAN be committed. serviceAccountKey.json should NOT.
-
-      if (!isIgnored('serviceAccountKey.json')) {
-        // Only warn if the FILE actually exists? Or just warn generic?
-        // Best to warn if the file exists AND isn't ignored.
-        if (fs.existsSync(path.join(ctx.cwd, 'serviceAccountKey.json'))) {
-          results.push({
-            status: 'fail',
-            message: 'serviceAccountKey.json exists but is NOT in .gitignore!',
-            ruleId: 'git-ignore-auth',
-            file: gitignorePath
-          });
-        }
-      }
-
-    } else {
-      results.push({
-        status: 'warn',
-        message: 'No .gitignore file found! node_modules and secrets might be committed.',
-        ruleId: 'git-no-ignore',
-      });
-    }
-
-  } catch (e) {
-    // Not a git repo or git not found
-  }
-
-  return results;
-}
package/src/rules/hygiene.ts
DELETED
@@ -1,52 +0,0 @@
-import fs from 'fs';
-import { RuleContext, RuleResult } from '../engine/types';
-
-export async function checkHygiene(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  const codeFiles = ctx.files.filter(f =>
-    f.match(/\.(js|ts|jsx|tsx)$/) &&
-    !f.includes('.test.') &&
-    !f.includes('.spec.')
-  );
-
-  for (const file of codeFiles) {
-    const content = fs.readFileSync(file, 'utf-8');
-    const lines = content.split('\n');
-
-    lines.forEach((line, index) => {
-      const lineNum = index + 1;
-
-      // 1. Console Log Check
-      // Allow console.error and console.warn, but warn on console.log
-      if (line.includes('console.log(')) {
-        // Ignore if commented out
-        if (!line.trim().startsWith('//') && !line.trim().startsWith('*')) {
-          results.push({
-            status: 'warn',
-            message: 'Leftover console.log() call detected.',
-            ruleId: 'hygiene-console-log',
-            file,
-            line: lineNum
-          });
-        }
-      }
-
-      // 2. TODO / FIXME Check
-      if (line.match(/\/\/\s*(TODO|FIXME):/i)) {
-
-        if (line.match(/FIXME/i)) {
-          results.push({
-            status: 'warn',
-            message: 'FIXME comment found. Resolve before shipping.',
-            ruleId: 'hygiene-fixme',
-            file,
-            line: lineNum
-          });
-        }
-      }
-    });
-  }
-
-  return results;
-}
package/src/rules/packages.ts
DELETED
@@ -1,33 +0,0 @@
-import fs from 'fs';
-import { RuleContext, RuleResult } from '../engine/types';
-
-export async function checkPackages(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  const packageJsons = ctx.files.filter(f => f.endsWith('package.json') && !f.includes('node_modules'));
-
-  for (const pkgFile of packageJsons) {
-    try {
-      const content = JSON.parse(fs.readFileSync(pkgFile, 'utf-8'));
-      const deps = Object.keys(content.dependencies || {});
-      const devDeps = Object.keys(content.devDependencies || {});
-
-      // Find intersection
-      const duplicates = deps.filter(d => devDeps.includes(d));
-
-      for (const dup of duplicates) {
-        results.push({
-          status: 'warn',
-          message: `Package '${dup}' is listed in both 'dependencies' and 'devDependencies'.`,
-          ruleId: 'package-duplicate',
-          file: pkgFile
-        });
-      }
-
-    } catch (e) {
-      // ignore malformed json
-    }
-  }
-
-  return results;
-}
package/src/rules/secrets.ts
DELETED
@@ -1,53 +0,0 @@
-import fs from 'fs';
-import { RuleContext, RuleResult } from '../engine/types';
-import { SECRET_PATTERNS } from '../engine/secrets';
-
-export async function checkSecrets(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  // Skip binary files, lock files, node_modules (already ignored by scanner but specific check here)
-  const filesToCheck = ctx.files.filter(f => !f.match(/\.(png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot|lock|pdf)$/));
-
-  for (const file of filesToCheck) {
-    try {
-      const content = fs.readFileSync(file, 'utf-8');
-      const lines = content.split('\n');
-
-      lines.forEach((line, index) => {
-        // Check Regex Patterns
-        for (const pattern of SECRET_PATTERNS) {
-          if (pattern.regex.test(line)) {
-            results.push({
-              status: 'fail',
-              message: `Potential secret found: ${pattern.name}`,
-              ruleId: 'secret-pattern',
-              file,
-              line: index + 1
-            });
-          }
-        }
-
-        // Check Heuristics for assignments
-        // matches "key = '...'"
-        const genericMsg = line.match(/(api_?key|secret|token|password|auth)[\s]*[:=][\s]*['"]([a-zA-Z0-9_\-]{8,})['"]/i);
-        if (genericMsg) {
-          const match = genericMsg[2];
-          // Heuristic: Must be > 8 chars and not contain 'process.env' or template placeholders
-          if (match && match.length > 8 && !line.includes('process.env') && !match.includes('${')) {
-            results.push({
-              status: 'warn',
-              message: `Possible hardcoded secret (heuristic): ${genericMsg[1]}`,
-              ruleId: 'secret-heuristic',
-              file,
-              line: index + 1
-            });
-          }
-        }
-      });
-    } catch (e) {
-      // Ignore read errors
-    }
-  }
-
-  return results;
-}
package/src/rules/security.ts
DELETED
@@ -1,55 +0,0 @@
-import fs from 'fs';
-import { RuleContext, RuleResult } from '../engine/types';
-
-export async function checkSecurity(ctx: RuleContext): Promise<RuleResult[]> {
-  const results: RuleResult[] = [];
-
-  const codeFiles = ctx.files.filter(f => f.match(/\.(js|ts|jsx|tsx|json)$/));
-
-  for (const file of codeFiles) {
-    const content = fs.readFileSync(file, 'utf-8');
-
-    // 1. Check for Hardcoded NODE_ENV not being production?
-    // Actually we want to verify we are NOT hardcoding 'development' in prod context?
-    // Or "Debug / dev configs enabled" -> debug: true
-
-    if (content.match(/debug:\s*true/)) {
-      results.push({
-        status: 'warn',
-        message: 'Debug mode enabled (debug: true) found',
-        ruleId: 'security-debug-enabled',
-        file
-      });
-    }
-
-    // 2. CORS Wildcard
-    // "origin: '*'" or "origin: *"
-    if (content.match(/origin:\s*['"]?\*['"]?/)) {
-      // Default: Enabled (fail on wildcard) unless explicitly set to false
-      if (ctx.config.security?.noCorsWildcard !== false) {
-        results.push({
-          status: 'fail',
-          message: 'CORS wildcard origin (*) detected',
-          ruleId: 'security-cors-wildcard',
-          file
-        });
-      }
-    }
-
-    // 3. Hardcoded credentials (simple db keywords)
-    // postgres://user:pass@...
-    if (content.match(/:\/\/[a-zA-Z0-9]+:[a-zA-Z0-9]+@/)) {
-      // Exclude localhost
-      if (!content.includes('localhost') && !content.includes('127.0.0.1')) {
-        results.push({
-          status: 'fail',
-          message: 'Hardcoded database credentials in connection string',
-          ruleId: 'security-db-creds',
-          file
-        });
-      }
-    }
-  }
-
-  return results;
-}
package/tests/fixtures/leaky-app/src/index.js
DELETED
@@ -1,21 +0,0 @@
-const express = require('express');
-const app = express();
-
-// Hardcoded secret (fake AWS key)
-const awsKey = "AKIA1234567890123456";
-
-// Missing env var usage (API_KEY is not in .env)
-const apiKey = process.env.API_KEY;
-
-// Debug mode enabled
-const config = {
-  debug: true
-};
-
-// CORS wildcard
-app.use(cors({ origin: '*' }));
-
-// Hardcoded DB credentials
-const db = "postgres://user:password@production-db.com/db";
-
-app.listen(process.env.PORT || 3000);
package/tsconfig.json
DELETED
@@ -1,15 +0,0 @@
-{
-  "compilerOptions": {
-    "target": "ES2020",
-    "module": "commonjs",
-    "outDir": "./dist",
-    "rootDir": "./src",
-    "strict": true,
-    "esModuleInterop": true,
-    "skipLibCheck": true,
-    "forceConsistentCasingInFileNames": true,
-    "resolveJsonModule": true
-  },
-  "include": ["src/**/*"],
-  "exclude": ["node_modules", "**/*.test.ts"]
-}