secure-review-extension 1.0.11 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "secure-review-extension",
3
3
  "displayName": "Secure Review",
4
4
  "description": "Run deep static and Docker-based dynamic secure code reviews directly inside VS Code.",
5
- "version": "1.0.11",
5
+ "version": "1.0.12",
6
6
  "publisher": "Ankit-QI",
7
7
  "icon": "media/shield.png",
8
8
  "license": "MIT",
@@ -2,6 +2,7 @@ const fs = require("node:fs");
2
2
  const path = require("node:path");
3
3
  const os = require("node:os");
4
4
  const { execFileSync } = require("node:child_process");
5
+ const { SHELL_EXTENSIONS, looksLikeAnsibleContent, looksLikeAnsiblePath } = require("./scan-targets");
5
6
 
6
7
  function detectWorkspace(workspaceRoot) {
7
8
  const manifestNames = new Set(walkFiles(workspaceRoot, 4).map((file) => path.relative(workspaceRoot, file)));
@@ -113,7 +114,7 @@ function detectWorkspace(workspaceRoot) {
113
114
  if (ext === ".php") languages.add("php");
114
115
  if (ext === ".rb") languages.add("ruby");
115
116
  if (ext === ".cs") languages.add("csharp");
116
- if ([".sh", ".bash", ".zsh", ".ksh"].includes(ext)) languages.add("shell");
117
+ if (SHELL_EXTENSIONS.has(ext)) languages.add("shell");
117
118
  if ([".js", ".jsx", ".ts", ".tsx", ".mjs", ".cjs"].includes(ext)) languages.add("javascript");
118
119
  if ([".yml", ".yaml"].includes(ext) && looksLikeAnsibleFile(workspaceRoot, file)) frameworks.add("ansible");
119
120
  }
@@ -413,12 +414,7 @@ function readJson(filePath) {
413
414
  }
414
415
 
415
416
  function looksLikeAnsibleFile(workspaceRoot, relativePath) {
416
- const normalizedPath = relativePath.toLowerCase();
417
- if (/(^|\/)(playbooks?|roles|tasks|handlers|group_vars|host_vars)\//.test(normalizedPath)) {
418
- return true;
419
- }
420
-
421
- if (/(^|\/)(site|playbook|deploy|provision)\.ya?ml$/.test(normalizedPath)) {
417
+ if (looksLikeAnsiblePath(relativePath)) {
422
418
  return true;
423
419
  }
424
420
 
@@ -428,12 +424,7 @@ function looksLikeAnsibleFile(workspaceRoot, relativePath) {
428
424
  }
429
425
 
430
426
  try {
431
- const content = fs.readFileSync(fullPath, "utf8").toLowerCase();
432
- return (
433
- /(^|\n)\s*hosts\s*:\s*.+/m.test(content)
434
- && /(^|\n)\s*(tasks|handlers)\s*:/m.test(content)
435
- ) || /ansible\.builtin\./.test(content)
436
- || /(^|\n)\s*become\s*:\s*(true|yes)\b/m.test(content);
427
+ return looksLikeAnsibleContent(fs.readFileSync(fullPath, "utf8"));
437
428
  } catch {
438
429
  return false;
439
430
  }
@@ -0,0 +1,437 @@
1
+ const path = require("node:path");
2
+ const { hashFinding, toPosixPath } = require("../utils");
3
+
4
+ function analyzeRepository(files, workspaceRoot) {
5
+ const findings = [];
6
+ const fileSet = new Set(files.map((file) => file.relativePath));
7
+
8
+ const packageJsonFiles = files.filter((file) => path.basename(file.relativePath) === "package.json");
9
+ for (const packageJsonFile of packageJsonFiles) {
10
+ findings.push(...analyzePackageJson(packageJsonFile, fileSet, workspaceRoot));
11
+ }
12
+
13
+ const requirementsFiles = files.filter((file) => path.basename(file.relativePath) === "requirements.txt");
14
+ for (const requirementsFile of requirementsFiles) {
15
+ findings.push(...analyzeRequirements(requirementsFile, workspaceRoot));
16
+ }
17
+
18
+ const goModFiles = files.filter((file) => path.basename(file.relativePath) === "go.mod");
19
+ for (const goModFile of goModFiles) {
20
+ findings.push(...analyzeGoMod(goModFile, workspaceRoot));
21
+ }
22
+
23
+ const cargoTomlFiles = files.filter((file) => path.basename(file.relativePath) === "Cargo.toml");
24
+ for (const cargoTomlFile of cargoTomlFiles) {
25
+ findings.push(...analyzeCargoToml(cargoTomlFile, workspaceRoot));
26
+ }
27
+
28
+ for (const file of files) {
29
+ const base = path.basename(file.relativePath);
30
+ if (base === "Dockerfile") {
31
+ findings.push(...analyzeDockerfile(file, workspaceRoot));
32
+ }
33
+ if (base === "docker-compose.yml" || base === "docker-compose.yaml") {
34
+ findings.push(...analyzeDockerCompose(file, workspaceRoot));
35
+ }
36
+ if (file.relativePath.includes(".github/workflows/") && file.ext === ".yml") {
37
+ findings.push(...analyzeGithubWorkflow(file, workspaceRoot));
38
+ }
39
+ if (file.relativePath.toLowerCase().includes("openapi") || file.relativePath.endsWith("swagger.json") || file.relativePath.endsWith("swagger.yaml")) {
40
+ findings.push(...analyzeApiSpec(file, workspaceRoot));
41
+ }
42
+ if (looksLikeKubernetesManifest(file)) {
43
+ findings.push(...analyzeKubernetesManifest(file, workspaceRoot));
44
+ }
45
+ if (looksLikeWebServerConfig(file)) {
46
+ findings.push(...analyzeWebServerConfig(file, workspaceRoot));
47
+ }
48
+ if (file.ext === ".tf") {
49
+ findings.push(...analyzeTerraform(file, workspaceRoot));
50
+ }
51
+ if (looksLikeRuntimeConfig(file)) {
52
+ findings.push(...analyzeRuntimeEnvironment(file, workspaceRoot));
53
+ }
54
+ if (looksLikePackageManagerConfig(file)) {
55
+ findings.push(...analyzePackageManagerConfig(file, workspaceRoot));
56
+ }
57
+ findings.push(...analyzeFileSizeAndComplexity(file, workspaceRoot));
58
+ }
59
+
60
+ return findings;
61
+ }
62
+
63
+ function analyzePackageJson(file, fileSet, workspaceRoot) {
64
+ const findings = [];
65
+ try {
66
+ const payload = JSON.parse(file.content);
67
+ const dependencies = {
68
+ ...(payload.dependencies || {}),
69
+ ...(payload.devDependencies || {}),
70
+ ...(payload.optionalDependencies || {}),
71
+ ...(payload.peerDependencies || {})
72
+ };
73
+ const dependencyNames = Object.keys(dependencies);
74
+ const packageDir = path.posix.dirname(file.relativePath);
75
+
76
+ if (dependencyNames.length > 80) {
77
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "dependency-bloat", "High dependency count may increase maintenance and supply-chain risk", "medium", "Dependency Risk", "Dependency Bloat", "dependency-risk", `Detected ${dependencyNames.length} dependencies in package.json.`, "Review whether all dependencies are still required and remove unnecessary packages.", "Trim unused dependencies and keep the dependency graph intentionally small.", "Large dependency graphs increase attack surface and upgrade complexity.", ["Supply Chain Review"]));
78
+ }
79
+
80
+ const weakPins = dependencyNames.filter((name) => /^[~^*]/.test(String(dependencies[name])));
81
+ if (weakPins.length >= 5) {
82
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "weak-version-pinning", "Dependency version pinning appears loose", "medium", "Dependency Risk", "Version Pinning", "dependency-risk", weakPins.slice(0, 8).map((name) => `${name}@${dependencies[name]}`).join(", "), "Pin critical runtime dependencies more tightly or use a disciplined update workflow.", "Adopt explicit upgrade windows and lockfile hygiene for runtime dependencies.", "Loose version ranges can increase drift and make builds less predictable.", ["Supply Chain Review"]));
83
+ }
84
+
85
+ const prereleaseDependencies = dependencyNames.filter((name) => /(?:alpha|beta|rc|canary|next)/i.test(String(dependencies[name])));
86
+ if (prereleaseDependencies.length) {
87
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "prerelease-dependencies", "Workspace depends on prerelease package versions", "medium", "Dependency Risk", "Release Stability", "dependency-risk", prereleaseDependencies.slice(0, 8).map((name) => `${name}@${dependencies[name]}`).join(", "), "Prefer stable reviewed releases for production paths or document why prerelease packages are required.", "Isolate prerelease packages to development-only contexts or pin them under explicit approval.", "Prerelease dependencies often carry higher change risk and less predictable support.", ["Supply Chain Review"]));
88
+ }
89
+
90
+ const nonRegistryDependencies = dependencyNames.filter((name) => /^(git\+|https?:|file:|link:|github:|bitbucket:)/i.test(String(dependencies[name])));
91
+ if (nonRegistryDependencies.length) {
92
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "non-registry-dependencies", "Dependencies are pulled from git, file, or URL sources", "high", "Dependency Risk", "Supply Chain Source", "dependency-risk", nonRegistryDependencies.slice(0, 8).map((name) => `${name}@${dependencies[name]}`).join(", "), "Review non-registry dependency sources carefully and pin them to trusted immutable revisions where possible.", "Prefer reviewed registry releases or immutable commit SHAs instead of mutable URL or local path references.", "Non-registry dependency sources can bypass normal package review and make builds harder to reproduce.", ["Supply Chain Review"]));
93
+ }
94
+
95
+ const zeroMajorDependencies = dependencyNames.filter((name) => /^0\./.test(String(dependencies[name]).replace(/^[~^]/, "")));
96
+ if (zeroMajorDependencies.length >= 5) {
97
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "zero-major-dependencies", "Multiple dependencies are still on zero-major versions", "low", "Dependency Risk", "Maturity Signal", "dependency-risk", zeroMajorDependencies.slice(0, 8).map((name) => `${name}@${dependencies[name]}`).join(", "), "Review whether zero-major dependencies are appropriate for production-critical paths.", "Track maturity and support expectations for early-stage packages in core flows.", "Zero-major versions can signal APIs and maintenance posture that are still evolving quickly.", ["Supply Chain Review"]));
98
+ }
99
+
100
+ const expectedLockfiles = [
101
+ packageDir === "." ? "package-lock.json" : `${packageDir}/package-lock.json`,
102
+ packageDir === "." ? "pnpm-lock.yaml" : `${packageDir}/pnpm-lock.yaml`,
103
+ packageDir === "." ? "yarn.lock" : `${packageDir}/yarn.lock`
104
+ ];
105
+ if (!expectedLockfiles.some((candidate) => fileSet.has(candidate))) {
106
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "missing-lockfile", "JavaScript workspace appears to be missing a lockfile", "medium", "Dependency Risk", "Build Reproducibility", "dependency-risk", `No package-lock.json, pnpm-lock.yaml, or yarn.lock detected near ${file.relativePath}.`, "Commit a lockfile to support reproducible builds and dependency review.", "Use a lockfile in version control for deterministic installs and better auditability.", "Missing lockfiles make builds less reproducible and can hide dependency drift.", ["Build Integrity"]));
107
+ }
108
+
109
+ const scripts = payload.scripts || {};
110
+ for (const [name, command] of Object.entries(scripts)) {
111
+ if (/curl\s+.*\|\s*(bash|sh)/i.test(String(command))) {
112
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "unsafe-install-script", "Script pipes remote content into a shell", "high", "DevOps", "Unsafe Script Pattern", "security", `${name}: ${command}`, "Avoid piping remote content directly into a shell in package scripts.", "Download artifacts explicitly, verify them, and execute only trusted local files.", "Remote shell piping increases supply-chain and command-execution risk.", ["CWE-494"]));
113
+ }
114
+ }
115
+ } catch {
116
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "package-json-parse", "package.json could not be parsed during repository review", "low", "Reliability", "Manifest Parsing", "reliability", "Unable to parse package.json cleanly.", "Validate manifest syntax and keep project metadata well-formed.", "Fix malformed manifests early to keep tooling predictable.", "Broken manifests can disrupt review tooling and build workflows.", ["Build Reliability"]));
117
+ }
118
+
119
+ return findings;
120
+ }
121
+
122
+ function analyzeRequirements(file, workspaceRoot) {
123
+ const findings = [];
124
+ const lines = file.content.split(/\r?\n/).map((line) => line.trim()).filter(Boolean);
125
+ const unpinned = lines.filter((line) => !line.startsWith("#") && !/(==|~=|>=|<=)/.test(line));
126
+ if (unpinned.length >= 3) {
127
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "unpinned-python-dependencies", "Python dependencies may be insufficiently pinned", "medium", "Dependency Risk", "Version Pinning", "dependency-risk", unpinned.slice(0, 8).join(", "), "Pin Python dependencies to reviewed versions and add an upgrade process.", "Use constraints or lock tooling for deterministic Python environments.", "Unpinned dependencies increase build drift and make vulnerability management harder.", ["Supply Chain Review"]));
128
+ }
129
+
130
+ const vcsOrDirectRefs = lines.filter((line) => !line.startsWith("#") && /(@\s*git\+|git\+https?:|https?:\/\/|file:|-e\s+)/i.test(line));
131
+ if (vcsOrDirectRefs.length) {
132
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "python-direct-source-dependencies", "Python requirements include editable, VCS, or direct URL dependencies", "high", "Dependency Risk", "Supply Chain Source", "dependency-risk", vcsOrDirectRefs.slice(0, 8).join(", "), "Review direct-source Python dependencies carefully and pin them to immutable reviewed revisions where possible.", "Prefer reviewed package releases or explicitly approved commit-pinned references over mutable source locations.", "Editable, VCS, and direct URL dependencies can bypass normal package review and make builds less reproducible.", ["Supply Chain Review"]));
133
+ }
134
+
135
+ const prerelease = lines.filter((line) => !line.startsWith("#") && /(a\d+|b\d+|rc\d+|alpha|beta)/i.test(line));
136
+ if (prerelease.length) {
137
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "python-prerelease-dependencies", "Python requirements include prerelease versions", "medium", "Dependency Risk", "Release Stability", "dependency-risk", prerelease.slice(0, 8).join(", "), "Use stable reviewed versions for production paths unless prerelease usage is explicitly justified.", "Limit prerelease dependencies to controlled testing contexts or document the risk acceptance clearly.", "Prerelease Python packages can have higher change risk and weaker long-term support guarantees.", ["Supply Chain Review"]));
138
+ }
139
+
140
+ const insecureIndexes = lines.filter((line) => !line.startsWith("#") && /(--index-url|--extra-index-url)\s+http:\/\//i.test(line));
141
+ if (insecureIndexes.length) {
142
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "python-insecure-package-index", "Python requirements use an insecure package index URL", "high", "Dependency Risk", "Insecure Package Transport", "dependency-risk", insecureIndexes.slice(0, 6).join(", "), "Use HTTPS-backed package indexes and avoid cleartext dependency downloads.", "Move package sources to trusted TLS-protected registries and mirror endpoints.", "Insecure package index transport can expose dependency downloads to tampering or credential leakage.", ["Supply Chain Review", "CWE-319"]));
143
+ }
144
+
145
+ const trustedHosts = lines.filter((line) => !line.startsWith("#") && /--trusted-host\b/i.test(line));
146
+ if (trustedHosts.length) {
147
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "python-trusted-host", "Python requirements bypass normal package host verification", "medium", "Dependency Risk", "Transport Trust Override", "dependency-risk", trustedHosts.slice(0, 6).join(", "), "Use trusted hosts only when there is a documented and controlled need, and prefer proper TLS trust configuration.", "Fix certificate trust or mirror configuration instead of broadly bypassing host verification.", "Trusted-host overrides weaken package transport assurances and can mask registry trust problems.", ["Supply Chain Review"]));
148
+ }
149
+ return findings;
150
+ }
151
+
152
+ function analyzeGoMod(file, workspaceRoot) {
153
+ const findings = [];
154
+ const replaceDirectives = [...file.content.matchAll(/^\s*replace\s+(.+)$/gim)].map((match) => match[1].trim());
155
+ if (replaceDirectives.length) {
156
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "go-mod-replace", "go.mod contains replace directives", "medium", "Dependency Risk", "Module Override", "dependency-risk", replaceDirectives.slice(0, 6).join("; "), "Review replace directives carefully and ensure overridden modules are intentional, trusted, and reproducible.", "Document why each module override is needed and avoid local-path replacements in production builds.", "Module replacement directives can change supply-chain trust assumptions and make builds harder to reproduce.", ["Supply Chain Review"]));
157
+ }
158
+
159
+ const pseudoVersions = [...file.content.matchAll(/v\d+\.\d+\.\d+-\d{14}-[0-9a-f]{12}/g)].map((match) => match[0]);
160
+ if (pseudoVersions.length >= 3) {
161
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "go-pseudo-versions", "go.mod uses multiple pseudo-version dependencies", "low", "Dependency Risk", "Version Stability", "dependency-risk", pseudoVersions.slice(0, 6).join(", "), "Prefer tagged releases for production-critical dependencies where possible.", "Track pseudo-version usage explicitly so upgrades and incident response stay predictable.", "Pseudo-versions can be valid, but they often signal dependencies not pinned to formal releases.", ["Supply Chain Review"]));
162
+ }
163
+
164
+ return findings;
165
+ }
166
+
167
+ function analyzeCargoToml(file, workspaceRoot) {
168
+ const findings = [];
169
+ const gitOrPathDeps = [...file.content.matchAll(/^\s*[A-Za-z0-9_-]+\s*=\s*\{[^}]*\b(git|path)\s*=/gim)].map((match) => match[0].trim());
170
+ if (gitOrPathDeps.length) {
171
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "cargo-git-or-path-dependencies", "Cargo manifest includes git or path dependencies", "high", "Dependency Risk", "Supply Chain Source", "dependency-risk", gitOrPathDeps.slice(0, 8).join("; "), "Review git and path dependencies carefully and pin them to trusted immutable revisions where possible.", "Prefer published crate releases for production builds or document explicit exceptions for git/path dependencies.", "Git and path crate dependencies can bypass standard release review and reduce build reproducibility.", ["Supply Chain Review"]));
172
+ }
173
+
174
+ return findings;
175
+ }
176
+
177
+ function analyzeDockerfile(file, workspaceRoot) {
178
+ const findings = [];
179
+ if (/^FROM\s+.+:latest/im.test(file.content)) {
180
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "docker-latest-tag", "Docker image uses the latest tag", "medium", "DevOps", "Container Reproducibility", "outdated-practices", "Dockerfile uses a latest base image tag.", "Pin the base image to a reviewed version or digest.", "Use immutable digests or explicit version tags for container bases.", "Latest tags create non-deterministic builds and make rollbacks harder.", ["Container Review"]));
181
+ }
182
+ if (!/^USER\s+/im.test(file.content)) {
183
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "docker-root-user", "Dockerfile does not appear to switch away from root", "medium", "DevOps", "Container Privilege", "security", "No USER directive detected in Dockerfile.", "Run containers as a non-root user where feasible.", "Create a dedicated runtime user and drop privileges before launching the app.", "Root containers increase blast radius if the application is compromised.", ["Container Review"]));
184
+ }
185
+ if (/^\s*ENV\s+(?:NODE_ENV|FLASK_ENV|APP_ENV)\s*=\s*(development|dev|debug)\b/im.test(file.content) || /^\s*ENV\s+DEBUG\s*=\s*(1|true|yes)\b/im.test(file.content)) {
186
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "docker-debug-runtime", "Dockerfile bakes debug or development runtime settings into the image", "medium", "Configuration", "Runtime Environment", "security", "Debug or development-oriented ENV settings detected in Dockerfile.", "Avoid baking debug-oriented runtime settings into production container images.", "Keep production images environment-neutral and supply safe runtime values during deployment.", "Debug runtime defaults can expose verbose errors, weaker safeguards, or operational drift in deployed containers.", ["Container Review"]));
187
+ }
188
+ if (/^\s*(ENV|RUN)\s+.*(?:NODE_TLS_REJECT_UNAUTHORIZED\s*=\s*0|PYTHONHTTPSVERIFY\s*=\s*0|GIT_SSL_NO_VERIFY\s*=\s*1|strict-ssl\s+false|curl\s+-k\b)/im.test(file.content)) {
189
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "docker-tls-verification-disabled", "Dockerfile appears to disable TLS or certificate verification", "high", "Configuration", "Transport Security", "security", "Dockerfile contains environment or command settings that disable TLS verification.", "Remove TLS verification bypasses from image builds and runtime defaults.", "Configure trusted CAs or package mirrors instead of disabling certificate checks.", "Disabling TLS verification can expose artifact retrieval and outbound connections to interception and tampering.", ["Container Review", "CWE-295"]));
190
+ }
191
+ return findings;
192
+ }
193
+
194
+ function analyzeDockerCompose(file, workspaceRoot) {
195
+ const findings = [];
196
+
197
+ if (/privileged\s*:\s*true/i.test(file.content)) {
198
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "compose-privileged", "Docker Compose service runs in privileged mode", "high", "DevOps", "Container Privilege", "security", "privileged: true detected in Compose configuration.", "Avoid privileged containers unless there is a documented and tightly controlled exception.", "Drop privileged mode and grant only the minimum required capabilities.", "Privileged containers significantly expand host compromise risk if the service is breached.", ["Container Review"]));
199
+ }
200
+
201
+ if (/network_mode\s*:\s*["']?host["']?/i.test(file.content)) {
202
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "compose-host-network", "Docker Compose service uses host networking", "medium", "DevOps", "Network Isolation", "security", "network_mode: host detected in Compose configuration.", "Use bridged or explicitly defined networks unless host networking is strictly required.", "Isolate container networking to reduce unintended service exposure.", "Host networking bypasses normal container isolation and can expand network blast radius.", ["Container Review"]));
203
+ }
204
+
205
+ if (/ports:\s*[\r\n]+(?:\s*-\s*["']?0\.0\.0\.0:|\s*-\s*["']?\d+\.\d+\.\d+\.\d+:)/i.test(file.content)) {
206
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "compose-bind-all-interfaces", "Docker Compose publishes ports on broad host interfaces", "medium", "DevOps", "Network Exposure", "security", "Compose ports appear to bind to all interfaces or explicit host IPs.", "Publish only the ports that are required and bind them to trusted interfaces where possible.", "Limit published services to localhost or controlled network boundaries during development and testing.", "Broad port publishing can expose internal services more widely than intended.", ["Container Review"]));
207
+ }
208
+
209
+ return findings;
210
+ }
211
+
212
+ function analyzeGithubWorkflow(file, workspaceRoot) {
213
+ const findings = [];
214
+ const unpinnedActions = [...file.content.matchAll(/uses:\s*([^\n@]+)@(main|master|v?\d+\s*$)/gmi)];
215
+ if (unpinnedActions.length) {
216
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "workflow-unpinned-action", "GitHub Actions workflow may use loosely pinned actions", "medium", "DevOps", "CI Supply Chain", "security", unpinnedActions.slice(0, 5).map((match) => match[0].trim()).join("; "), "Pin actions to trusted versions or commit SHAs.", "Use immutable action SHAs for sensitive CI workflows.", "Loosely pinned CI actions increase supply-chain risk in build pipelines.", ["CI/CD Review"]));
217
+ }
218
+ return findings;
219
+ }
220
+
221
+ function analyzeApiSpec(file, workspaceRoot) {
222
+ const findings = [];
223
+ if (!/securitySchemes|security:/i.test(file.content)) {
224
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "api-spec-missing-security", "API specification may be missing explicit security definitions", "medium", "API Review", "Security Contract", "architecture", "No obvious securitySchemes or security declarations were found in the API spec.", "Document authentication and authorization expectations directly in the API contract.", "Use the API spec as the source of truth for auth and sensitive endpoint behavior.", "Underspecified API security contracts increase integration and review ambiguity.", ["API Review"]));
225
+ }
226
+ return findings;
227
+ }
228
+
229
+ function analyzeKubernetesManifest(file, workspaceRoot) {
230
+ const findings = [];
231
+
232
+ if (/privileged\s*:\s*true/i.test(file.content)) {
233
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "k8s-privileged-container", "Kubernetes workload enables privileged container mode", "high", "DevOps", "Pod Security", "security", "privileged: true detected in workload manifest.", "Avoid privileged pods unless there is a tightly controlled operational requirement.", "Use Pod Security Standards and least-privilege container settings by default.", "Privileged containers can break isolation boundaries and increase cluster compromise risk.", ["Kubernetes Review"]));
234
+ }
235
+
236
+ if (!/runAsNonRoot\s*:\s*true/i.test(file.content) && /kind:\s*(Deployment|StatefulSet|DaemonSet|Job|CronJob|Pod)/i.test(file.content)) {
237
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "k8s-run-as-root", "Kubernetes workload may not enforce non-root execution", "medium", "DevOps", "Pod Security", "security", "No obvious runAsNonRoot: true setting detected for workload manifest.", "Set runAsNonRoot and related securityContext fields for application containers.", "Adopt non-root container execution as the default baseline in cluster workloads.", "Root containers in Kubernetes increase the blast radius of container compromise.", ["Kubernetes Review"]));
238
+ }
239
+
240
+ if (/allowPrivilegeEscalation\s*:\s*true/i.test(file.content)) {
241
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "k8s-allow-privilege-escalation", "Kubernetes workload allows privilege escalation", "high", "DevOps", "Pod Security", "security", "allowPrivilegeEscalation: true detected in manifest.", "Disable privilege escalation unless there is an explicit and reviewed need.", "Set allowPrivilegeEscalation to false for normal application workloads.", "Privilege escalation increases the risk that a container escape or local exploit can become more severe.", ["Kubernetes Review"]));
242
+ }
243
+
244
+ return findings;
245
+ }
246
+
247
+ function analyzeWebServerConfig(file, workspaceRoot) {
248
+ const findings = [];
249
+
250
+ if (/(autoindex\s+on;|Options\s+\+Indexes)/i.test(file.content)) {
251
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "webserver-directory-listing", "Web server configuration appears to enable directory listing", "medium", "Configuration", "Directory Listing", "security", "Directory indexing/listing directive detected.", "Disable directory listing unless there is a deliberate and reviewed need for it.", "Serve only intended resources and avoid exposing directory indexes.", "Directory listing can leak internal files, naming conventions, and sensitive static assets.", ["Web Server Review"]));
252
+ }
253
+
254
+ if (!/(add_header\s+X-Frame-Options|Header\s+always\s+set\s+X-Frame-Options|add_header\s+Content-Security-Policy|Header\s+always\s+set\s+Content-Security-Policy)/i.test(file.content)) {
255
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "webserver-missing-security-headers", "Web server config may be missing clickjacking or CSP header hardening", "medium", "Configuration", "Security Headers", "security", "No obvious X-Frame-Options or Content-Security-Policy header directives detected.", "Add the required security headers at the web server or reverse proxy layer where appropriate.", "Define a consistent baseline for clickjacking and content loading protections in edge configs.", "Missing security headers can leave web applications more exposed even when application code is otherwise hardened.", ["Web Server Review"]));
256
+ }
257
+
258
+ return findings;
259
+ }
260
+
261
+ function analyzeTerraform(file, workspaceRoot) {
262
+ const findings = [];
263
+
264
+ if (/0\.0\.0\.0\/0/.test(file.content)) {
265
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "terraform-open-cidr", "Terraform allows exposure to 0.0.0.0/0", "high", "DevOps", "Network Exposure", "security", "0.0.0.0/0 detected in Terraform configuration.", "Restrict ingress and egress CIDRs to only the required address ranges.", "Use narrowly scoped network rules and document justified public exposure explicitly.", "World-open CIDRs can expose services or management planes broadly if applied to sensitive resources.", ["IaC Review"]));
266
+ }
267
+
268
+ if (/public_access\s*=\s*true|publicly_accessible\s*=\s*true/i.test(file.content)) {
269
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "terraform-public-resource", "Terraform marks a resource as publicly accessible", "medium", "DevOps", "Public Exposure", "security", "public access flag detected in Terraform resource definition.", "Review whether the resource truly needs public exposure and protect it with layered controls if so.", "Prefer private-by-default resource placement and explicit ingress controls.", "Publicly accessible infrastructure expands the attack surface and needs stronger review and monitoring.", ["IaC Review"]));
270
+ }
271
+
272
+ return findings;
273
+ }
274
+
275
+ function analyzeRuntimeEnvironment(file, workspaceRoot) {
276
+ const findings = [];
277
+ const content = file.content;
278
+
279
+ if (/(^|\n)\s*(?:DEBUG|FLASK_DEBUG|DJANGO_DEBUG|APP_DEBUG|ENABLE_DEBUG)\s*[:=]\s*(1|true|yes|on)\b/i.test(content)) {
280
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "runtime-debug-enabled", "Runtime configuration enables debug behavior", "medium", "Configuration", "Debug Mode", "security", "Debug-oriented runtime setting detected in config.", "Disable debug mode in deployed environments and use controlled logging or diagnostics instead.", "Keep debugging features off by default outside local development.", "Debug mode can expose stack traces, internal configuration, or relaxed security behavior in runtime environments.", ["Runtime Review"]));
281
+ }
282
+
283
+ if (/(^|\n)\s*(?:HOST|BIND|BIND_ADDRESS|LISTEN_ADDR|SERVER_HOST|FLASK_HOST)\s*[:=]\s*(0\.0\.0\.0|\*)\b/i.test(content)) {
284
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "runtime-bind-all-interfaces", "Runtime configuration binds the service to all interfaces", "medium", "Configuration", "Network Exposure", "security", "Broad bind address detected in runtime config.", "Bind services to the minimum required interface or document the network boundary controls protecting them.", "Prefer localhost or controlled internal interfaces for local and administrative services.", "Binding to all interfaces expands reachability and can expose services more broadly than intended.", ["Runtime Review"]));
285
+ }
286
+
287
+ if (/(^|\n)\s*(?:NODE_TLS_REJECT_UNAUTHORIZED|PYTHONHTTPSVERIFY|GIT_SSL_NO_VERIFY|NPM_CONFIG_STRICT_SSL|STRICT_SSL|CURL_INSECURE)\s*[:=]\s*(0|false|no|off)\b/i.test(content) || /(^|\n)\s*CURL_INSECURE\s*[:=]\s*(1|true|yes|on)\b/i.test(content)) {
288
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "runtime-tls-verification-disabled", "Runtime configuration disables TLS or certificate verification", "high", "Configuration", "Transport Security", "security", "TLS verification bypass detected in runtime config.", "Remove TLS verification bypasses and configure trusted CA roots or package mirrors properly.", "Use proper certificate trust management instead of disabling verification at runtime.", "Disabling TLS verification increases the risk of man-in-the-middle attacks and unsafe package or API transport.", ["Runtime Review", "CWE-295"]));
289
+ }
290
+
291
+ if (/(^|\n)\s*(?:LOG_LEVEL|LOGLEVEL)\s*[:=]\s*debug\b/i.test(content)) {
292
+ findings.push(repoFinding(workspaceRoot, file.relativePath, "runtime-debug-logging", "Runtime configuration enables debug-level logging", "low", "Configuration", "Logging Exposure", "security", "Debug log level detected in runtime config.", "Review whether debug logging is appropriate for the target environment and redact sensitive data if retained.", "Use safer default log levels in shared or production-like environments.", "Debug logs can leak sensitive identifiers, tokens, or internal implementation details if enabled broadly.", ["Runtime Review"]));
293
+ }
294
+
295
+ return findings;
296
+ }
297
+
298
// Scans package-manager configuration content (.npmrc, pip.conf, pip.ini) for
// settings that weaken dependency transport security or host trust.
//
// @param {{content: string, relativePath: string}} file - file under review.
// @param {string} workspaceRoot - workspace root used to build absolute paths.
// @returns {Array<object>} findings built via repoFinding(); empty when clean.
function analyzePackageManagerConfig(file, workspaceRoot) {
  const findings = [];
  const content = file.content;

  // Disabled strict SSL or a plain-HTTP registry both expose dependency
  // downloads (and any registry credentials) to interception or tampering.
  if (/strict-ssl\s*=\s*false/i.test(content) || /registry\s*=\s*http:\/\//i.test(content)) {
    findings.push(repoFinding(workspaceRoot, file.relativePath, "package-manager-insecure-transport", "Package manager configuration weakens registry transport security", "high", "Dependency Risk", "Registry Transport", "dependency-risk", "Package manager config appears to disable strict SSL or use an HTTP registry.", "Use HTTPS registries and keep strict SSL verification enabled.", "Fix certificate trust or mirror configuration instead of weakening package-manager transport settings.", "Weak registry transport settings can expose dependency downloads and credentials to tampering or interception.", ["Supply Chain Review", "CWE-319"]));
  }

  // Fix: the previous pattern /trusted-host\s*=|trusted-host\b/ was redundant —
  // the "trusted-host\b" alternative already matches every string the
  // "trusted-host\s*=" alternative did, so only the word-boundary form is kept.
  if (/trusted-host\b/i.test(content)) {
    findings.push(repoFinding(workspaceRoot, file.relativePath, "package-manager-trusted-host", "Package manager configuration overrides normal host trust checks", "medium", "Dependency Risk", "Registry Trust Override", "dependency-risk", "Trusted host override detected in package-manager config.", "Review trusted-host usage carefully and prefer proper CA trust configuration or approved mirrors.", "Limit host-trust overrides to tightly controlled internal repositories with documented justification.", "Trust overrides weaken transport assurances and can make dependency integrity harder to reason about.", ["Supply Chain Review"]));
  }

  return findings;
}
312
+
313
// Heuristic check: does this YAML file contain Kubernetes manifest markers
// (a workload/RBAC `kind:` or an apps/batch apiVersion)?
function looksLikeKubernetesManifest(file) {
  const yamlExtensions = [".yml", ".yaml"];
  if (!yamlExtensions.includes(file.ext)) {
    return false;
  }

  const manifestMarkers = [
    /kind:\s*(Deployment|StatefulSet|DaemonSet|Job|CronJob|Pod|Ingress|ServiceAccount|Role|ClusterRole)\b/i,
    /apiVersion:\s*apps\/v1/i,
    /apiVersion:\s*batch\/v1/i
  ];
  return manifestMarkers.some((pattern) => pattern.test(file.content));
}
322
+
323
// Returns true when the file name or path suggests an nginx/Apache server config.
function looksLikeWebServerConfig(file) {
  const lowerPath = file.relativePath.toLowerCase();
  const fileName = path.basename(file.relativePath).toLowerCase();

  const knownConfigNames = ["nginx.conf", "default.conf", "apache2.conf", "httpd.conf"];
  if (knownConfigNames.includes(fileName)) {
    return true;
  }

  // Also treat anything living under a web-server config directory as a match.
  return ["/nginx/", "/apache/", "/httpd/"].some((segment) => lowerPath.includes(segment));
}
334
+
335
// Heuristic: does this file look like runtime/application configuration?
// Matches common config extensions, well-known config file names, and
// dotenv-style files (via isEnvLikeFile).
//
// Fix: removed the unused local `normalized` (declared but never read in the
// original implementation).
function looksLikeRuntimeConfig(file) {
  const base = path.basename(file.relativePath).toLowerCase();

  if ([".properties", ".ini", ".cfg", ".conf"].includes(file.ext)) {
    return true;
  }

  const knownNames = ["application.yml", "application.yaml", "application.properties", "settings.ini", "settings.cfg"];
  if (knownNames.includes(base)) {
    return true;
  }

  return isEnvLikeFile(file.relativePath, base);
}
346
+
347
// Returns true when the file is a package-manager config (.npmrc or pip config).
function looksLikePackageManagerConfig(file) {
  const lowerPath = file.relativePath.toLowerCase();
  const fileName = path.basename(file.relativePath).toLowerCase();

  if ([".npmrc", "pip.conf", "pip.ini"].includes(fileName)) {
    return true;
  }
  return lowerPath.endsWith("/pip/pip.conf");
}
355
+
356
// Flags files whose sheer size (> 800 lines) suggests weak modularity.
// Returns an empty array for files at or under the threshold.
function analyzeFileSizeAndComplexity(file, workspaceRoot) {
  const totalLines = file.content.split(/\r?\n/).length;
  if (totalLines <= 800) {
    return [];
  }
  return [
    repoFinding(workspaceRoot, file.relativePath, "large-file", "Large source file may indicate weak modularity", "low", "Maintainability", "Large File", "maintainability", `${totalLines} lines detected in a single file.`, "Split large files into smaller modules with clearer ownership boundaries.", "Separate transport, business logic, and data handling concerns.", "Very large files are harder to review, test, and secure correctly.", ["Maintainability Review"])
  ];
}
364
+
365
// Builds a normalized static-analysis finding record for repository heuristics.
// All repo-level findings are anchored at line 1 / column 1 of the file and
// tagged with quality metadata derived from the finding's code/category/domain.
function repoFinding(workspaceRoot, relativePath, code, title, severity, category, subcategory, reviewDomain, evidence, remediation, suggestion, whyItMatters, standards) {
  const quality = deriveRepoFindingQuality({ code, category, reviewDomain, severity });
  const absolutePath = workspaceRoot ? path.join(workspaceRoot, relativePath) : relativePath;

  const location = {
    filePath: absolutePath,
    relativePath,
    line: 1,
    column: 1
  };
  const guidance = {
    evidence,
    remediation,
    suggestion,
    whyItMatters,
    standards
  };

  return {
    id: hashFinding([relativePath, code, title]),
    source: "static",
    title,
    severity,
    confidence: "medium",
    category,
    subcategory,
    reviewDomain,
    ...location,
    code,
    message: `${title} in ${toPosixPath(relativePath)}`,
    ...guidance,
    findingType: quality.findingType,
    evidenceStrength: quality.evidenceStrength,
    flaggedBy: "repository-heuristics",
    manualReviewRecommended: quality.manualReviewRecommended
  };
}
393
+
394
// Derives finding-quality metadata (findingType, evidenceStrength,
// manualReviewRecommended) from a finding's code, category, domain, and
// severity. Manual review is always recommended for heuristic findings.
function deriveRepoFindingQuality({ code, category, reviewDomain, severity }) {
  const codeText = String(code || "").toLowerCase();
  const categoryText = String(category || "").toLowerCase();
  const domainText = String(reviewDomain || "").toLowerCase();

  const quality = (findingType, evidenceStrength) => ({
    findingType,
    evidenceStrength,
    manualReviewRecommended: true
  });

  if (domainText === "dependency-risk" || categoryText.includes("dependency")) {
    return quality("dependency-risk", "medium");
  }

  // Codes that describe runtime/network posture get contextual-warning
  // treatment, with stronger evidence for high-severity findings.
  const contextualMarkers = ["debug", "bind-all", "open-cidr", "public-resource", "tls-verification"];
  if (contextualMarkers.some((marker) => codeText.includes(marker))) {
    return quality("contextual-warning", severity === "high" ? "high" : "medium");
  }

  if (["maintainability", "code-quality", "architecture"].includes(domainText)) {
    return quality("recommendation", "medium");
  }

  return quality(domainText === "security" ? "contextual-warning" : "recommendation", "medium");
}
429
+
430
// Detects dotenv-style files either by exact base name or by a ".env"
// segment anywhere in the path (case-insensitive).
function isEnvLikeFile(filePath, baseName) {
  const envNames = new Set([".env", ".env.local", ".env.development", ".env.production", ".env.test"]);
  if (envNames.has(baseName)) {
    return true;
  }
  return /\.env(\.|$)/i.test(filePath);
}
434
+
435
// Public surface of this module: only the top-level entry point is exported;
// all analysis helpers above remain module-private.
module.exports = {
  analyzeRepository
};
@@ -0,0 +1,66 @@
1
// Canonical severity levels accepted by rule definitions (frozen so rule
// modules cannot mutate the shared enum).
const SEVERITY = Object.freeze({
  CRITICAL: "critical",
  HIGH: "high",
  MEDIUM: "medium",
  LOW: "low"
});

// Canonical confidence levels for rules (frozen for the same reason).
const CONFIDENCE = Object.freeze({
  HIGH: "high",
  MEDIUM: "medium",
  LOW: "low"
});

// Membership sets used to validate/normalize author-supplied enum values.
const VALID_SEVERITIES = new Set(Object.values(SEVERITY));
const VALID_CONFIDENCE = new Set(Object.values(CONFIDENCE));
16
+
17
// Normalizes a raw rule description into a fully-populated rule object:
// merges defaults (rule fields win), coerces severity/confidence to valid
// enum values, and normalizes all list-valued fields. Optional list fields
// keep `undefined` to mean "not specified".
function defineRule(rule, defaults = {}) {
  const combined = {
    confidence: CONFIDENCE.MEDIUM,
    standards: [],
    ...defaults,
    ...rule
  };

  const normalized = { ...combined };
  normalized.severity = normalizeEnum(combined.severity, VALID_SEVERITIES, SEVERITY.MEDIUM);
  normalized.confidence = normalizeEnum(combined.confidence, VALID_CONFIDENCE, CONFIDENCE.MEDIUM);
  normalized.standards = normalizeArray(combined.standards);

  const optionalListFields = [
    "includeExtensions",
    "excludeExtensions",
    "includeFrameworks",
    "excludeFrameworks",
    "includePathPatterns",
    "excludePathPatterns"
  ];
  for (const field of optionalListFields) {
    normalized[field] = normalizeOptionalArray(combined[field]);
  }

  return normalized;
}
38
+
39
// Returns a defineRule variant with the given defaults pre-applied.
function defineRuleFactory(defaults = {}) {
  return function (rule) {
    return defineRule(rule, defaults);
  };
}
42
+
43
// Returns `value` when it is a member of the `allowed` set; otherwise `fallback`.
function normalizeEnum(value, allowed, fallback) {
  if (allowed.has(value)) {
    return value;
  }
  return fallback;
}
46
+
47
// Coerces a value to an array: falsy values become [], arrays pass through
// unchanged (same reference), and any other scalar is wrapped.
function normalizeArray(value) {
  if (!value) {
    return [];
  }
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
}
53
+
54
// Like normalizeArray, but preserves `undefined` to mean "field not specified"
// (other falsy values still normalize to an empty array).
function normalizeOptionalArray(value) {
  if (value === undefined) {
    return undefined;
  }
  if (!value) {
    return [];
  }
  return Array.isArray(value) ? value : [value];
}
60
+
61
// Public surface: the frozen enum constants plus the rule-definition helpers.
// The normalize* functions above are intentionally module-private.
module.exports = {
  CONFIDENCE,
  SEVERITY,
  defineRule,
  defineRuleFactory
};