getdoorman 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +181 -0
  3. package/bin/doorman.js +444 -0
  4. package/package.json +74 -0
  5. package/src/ai-fixer.js +559 -0
  6. package/src/ast-scanner.js +434 -0
  7. package/src/auth.js +149 -0
  8. package/src/baseline.js +48 -0
  9. package/src/compliance.js +539 -0
  10. package/src/config.js +466 -0
  11. package/src/custom-rules.js +32 -0
  12. package/src/dashboard.js +202 -0
  13. package/src/detector.js +142 -0
  14. package/src/fix-engine.js +48 -0
  15. package/src/fix-registry-extra.js +95 -0
  16. package/src/fix-registry-go-rust.js +77 -0
  17. package/src/fix-registry-java-csharp.js +77 -0
  18. package/src/fix-registry-js.js +99 -0
  19. package/src/fix-registry-mcp-ai.js +57 -0
  20. package/src/fix-registry-python.js +87 -0
  21. package/src/fixer-ruby-php.js +608 -0
  22. package/src/fixer.js +2113 -0
  23. package/src/hooks.js +115 -0
  24. package/src/ignore.js +176 -0
  25. package/src/index.js +384 -0
  26. package/src/metrics.js +126 -0
  27. package/src/monorepo.js +65 -0
  28. package/src/presets.js +54 -0
  29. package/src/reporter.js +975 -0
  30. package/src/rule-worker.js +36 -0
  31. package/src/rules/ast-rules.js +756 -0
  32. package/src/rules/bugs/accessibility.js +235 -0
  33. package/src/rules/bugs/ai-codegen-fixable.js +172 -0
  34. package/src/rules/bugs/ai-codegen.js +365 -0
  35. package/src/rules/bugs/code-smell-bugs.js +247 -0
  36. package/src/rules/bugs/crypto-bugs.js +195 -0
  37. package/src/rules/bugs/docker-bugs.js +158 -0
  38. package/src/rules/bugs/general.js +361 -0
  39. package/src/rules/bugs/go-bugs.js +279 -0
  40. package/src/rules/bugs/index.js +73 -0
  41. package/src/rules/bugs/js-api.js +257 -0
  42. package/src/rules/bugs/js-array-object.js +210 -0
  43. package/src/rules/bugs/js-async-fixable.js +223 -0
  44. package/src/rules/bugs/js-async.js +211 -0
  45. package/src/rules/bugs/js-closure-scope.js +182 -0
  46. package/src/rules/bugs/js-database.js +203 -0
  47. package/src/rules/bugs/js-error-handling.js +148 -0
  48. package/src/rules/bugs/js-logic.js +261 -0
  49. package/src/rules/bugs/js-memory.js +214 -0
  50. package/src/rules/bugs/js-node.js +361 -0
  51. package/src/rules/bugs/js-react.js +373 -0
  52. package/src/rules/bugs/js-regex.js +200 -0
  53. package/src/rules/bugs/js-state.js +272 -0
  54. package/src/rules/bugs/js-type-coercion.js +318 -0
  55. package/src/rules/bugs/nextjs-bugs.js +242 -0
  56. package/src/rules/bugs/nextjs-fixable.js +120 -0
  57. package/src/rules/bugs/node-fixable.js +178 -0
  58. package/src/rules/bugs/python-advanced.js +245 -0
  59. package/src/rules/bugs/python-fixable.js +98 -0
  60. package/src/rules/bugs/python.js +284 -0
  61. package/src/rules/bugs/react-fixable.js +207 -0
  62. package/src/rules/bugs/ruby-bugs.js +182 -0
  63. package/src/rules/bugs/shell-bugs.js +181 -0
  64. package/src/rules/bugs/silent-failures.js +261 -0
  65. package/src/rules/bugs/ts-bugs.js +235 -0
  66. package/src/rules/bugs/unused-vars.js +65 -0
  67. package/src/rules/compliance/accessibility-ext.js +468 -0
  68. package/src/rules/compliance/education.js +322 -0
  69. package/src/rules/compliance/financial.js +421 -0
  70. package/src/rules/compliance/frameworks.js +507 -0
  71. package/src/rules/compliance/healthcare.js +520 -0
  72. package/src/rules/compliance/index.js +2714 -0
  73. package/src/rules/compliance/regional-eu.js +480 -0
  74. package/src/rules/compliance/regional-international.js +903 -0
  75. package/src/rules/cost/index.js +1993 -0
  76. package/src/rules/data/index.js +2503 -0
  77. package/src/rules/dependencies/index.js +1684 -0
  78. package/src/rules/deployment/index.js +2050 -0
  79. package/src/rules/index.js +71 -0
  80. package/src/rules/infrastructure/index.js +3048 -0
  81. package/src/rules/performance/index.js +3455 -0
  82. package/src/rules/quality/index.js +3175 -0
  83. package/src/rules/reliability/index.js +3040 -0
  84. package/src/rules/scope-rules.js +815 -0
  85. package/src/rules/security/ai-api.js +1177 -0
  86. package/src/rules/security/auth.js +1328 -0
  87. package/src/rules/security/cors.js +127 -0
  88. package/src/rules/security/crypto.js +527 -0
  89. package/src/rules/security/csharp.js +862 -0
  90. package/src/rules/security/csrf.js +193 -0
  91. package/src/rules/security/dart.js +835 -0
  92. package/src/rules/security/deserialization.js +291 -0
  93. package/src/rules/security/file-upload.js +187 -0
  94. package/src/rules/security/go.js +850 -0
  95. package/src/rules/security/headers.js +235 -0
  96. package/src/rules/security/index.js +65 -0
  97. package/src/rules/security/injection.js +1639 -0
  98. package/src/rules/security/mcp-server.js +71 -0
  99. package/src/rules/security/misconfiguration.js +660 -0
  100. package/src/rules/security/oauth-jwt.js +329 -0
  101. package/src/rules/security/path-traversal.js +295 -0
  102. package/src/rules/security/php.js +1054 -0
  103. package/src/rules/security/prototype-pollution.js +283 -0
  104. package/src/rules/security/rate-limiting.js +208 -0
  105. package/src/rules/security/ruby.js +1061 -0
  106. package/src/rules/security/rust.js +693 -0
  107. package/src/rules/security/secrets.js +747 -0
  108. package/src/rules/security/shell.js +647 -0
  109. package/src/rules/security/ssrf.js +298 -0
  110. package/src/rules/security/supply-chain-advanced.js +393 -0
  111. package/src/rules/security/supply-chain.js +734 -0
  112. package/src/rules/security/swift.js +835 -0
  113. package/src/rules/security/taint.js +27 -0
  114. package/src/rules/security/xss.js +520 -0
  115. package/src/scan-cache.js +71 -0
  116. package/src/scanner.js +710 -0
  117. package/src/scope-analyzer.js +685 -0
  118. package/src/share.js +88 -0
  119. package/src/taint.js +300 -0
  120. package/src/telemetry.js +183 -0
  121. package/src/tracer.js +190 -0
  122. package/src/upload.js +35 -0
  123. package/src/worker.js +31 -0
@@ -0,0 +1,2050 @@
1
// File extensions and path fragments that identify CI/CD pipeline configuration.
const CI_EXTENSIONS = ['.yml', '.yaml'];
const CI_PATHS = ['.github/workflows', '.gitlab-ci', '.circleci', 'Jenkinsfile', '.travis'];

/**
 * Returns true when a file path looks like CI/CD pipeline configuration.
 *
 * Bug fix: the original required a .yml/.yaml extension AND a CI path
 * fragment, so 'Jenkinsfile' — listed in CI_PATHS but extensionless —
 * could never match. Jenkinsfiles are now recognized by name.
 *
 * @param {string} f - repository-relative file path
 * @returns {boolean}
 */
function isCIFile(f) {
  // Jenkinsfile has no extension; check it by name before the extension gate.
  if (f.endsWith('Jenkinsfile')) return true;
  return CI_EXTENSIONS.some(ext => f.endsWith(ext)) &&
    CI_PATHS.some(p => f.includes(p));
}
8
+
9
/**
 * Heuristic: does this path point at a test file or test directory?
 * Matching is case-insensitive.
 *
 * @param {string} f - repository-relative file path
 * @returns {boolean}
 */
function isTestFile(f) {
  const needles = ['.test.', '.spec.', '__tests__', '/test/', '/tests/'];
  const lower = f.toLowerCase();
  return needles.some(n => lower.includes(n));
}
15
+
16
+ const rules = [
17
+ // DEPLOY-001: No CI/CD pipeline found
18
+ {
19
+ id: 'DEPLOY-001',
20
+ category: 'deployment',
21
+ severity: 'high',
22
+ confidence: 'likely',
23
+ title: 'No CI/CD Pipeline Found',
24
+ check({ files }) {
25
+ const findings = [];
26
+ const ciIndicators = [
27
+ '.github/workflows', '.gitlab-ci.yml', '.gitlab-ci.yaml',
28
+ '.circleci/config.yml', 'Jenkinsfile', '.travis.yml', '.travis.yaml',
29
+ 'bitbucket-pipelines.yml', 'azure-pipelines.yml', '.buildkite',
30
+ ];
31
+
32
+ const hasCIPipeline = [...files.keys()].some(f =>
33
+ ciIndicators.some(indicator => f.includes(indicator))
34
+ );
35
+
36
+ if (!hasCIPipeline) {
37
+ findings.push({
38
+ ruleId: 'DEPLOY-001', category: 'deployment', severity: 'high',
39
+ title: 'No CI/CD pipeline configuration detected',
40
+ description: 'Add a CI/CD pipeline (.github/workflows, .gitlab-ci.yml, etc.) to automate testing and deployment.',
41
+ file: null, line: null, fix: null,
42
+ });
43
+ }
44
+ return findings;
45
+ },
46
+ },
47
+
48
+ // DEPLOY-002: CI pipeline missing test step
49
+ {
50
+ id: 'DEPLOY-002',
51
+ category: 'deployment',
52
+ severity: 'high',
53
+ confidence: 'likely',
54
+ title: 'CI Pipeline Missing Test Step',
55
+ check({ files }) {
56
+ const findings = [];
57
+ for (const [filepath, content] of files) {
58
+ if (isTestFile(filepath)) continue;
59
+ if (!isCIFile(filepath)) continue;
60
+
61
+ const hasTestStep = content.includes('npm test') ||
62
+ content.includes('yarn test') ||
63
+ content.includes('pnpm test') ||
64
+ content.includes('npx jest') ||
65
+ content.includes('npx vitest') ||
66
+ content.includes('pytest') ||
67
+ content.includes('run: test') ||
68
+ /name:\s*['"]?.*test/i.test(content);
69
+
70
+ if (!hasTestStep) {
71
+ findings.push({
72
+ ruleId: 'DEPLOY-002', category: 'deployment', severity: 'high',
73
+ title: 'CI pipeline has no test step',
74
+ description: 'Add a test step to your CI pipeline to catch regressions before deployment.',
75
+ file: filepath, line: null, fix: null,
76
+ });
77
+ }
78
+ }
79
+ return findings;
80
+ },
81
+ },
82
+
83
+ // DEPLOY-003: CI pipeline missing lint step
84
+ {
85
+ id: 'DEPLOY-003',
86
+ category: 'deployment',
87
+ severity: 'medium',
88
+ confidence: 'likely',
89
+ title: 'CI Pipeline Missing Lint Step',
90
+ check({ files }) {
91
+ const findings = [];
92
+ for (const [filepath, content] of files) {
93
+ if (isTestFile(filepath)) continue;
94
+ if (!isCIFile(filepath)) continue;
95
+
96
+ const hasLintStep = content.includes('npm run lint') ||
97
+ content.includes('yarn lint') ||
98
+ content.includes('pnpm lint') ||
99
+ content.includes('npx eslint') ||
100
+ content.includes('eslint') ||
101
+ content.includes('prettier --check') ||
102
+ content.includes('biome check') ||
103
+ /name:\s*['"]?.*lint/i.test(content);
104
+
105
+ if (!hasLintStep) {
106
+ findings.push({
107
+ ruleId: 'DEPLOY-003', category: 'deployment', severity: 'medium',
108
+ title: 'CI pipeline has no lint step',
109
+ description: 'Add a linting step to your CI pipeline to enforce code quality standards.',
110
+ file: filepath, line: null, fix: null,
111
+ });
112
+ }
113
+ }
114
+ return findings;
115
+ },
116
+ },
117
+
118
+ // DEPLOY-004: No branch protection (main/master pushed directly in CI)
119
+ {
120
+ id: 'DEPLOY-004',
121
+ category: 'deployment',
122
+ severity: 'high',
123
+ confidence: 'likely',
124
+ title: 'No Branch Protection',
125
+ check({ files }) {
126
+ const findings = [];
127
+ for (const [filepath, content] of files) {
128
+ if (isTestFile(filepath)) continue;
129
+ if (!isCIFile(filepath)) continue;
130
+
131
+ const lines = content.split('\n');
132
+ for (let i = 0; i < lines.length; i++) {
133
+ const line = lines[i];
134
+ if (line.match(/git\s+push\b/) &&
135
+ (line.includes('main') || line.includes('master')) &&
136
+ !line.includes('--tags') &&
137
+ !line.includes('pull_request')) {
138
+ findings.push({
139
+ ruleId: 'DEPLOY-004', category: 'deployment', severity: 'high',
140
+ title: 'CI config pushes directly to main/master branch',
141
+ description: 'Avoid pushing directly to protected branches. Use pull requests with required reviews and status checks.',
142
+ file: filepath, line: i + 1, fix: null,
143
+ });
144
+ }
145
+ }
146
+ }
147
+ return findings;
148
+ },
149
+ },
150
+
151
+ // DEPLOY-005: Secrets in CI config not using secrets manager
152
+ {
153
+ id: 'DEPLOY-005',
154
+ category: 'deployment',
155
+ severity: 'critical',
156
+ confidence: 'definite',
157
+ title: 'Hardcoded Secrets in CI Config',
158
+ check({ files }) {
159
+ const findings = [];
160
+ const secretPatterns = [
161
+ /(?:password|passwd|secret|token|api_key|apikey|api-key|private_key)\s*[:=]\s*['"][^${}'"]+['"]/i,
162
+ /(?:AWS_SECRET_ACCESS_KEY|AWS_ACCESS_KEY_ID)\s*[:=]\s*['"](?!\$\{\{)[^'"]+['"]/i,
163
+ ];
164
+
165
+ for (const [filepath, content] of files) {
166
+ if (isTestFile(filepath)) continue;
167
+ if (!isCIFile(filepath)) continue;
168
+
169
+ const lines = content.split('\n');
170
+ for (let i = 0; i < lines.length; i++) {
171
+ const line = lines[i];
172
+ if (line.trim().startsWith('#')) continue;
173
+
174
+ for (const pattern of secretPatterns) {
175
+ if (pattern.test(line)) {
176
+ findings.push({
177
+ ruleId: 'DEPLOY-005', category: 'deployment', severity: 'critical',
178
+ title: 'Possible hardcoded secret in CI configuration',
179
+ description: 'Use a secrets manager (${{ secrets.MY_SECRET }} in GitHub Actions, CI/CD variables in GitLab) instead of hardcoding credentials.',
180
+ file: filepath, line: i + 1, fix: null,
181
+ });
182
+ break;
183
+ }
184
+ }
185
+ }
186
+ }
187
+ return findings;
188
+ },
189
+ },
190
+
191
+ // DEPLOY-006: No staging/preview environment
192
+ {
193
+ id: 'DEPLOY-006',
194
+ category: 'deployment',
195
+ severity: 'medium',
196
+ confidence: 'likely',
197
+ title: 'No Staging/Preview Environment',
198
+ check({ files }) {
199
+ const findings = [];
200
+ const ciFiles = [];
201
+ for (const [filepath, content] of files) {
202
+ if (isTestFile(filepath)) continue;
203
+ if (isCIFile(filepath)) {
204
+ ciFiles.push({ filepath, content });
205
+ }
206
+ }
207
+
208
+ if (ciFiles.length === 0) return findings;
209
+
210
+ const hasProduction = ciFiles.some(({ content }) =>
211
+ content.includes('production') || content.includes('prod') ||
212
+ content.includes('deploy')
213
+ );
214
+
215
+ const hasStaging = ciFiles.some(({ content }) =>
216
+ content.includes('staging') || content.includes('preview') ||
217
+ content.includes('canary') || content.includes('pre-prod') ||
218
+ content.includes('preprod') || content.includes('development') ||
219
+ content.includes('dev-deploy')
220
+ );
221
+
222
+ if (hasProduction && !hasStaging) {
223
+ findings.push({
224
+ ruleId: 'DEPLOY-006', category: 'deployment', severity: 'medium',
225
+ title: 'No staging or preview environment detected in CI config',
226
+ description: 'Add a staging/preview deployment step to validate changes before production. Use preview deployments for pull requests.',
227
+ file: ciFiles[0].filepath, line: null, fix: null,
228
+ });
229
+ }
230
+ return findings;
231
+ },
232
+ },
233
+
234
+ // DEPLOY-007: Missing build step in deployment
235
+ {
236
+ id: 'DEPLOY-007',
237
+ category: 'deployment',
238
+ severity: 'high',
239
+ confidence: 'likely',
240
+ title: 'Missing Build Step in Deployment',
241
+ check({ files }) {
242
+ const findings = [];
243
+ for (const [filepath, content] of files) {
244
+ if (isTestFile(filepath)) continue;
245
+ if (!isCIFile(filepath)) continue;
246
+
247
+ const hasDeploy = content.includes('deploy') || content.includes('publish') ||
248
+ content.includes('release');
249
+
250
+ const hasBuild = content.includes('npm run build') ||
251
+ content.includes('yarn build') ||
252
+ content.includes('pnpm build') ||
253
+ content.includes('docker build') ||
254
+ content.includes('make build') ||
255
+ /name:\s*['"]?.*build/i.test(content);
256
+
257
+ if (hasDeploy && !hasBuild) {
258
+ findings.push({
259
+ ruleId: 'DEPLOY-007', category: 'deployment', severity: 'high',
260
+ title: 'CI pipeline deploys without a build step',
261
+ description: 'Add a build step before deployment to compile, bundle, or containerize the application.',
262
+ file: filepath, line: null, fix: null,
263
+ });
264
+ }
265
+ }
266
+ return findings;
267
+ },
268
+ },
269
+
270
+ // DEPLOY-008: No rollback strategy
271
+ {
272
+ id: 'DEPLOY-008',
273
+ category: 'deployment',
274
+ severity: 'high',
275
+ confidence: 'suggestion',
276
+ title: 'No Rollback Strategy',
277
+ check({ files }) {
278
+ const findings = [];
279
+ const ciFiles = [];
280
+ for (const [filepath, content] of files) {
281
+ if (isTestFile(filepath)) continue;
282
+ if (isCIFile(filepath) || filepath.endsWith('Dockerfile') || filepath.endsWith('docker-compose.yml')) {
283
+ ciFiles.push({ filepath, content });
284
+ }
285
+ }
286
+
287
+ if (ciFiles.length === 0) return findings;
288
+
289
+ const hasDeploy = ciFiles.some(({ content }) =>
290
+ content.includes('deploy') || content.includes('publish')
291
+ );
292
+
293
+ const hasRollback = ciFiles.some(({ content }) =>
294
+ content.includes('rollback') || content.includes('roll-back') ||
295
+ content.includes('previous') || content.includes('revert') ||
296
+ content.includes('--revision') || content.includes('image:') &&
297
+ (content.includes('${{ github.sha }}') || content.includes('${CI_COMMIT_SHA}')) ||
298
+ content.includes(':$TAG') || content.includes(':$VERSION') ||
299
+ content.includes('blue-green') || content.includes('blue_green')
300
+ );
301
+
302
+ if (hasDeploy && !hasRollback) {
303
+ findings.push({
304
+ ruleId: 'DEPLOY-008', category: 'deployment', severity: 'high',
305
+ title: 'No rollback strategy detected in deployment config',
306
+ description: 'Pin deployments to versioned images/artifacts (e.g., git SHA tags) and document a rollback procedure. Consider blue-green or canary deployments.',
307
+ file: ciFiles[0].filepath, line: null, fix: null,
308
+ });
309
+ }
310
+ return findings;
311
+ },
312
+ },
313
+
314
+ // DEPLOY-009: Deploy without health check verification
315
+ {
316
+ id: 'DEPLOY-009',
317
+ category: 'deployment',
318
+ severity: 'high',
319
+ confidence: 'likely',
320
+ title: 'Deploy Without Health Check Verification',
321
+ check({ files }) {
322
+ const findings = [];
323
+ for (const [filepath, content] of files) {
324
+ if (isTestFile(filepath)) continue;
325
+ if (!isCIFile(filepath)) continue;
326
+
327
+ const hasDeploy = content.includes('deploy') || content.includes('publish');
328
+
329
+ const hasHealthCheck = content.includes('health') ||
330
+ content.includes('healthz') ||
331
+ content.includes('readiness') ||
332
+ content.includes('liveness') ||
333
+ content.includes('smoke-test') ||
334
+ content.includes('smoke_test') ||
335
+ content.includes('curl') && (content.includes('/health') || content.includes('status')) ||
336
+ content.includes('wait-for') || content.includes('verify');
337
+
338
+ if (hasDeploy && !hasHealthCheck) {
339
+ findings.push({
340
+ ruleId: 'DEPLOY-009', category: 'deployment', severity: 'high',
341
+ title: 'Deployment has no health check or smoke test verification',
342
+ description: 'Add a post-deploy health check or smoke test to verify the service is running correctly after deployment.',
343
+ file: filepath, line: null, fix: null,
344
+ });
345
+ }
346
+ }
347
+ return findings;
348
+ },
349
+ },
350
+
351
+ // DEPLOY-010: Hardcoded production URLs in CI config
352
+ {
353
+ id: 'DEPLOY-010',
354
+ category: 'deployment',
355
+ severity: 'medium',
356
+ confidence: 'likely',
357
+ title: 'Hardcoded Production URLs in CI Config',
358
+ check({ files }) {
359
+ const findings = [];
360
+ const urlPatterns = [
361
+ /https?:\/\/(?:www\.)?[a-zA-Z0-9-]+\.(?:com|io|org|net|app|dev)(?:\/[^\s'"]*)?/,
362
+ /https?:\/\/\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/,
363
+ ];
364
+ const allowedUrls = [
365
+ 'github.com', 'gitlab.com', 'registry.npmjs.org', 'docker.io',
366
+ 'hub.docker.com', 'ghcr.io', 'gcr.io', 'amazonaws.com/ecr',
367
+ 'codecov.io', 'coveralls.io', 'sonarcloud.io',
368
+ ];
369
+
370
+ for (const [filepath, content] of files) {
371
+ if (isTestFile(filepath)) continue;
372
+ if (!isCIFile(filepath)) continue;
373
+
374
+ const lines = content.split('\n');
375
+ for (let i = 0; i < lines.length; i++) {
376
+ const line = lines[i];
377
+ if (line.trim().startsWith('#')) continue;
378
+
379
+ for (const pattern of urlPatterns) {
380
+ const match = line.match(pattern);
381
+ if (match) {
382
+ const url = match[0];
383
+ const isAllowed = allowedUrls.some(allowed => url.includes(allowed));
384
+ const usesVariable = /\$\{\{/.test(line) || /\$\{/.test(line) || /\$[A-Z_]/.test(line);
385
+
386
+ if (!isAllowed && !usesVariable) {
387
+ findings.push({
388
+ ruleId: 'DEPLOY-010', category: 'deployment', severity: 'medium',
389
+ title: 'Hardcoded URL found in CI configuration',
390
+ description: 'Use environment variables or CI/CD secrets for deployment URLs instead of hardcoding them. This makes it easier to manage multiple environments.',
391
+ file: filepath, line: i + 1, fix: null,
392
+ });
393
+ break;
394
+ }
395
+ }
396
+ }
397
+ }
398
+ }
399
+ return findings;
400
+ },
401
+ },
402
+
403
+ // DEPLOY-SEC-001: GitHub Actions not pinned to SHA
404
+ { id: 'DEPLOY-SEC-001', category: 'deployment', severity: 'high', confidence: 'likely', title: 'GitHub Actions Not Pinned to Commit SHA',
405
+ check({ files }) {
406
+ const findings = [];
407
+ for (const [fp, c] of files) {
408
+ if (!fp.includes('.github/workflows')) continue;
409
+ const lines = c.split('\n');
410
+ for (let i = 0; i < lines.length; i++) {
411
+ if (lines[i].match(/uses:\s+\S+@(?:main|master|latest|v\d+)$/) && !lines[i].includes('@sha256')) {
412
+ findings.push({ ruleId: 'DEPLOY-SEC-001', category: 'deployment', severity: 'high',
413
+ title: 'GitHub Action pinned to mutable tag/branch instead of commit SHA',
414
+ description: 'Pin third-party actions to a full commit SHA (uses: actions/checkout@abc123...) to prevent supply chain attacks. Mutable tags can be changed by the action maintainer.', file: fp, line: i + 1, fix: null });
415
+ }
416
+ }
417
+ }
418
+ return findings;
419
+ },
420
+ },
421
+
422
+ // DEPLOY-SEC-002: pull_request_target with checkout
423
+ { id: 'DEPLOY-SEC-002', category: 'deployment', severity: 'critical', confidence: 'definite', title: 'pull_request_target With Untrusted Code Checkout',
424
+ check({ files }) {
425
+ const findings = [];
426
+ for (const [fp, c] of files) {
427
+ if (!fp.includes('.github/workflows')) continue;
428
+ if (c.includes('pull_request_target') && c.match(/actions\/checkout/) && c.match(/ref.*head|head\.ref/)) {
429
+ findings.push({ ruleId: 'DEPLOY-SEC-002', category: 'deployment', severity: 'critical',
430
+ title: 'pull_request_target workflow checks out untrusted PR code with write permissions',
431
+ description: 'This is a known GitHub Actions attack vector (PWNING.md). Never checkout PR branch code in pull_request_target context.', file: fp, fix: null });
432
+ }
433
+ }
434
+ return findings;
435
+ },
436
+ },
437
+
438
+ // DEPLOY-SEC-003: Workflow injection via PR title/body
439
+ { id: 'DEPLOY-SEC-003', category: 'deployment', severity: 'critical', confidence: 'definite', title: 'Workflow Injection via PR Title/Body',
440
+ check({ files }) {
441
+ const findings = [];
442
+ for (const [fp, c] of files) {
443
+ if (!fp.includes('.github/workflows')) continue;
444
+ if (c.match(/\$\{\{\s*github\.event\.(?:pull_request\.|issue\.)(?:title|body|head\.ref)/) &&
445
+ c.match(/run:\s*\S/)) {
446
+ findings.push({ ruleId: 'DEPLOY-SEC-003', category: 'deployment', severity: 'critical',
447
+ title: 'User-controlled GitHub context value used in run: step — command injection risk',
448
+ description: "Don't use ${{ github.event.pull_request.title }} directly in run:. Assign to environment variable first and use that instead.", file: fp, fix: null });
449
+ }
450
+ }
451
+ return findings;
452
+ },
453
+ },
454
+
455
+ // DEPLOY-SEC-004: Secrets in CI logs
456
+ { id: 'DEPLOY-SEC-004', category: 'deployment', severity: 'critical', confidence: 'definite', title: 'Secrets Potentially Exposed in CI Logs',
457
+ check({ files }) {
458
+ const findings = [];
459
+ for (const [fp, c] of files) {
460
+ if (!isCIFile(fp)) continue;
461
+ const lines = c.split('\n');
462
+ for (let i = 0; i < lines.length; i++) {
463
+ if (lines[i].match(/echo\s+\$\{\{.*secrets\.|print\s+\$\{\{.*secrets\./)) {
464
+ findings.push({ ruleId: 'DEPLOY-SEC-004', category: 'deployment', severity: 'critical',
465
+ title: 'Secret value echoed/printed in CI — visible in build logs',
466
+ description: 'Never echo secrets in CI. Use add-mask or ensure secrets are only passed as env vars to steps that need them.', file: fp, line: i + 1, fix: null });
467
+ }
468
+ }
469
+ }
470
+ return findings;
471
+ },
472
+ },
473
+
474
+ // DEPLOY-SEC-005: Long-lived cloud credentials in CI
475
+ { id: 'DEPLOY-SEC-005', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Long-Lived Cloud Credentials in CI',
476
+ check({ files }) {
477
+ const findings = [];
478
+ for (const [fp, c] of files) {
479
+ if (!fp.includes('.github/workflows')) continue;
480
+ if ((c.includes('AWS_ACCESS_KEY_ID') || c.includes('AWS_SECRET_ACCESS_KEY') || c.includes('GOOGLE_CREDENTIALS')) &&
481
+ !c.includes('aws-actions/configure-aws-credentials') && !c.match(/id-token.*write|oidc/i)) {
482
+ findings.push({ ruleId: 'DEPLOY-SEC-005', category: 'deployment', severity: 'high',
483
+ title: 'Using long-lived cloud credentials instead of OIDC',
484
+ description: 'Use GitHub Actions OIDC with aws-actions/configure-aws-credentials for short-lived tokens. Long-lived keys are a leak risk.', file: fp, fix: null });
485
+ }
486
+ }
487
+ return findings;
488
+ },
489
+ },
490
+
491
+ // DEPLOY-REL-001: No semantic versioning
492
+ { id: 'DEPLOY-REL-001', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No Semantic Versioning',
493
+ check({ files, stack }) {
494
+ const findings = [];
495
+ const pkg = files.get('package.json');
496
+ if (!pkg) return findings;
497
+ try {
498
+ const json = JSON.parse(pkg);
499
+ if (!json.version || json.version === '1.0.0' || json.version === '0.0.1') {
500
+ findings.push({ ruleId: 'DEPLOY-REL-001', category: 'deployment', severity: 'low',
501
+ title: 'Package version not updated from default', description: 'Increment the version using semantic versioning (major.minor.patch) with each release. Use npm version commands.', fix: null });
502
+ }
503
+ } catch {}
504
+ return findings;
505
+ },
506
+ },
507
+
508
+ // DEPLOY-REL-002: No automated smoke tests post-deploy
509
+ { id: 'DEPLOY-REL-002', category: 'deployment', severity: 'high', confidence: 'likely', title: 'No Post-Deployment Smoke Tests',
510
+ check({ files }) {
511
+ const findings = [];
512
+ for (const [fp, c] of files) {
513
+ if (!isCIFile(fp)) continue;
514
+ if (c.match(/deploy|release|production/i)) {
515
+ if (!c.match(/smoke|health.*check|post.deploy|after.deploy|curl.*health/i)) {
516
+ findings.push({ ruleId: 'DEPLOY-REL-002', category: 'deployment', severity: 'high',
517
+ title: 'No smoke tests after deployment', description: 'Add a post-deploy step that pings /health and runs critical path smoke tests. Catch broken deployments before users do.', file: fp, fix: null });
518
+ }
519
+ }
520
+ }
521
+ return findings;
522
+ },
523
+ },
524
+
525
+ // DEPLOY-REL-003: No blue-green or canary deployment
526
+ { id: 'DEPLOY-REL-003', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Zero-Downtime Deployment Strategy',
527
+ check({ files }) {
528
+ const has = [...files.values()].some(c => c.match(/blue.green|canary|rolling.*update|traffic.*split|feature.*flag/i));
529
+ if (!has && [...files.values()].some(c => c.match(/deploy|release/i))) {
530
+ return [{ ruleId: 'DEPLOY-REL-003', category: 'deployment', severity: 'medium',
531
+ title: 'No blue-green, canary, or rolling deployment strategy detected',
532
+ description: 'Direct deployments cause downtime. Use rolling updates, blue-green, or canary deployments for zero-downtime releases.', fix: null }];
533
+ }
534
+ return [];
535
+ },
536
+ },
537
+
538
+ // DEPLOY-REL-004: DB migrations not automated
539
+ { id: 'DEPLOY-REL-004', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Database Migrations Not Automated in Deploy',
540
+ check({ files }) {
541
+ const findings = [];
542
+ for (const [fp, c] of files) {
543
+ if (!isCIFile(fp)) continue;
544
+ if (c.match(/deploy|production/i) && !c.match(/migrate|prisma.*db.*push|knex.*migrate|sequelize.*migrate/i)) {
545
+ const hasMigrations = [...files.keys()].some(f => f.includes('migration') || f.includes('migrate'));
546
+ if (hasMigrations) {
547
+ findings.push({ ruleId: 'DEPLOY-REL-004', category: 'deployment', severity: 'high',
548
+ title: 'Database migrations not automated in deployment pipeline', description: 'Add a migration step (prisma migrate deploy, knex migrate:latest) to your CI/CD pipeline before starting the new app version.', file: fp, fix: null });
549
+ }
550
+ }
551
+ }
552
+ return findings;
553
+ },
554
+ },
555
+
556
+ // DEPLOY-REL-005: No rollback strategy
557
+ { id: 'DEPLOY-REL-005', category: 'deployment', severity: 'high', confidence: 'suggestion', title: 'No Rollback Strategy',
558
+ check({ files }) {
559
+ const has = [...files.values()].some(c => c.match(/rollback|revert.*deploy|previous.*version|undo.*deploy/i));
560
+ if (!has && [...files.values()].some(c => c.match(/deploy/i))) {
561
+ return [{ ruleId: 'DEPLOY-REL-005', category: 'deployment', severity: 'high',
562
+ title: 'No rollback strategy documented or configured', description: 'Define a rollback procedure: redeploy previous image tag, feature flags to disable, DB migration rollback plan.', fix: null }];
563
+ }
564
+ return [];
565
+ },
566
+ },
567
+
568
+ // DEPLOY-ENV-001: Shared database between environments
569
+ { id: 'DEPLOY-ENV-001', category: 'deployment', severity: 'critical', confidence: 'definite', title: 'Shared Database Between Environments',
570
+ check({ files }) {
571
+ const findings = [];
572
+ const prodDbUrl = [...files.values()].some(c => c.match(/DATABASE_URL.*prod|production.*DATABASE_URL/i));
573
+ const hasStagingEnv = [...files.keys()].some(f => f.match(/\.env\.staging|\.env\.test/));
574
+ if (prodDbUrl && hasStagingEnv) {
575
+ const stagingContent = [...files.entries()].filter(([f]) => f.match(/\.env\.staging|\.env\.test/)).map(([, c]) => c).join('');
576
+ if (stagingContent.match(/DATABASE_URL.*prod|production/i)) {
577
+ findings.push({ ruleId: 'DEPLOY-ENV-001', category: 'deployment', severity: 'critical',
578
+ title: 'Staging environment may share production database', description: 'Each environment must have its own isolated database. Staging using prod DB can corrupt production data.', fix: null });
579
+ }
580
+ }
581
+ return findings;
582
+ },
583
+ },
584
+
585
+ // DEPLOY-ENV-002: No environment parity
586
+ { id: 'DEPLOY-ENV-002', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Staging Environment',
587
+ check({ files }) {
588
+ const has = [...files.values()].some(c => c.match(/staging|stage|preview.*env|pre.?prod/i)) ||
589
+ [...files.keys()].some(f => f.match(/staging|\.env\.staging/));
590
+ if (!has && [...files.values()].some(c => c.match(/production|prod/i))) {
591
+ return [{ ruleId: 'DEPLOY-ENV-002', category: 'deployment', severity: 'medium',
592
+ title: 'No staging environment detected — deploying directly to production',
593
+ description: 'Add a staging environment that mirrors production. Test deployments there before promoting to production.', fix: null }];
594
+ }
595
+ return [];
596
+ },
597
+ },
598
+
599
+ // DEPLOY-ENV-003: Environment-specific code paths
600
+ { id: 'DEPLOY-ENV-003', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Environment-Specific Code Paths',
601
+ check({ files }) {
602
+ const findings = [];
603
+ for (const [fp, c] of files) {
604
+ if (!fp.match(/\.(js|ts)$/) || fp.includes('config') || fp.includes('test')) continue;
605
+ const lines = c.split('\n');
606
+ for (let i = 0; i < lines.length; i++) {
607
+ if (lines[i].match(/if.*NODE_ENV.*===.*['"]production['"]|process\.env\.NODE_ENV.*production/) &&
608
+ lines[i].match(/skip|disable|bypass|mock/i)) {
609
+ findings.push({ ruleId: 'DEPLOY-ENV-003', category: 'deployment', severity: 'medium',
610
+ title: 'Business logic branching on NODE_ENV — diverging from production behavior',
611
+ description: 'Use feature flags or configuration instead of NODE_ENV branching in business logic. Production-only code paths are not tested in staging.', file: fp, line: i + 1, fix: null });
612
+ }
613
+ }
614
+ }
615
+ return findings;
616
+ },
617
+ },
618
+
619
+ // DEPLOY-REG-001: Docker image not scanned before push
620
+ { id: 'DEPLOY-REG-001', category: 'deployment', severity: 'high', confidence: 'likely', title: 'No Container Vulnerability Scan Before Push',
621
+ check({ files }) {
622
+ const findings = [];
623
+ for (const [fp, c] of files) {
624
+ if (!isCIFile(fp)) continue;
625
+ if (c.match(/docker.*push|push.*docker|ecr.*push/i)) {
626
+ if (!c.match(/trivy|grype|snyk.*container|docker.*scan|anchore|clair/i)) {
627
+ findings.push({ ruleId: 'DEPLOY-REG-001', category: 'deployment', severity: 'high',
628
+ title: 'Docker image pushed without vulnerability scan', description: 'Add Trivy or Grype to scan images before pushing. Container vulnerabilities are a major attack surface.', file: fp, fix: null });
629
+ }
630
+ }
631
+ }
632
+ return findings;
633
+ },
634
+ },
635
+
636
+ // DEPLOY-SEC-010: CI workflow with excessive permissions
637
+ { id: 'DEPLOY-SEC-010', category: 'deployment', severity: 'high', confidence: 'likely', title: 'CI Workflow With Excessive Permissions',
638
+ check({ files }) {
639
+ const findings = [];
640
+ for (const [fp, c] of files) {
641
+ if (!isCIFile(fp)) continue;
642
+ if (c.match(/permissions:\s*write-all|permissions:\s*\n\s+contents:\s*write/) && !c.match(/\/\/ minimal/i)) {
643
+ findings.push({ ruleId: 'DEPLOY-SEC-010', category: 'deployment', severity: 'high', title: 'GitHub Actions workflow with write-all permissions', description: 'Use minimal permissions: permissions: { contents: read }. write-all gives workflows ability to modify any repo resource.', file: fp, fix: null });
644
+ }
645
+ }
646
+ return findings;
647
+ },
648
+ },
649
+
650
+ // DEPLOY-SEC-011: Artifact not signed
651
+ { id: 'DEPLOY-SEC-011', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Release Artifacts Not Signed',
652
+ check({ files }) {
653
+ const findings = [];
654
+ for (const [fp, c] of files) {
655
+ if (!isCIFile(fp)) continue;
656
+ const hasRelease = c.match(/release|publish|deploy|npm\s+publish/i);
657
+ const hasSigning = c.match(/gpg|cosign|sigstore|sign|checksum|sha256|SLSA/i);
658
+ if (hasRelease && !hasSigning) {
659
+ findings.push({ ruleId: 'DEPLOY-SEC-011', category: 'deployment', severity: 'medium', title: 'Release pipeline without artifact signing', description: 'Sign artifacts with GPG or Sigstore/Cosign. Signed artifacts enable consumers to verify they downloaded untampered code.', file: fp, fix: null });
660
+ }
661
+ }
662
+ return findings;
663
+ },
664
+ },
665
+
666
+ // DEPLOY-SEC-012: Production secrets in CI environment variables (not in vault)
667
+ { id: 'DEPLOY-SEC-012', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Secrets Set Directly in CI (Not From Vault)',
668
+ check({ files }) {
669
+ const findings = [];
670
+ for (const [fp, c] of files) {
671
+ if (!isCIFile(fp)) continue;
672
+ const lines = c.split('\n');
673
+ for (let i = 0; i < lines.length; i++) {
674
+ if (lines[i].match(/env:|environment:/i)) {
675
+ const block = lines.slice(i, i + 20).join('\n');
676
+ if (block.match(/PASSWORD\s*:|SECRET\s*:|API_KEY\s*:|PRIVATE_KEY\s*:/i) && !block.match(/\$\{\{|secrets\.|vault\.|aws.*secrets/i)) {
677
+ findings.push({ ruleId: 'DEPLOY-SEC-012', category: 'deployment', severity: 'high', title: 'Secret in CI env block without using secrets manager', description: 'Use ${{ secrets.MY_SECRET }} (GitHub) or vault references. Hardcoded secrets in workflow files are committed to the repository.', file: fp, line: i + 1, fix: null });
678
+ }
679
+ }
680
+ }
681
+ }
682
+ return findings;
683
+ },
684
+ },
685
+
686
+ // DEPLOY-PROC-001: No database migration strategy
687
+ { id: 'DEPLOY-PROC-001', category: 'deployment', severity: 'high', confidence: 'likely', title: 'No Database Migration Strategy in Deployment',
688
+ check({ files }) {
689
+ const findings = [];
690
+ const hasMigrations = [...files.values()].some(c => c.match(/migrate|knex.*migrate|sequelize.*migrate|prisma.*migrate|flyway|liquibase/i));
691
+ const hasDeployScript = [...files.values()].some(c => c.match(/deploy|release|ship/i));
692
+ if (hasDeployScript && !hasMigrations) {
693
+ findings.push({ ruleId: 'DEPLOY-PROC-001', category: 'deployment', severity: 'high', title: 'Deployment pipeline without database migration step', description: 'Add migration step before app deploy: npm run db:migrate. Deploying app before migrations causes errors if code expects new schema.', fix: null });
694
+ }
695
+ return findings;
696
+ },
697
+ },
698
+
699
+ // DEPLOY-PROC-002: Deployment without health check
700
+ { id: 'DEPLOY-PROC-002', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Deployment Without Post-Deploy Health Check',
701
+ check({ files }) {
702
+ const findings = [];
703
+ for (const [fp, c] of files) {
704
+ if (!isCIFile(fp)) continue;
705
+ if (c.match(/deploy|kubectl.*apply|helm.*upgrade|eb.*deploy/i)) {
706
+ if (!c.match(/health.*check|curl.*health|wait.*healthy|readiness|smoke.*test/i)) {
707
+ findings.push({ ruleId: 'DEPLOY-PROC-002', category: 'deployment', severity: 'high', title: 'Deployment without post-deploy health check — broken deploys go undetected', description: 'Add health check after deploy: curl -f https://api.example.com/health. Failed health checks should trigger automatic rollback.', file: fp, fix: null });
708
+ }
709
+ }
710
+ }
711
+ return findings;
712
+ },
713
+ },
714
+
715
+ // DEPLOY-PROC-003: Manual approval missing for production
716
+ { id: 'DEPLOY-PROC-003', category: 'deployment', severity: 'high', confidence: 'likely', title: 'No Manual Approval Gate for Production Deploy',
717
+ check({ files }) {
718
+ const findings = [];
719
+ for (const [fp, c] of files) {
720
+ if (!isCIFile(fp)) continue;
721
+ if (c.match(/production|prod\b/i) && c.match(/deploy|release/i)) {
722
+ if (!c.match(/approval|reviewers|manual.*trigger|workflow_dispatch|environment.*production/i)) {
723
+ findings.push({ ruleId: 'DEPLOY-PROC-003', category: 'deployment', severity: 'high', title: 'Production deployment without manual approval gate', description: 'Use GitHub Environments with required reviewers or workflow_dispatch for prod deploys. Automated prod deploys from CI can deploy broken code.', file: fp, fix: null });
724
+ }
725
+ }
726
+ }
727
+ return findings;
728
+ },
729
+ },
730
+
731
+ // DEPLOY-PROC-004: No staging environment
732
+ { id: 'DEPLOY-PROC-004', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Staging Environment in Pipeline',
733
+ check({ files }) {
734
+ const findings = [];
735
+ for (const [fp, c] of files) {
736
+ if (!isCIFile(fp)) continue;
737
+ if (c.match(/production|prod\b/i) && c.match(/deploy/i)) {
738
+ if (!c.match(/staging|stage\b|preview|uat|preprod/i)) {
739
+ findings.push({ ruleId: 'DEPLOY-PROC-004', category: 'deployment', severity: 'medium', title: 'Pipeline deploys directly to production without staging environment', description: 'Add a staging environment that mirrors production. Catch environment-specific bugs before they reach production customers.', file: fp, fix: null });
740
+ }
741
+ }
742
+ }
743
+ return findings;
744
+ },
745
+ },
746
+
747
+ // DEPLOY-PROC-005: No SAST in CI pipeline
748
+ { id: 'DEPLOY-PROC-005', category: 'deployment', severity: 'high', confidence: 'likely', title: 'No Static Application Security Testing (SAST) in CI',
749
+ check({ files }) {
750
+ const findings = [];
751
+ const hasSAST = [...files.values()].some(c => c.match(/semgrep|codeql|bandit|sonarqube|checkmarx|fortify|eslint.*security|tslint.*security/i));
752
+ if (!hasSAST) {
753
+ findings.push({ ruleId: 'DEPLOY-PROC-005', category: 'deployment', severity: 'high', title: 'No SAST tool in CI pipeline — security vulnerabilities not caught before deployment', description: 'Add Semgrep or CodeQL to CI. SAST tools catch injection, XSS, and other vulnerabilities automatically in pull requests.', fix: null });
754
+ }
755
+ return findings;
756
+ },
757
+ },
758
+
759
+ // DEPLOY-PROC-006: No DAST in pipeline
760
+ { id: 'DEPLOY-PROC-006', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Dynamic Application Security Testing (DAST)',
761
+ check({ files }) {
762
+ const findings = [];
763
+ const hasDAST = [...files.values()].some(c => c.match(/owasp.*zap|nuclei|burp.*suite|nikto|arachni/i));
764
+ if (!hasDAST) {
765
+ findings.push({ ruleId: 'DEPLOY-PROC-006', category: 'deployment', severity: 'medium', title: 'No DAST tool configured — runtime security vulnerabilities not tested', description: 'Add OWASP ZAP or Nuclei to your staging pipeline. DAST tests the running application and finds vulnerabilities SAST misses (auth bypasses, etc.).', fix: null });
766
+ }
767
+ return findings;
768
+ },
769
+ },
770
+
771
+ // DEPLOY-PROC-007: No Infrastructure drift detection
772
+ { id: 'DEPLOY-PROC-007', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Infrastructure Drift Detection',
773
+ check({ files }) {
774
+ const findings = [];
775
+ const hasTerraform = [...files.keys()].some(f => f.match(/\.tf$/));
776
+ const hasDrift = [...files.values()].some(c => c.match(/terraform.*plan|driftctl|infracost.*plan|checkov|tfsec/i));
777
+ if (hasTerraform && !hasDrift) {
778
+ findings.push({ ruleId: 'DEPLOY-PROC-007', category: 'deployment', severity: 'medium', title: 'No Terraform drift detection in CI — manual changes go undetected', description: 'Run terraform plan in CI and alert on drift. Manual console changes create drift that causes the next terraform apply to fail unexpectedly.', fix: null });
779
+ }
780
+ return findings;
781
+ },
782
+ },
783
+
784
+ // DEPLOY-PROC-008: Feature flags not used for risky changes
785
+ { id: 'DEPLOY-PROC-008', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No Feature Flag System',
786
+ check({ files, stack }) {
787
+ const findings = [];
788
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
789
+ const hasFlags = ['launchdarkly-node-client-sdk', '@launchdarkly/node-server-sdk', 'unleash-client', 'flagsmith', 'posthog-node', '@growthbook/growthbook'].some(d => d in allDeps);
790
+ if (!hasFlags && Object.keys(allDeps).length > 30) {
791
+ findings.push({ ruleId: 'DEPLOY-PROC-008', category: 'deployment', severity: 'low', title: 'No feature flag system — cannot do gradual rollouts or kill-switch deployments', description: 'Add LaunchDarkly, Unleash, or Flagsmith. Feature flags enable canary releases, instant rollback without redeployment, and A/B testing.', fix: null });
792
+ }
793
+ return findings;
794
+ },
795
+ },
796
+
797
+ // DEPLOY-PROC-009: Deployment without notify/changelog
798
+ { id: 'DEPLOY-PROC-009', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'Deployment Without Notification',
799
+ check({ files }) {
800
+ const findings = [];
801
+ for (const [fp, c] of files) {
802
+ if (!isCIFile(fp)) continue;
803
+ if (c.match(/production|prod\b/i) && c.match(/deploy/i)) {
804
+ if (!c.match(/slack|teams|discord|pagerduty|webhook|notify|notification/i)) {
805
+ findings.push({ ruleId: 'DEPLOY-PROC-009', category: 'deployment', severity: 'low', title: 'Production deployment without notifications', description: 'Send deployment notifications to Slack/Teams. Teams need to know when prod changes so they can correlate with user reports.', file: fp, fix: null });
806
+ }
807
+ }
808
+ }
809
+ return findings;
810
+ },
811
+ },
812
+
813
+ // DEPLOY-PROC-010: No dependency license check in CI
814
+ { id: 'DEPLOY-PROC-010', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No License Compliance Check in CI',
815
+ check({ files }) {
816
+ const findings = [];
817
+ const hasLicenseCheck = [...files.values()].some(c => c.match(/license-checker|fossa|snyk.*license|licensee/i));
818
+ if (!hasLicenseCheck) {
819
+ findings.push({ ruleId: 'DEPLOY-PROC-010', category: 'deployment', severity: 'medium', title: 'No license compliance check in CI pipeline', description: 'Add license-checker or FOSSA to block GPL/AGPL dependencies in CI. GPL in production may require open-sourcing your application.', fix: null });
820
+ }
821
+ return findings;
822
+ },
823
+ },
824
+
825
+ // DEPLOY-PROC-011: Deployment script with --force or unsafe flags
826
+ { id: 'DEPLOY-PROC-011', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Deployment Using --force Flags',
827
+ check({ files }) {
828
+ const findings = [];
829
+ for (const [fp, c] of files) {
830
+ if (!isCIFile(fp) && !fp.match(/deploy|release|Makefile/i)) continue;
831
+ const lines = c.split('\n');
832
+ for (let i = 0; i < lines.length; i++) {
833
+ if (lines[i].match(/git push.*--force|kubectl.*--force|helm.*--force-upgrade|npm.*--force/i)) {
834
+ findings.push({ ruleId: 'DEPLOY-PROC-011', category: 'deployment', severity: 'high', title: `--force flag in deployment script — bypasses safety checks`, description: 'Remove --force from deployment commands. Force operations bypass safety mechanisms and can cause irreversible damage.', file: fp, line: i + 1, fix: null });
835
+ }
836
+ }
837
+ }
838
+ return findings;
839
+ },
840
+ },
841
+
842
+ // DEPLOY-PROC-012: No automated E2E tests in staging pipeline
843
+ { id: 'DEPLOY-PROC-012', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Automated E2E Tests in Staging Pipeline',
844
+ check({ files, stack }) {
845
+ const findings = [];
846
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
847
+ const hasE2E = ['cypress', 'playwright', '@playwright/test', 'puppeteer', 'testcafe', 'nightwatch'].some(d => d in allDeps);
848
+ const hasE2EInCI = [...files.values()].some(c => c.match(/cypress|playwright|puppeteer|e2e.*test|integration.*test/i));
849
+ if (!hasE2E && !hasE2EInCI && Object.keys(allDeps).length > 10) {
850
+ findings.push({ ruleId: 'DEPLOY-PROC-012', category: 'deployment', severity: 'medium', title: 'No E2E test framework — critical user journeys not automatically tested before deploy', description: 'Add Playwright or Cypress for E2E tests. Automated E2E tests catch regressions that unit tests miss.', fix: null });
851
+ }
852
+ return findings;
853
+ },
854
+ },
855
+
856
+ // DEPLOY-SEC-013: Reusable workflow with no input validation
857
+ { id: 'DEPLOY-SEC-013', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Reusable Workflow Without Input Validation',
858
+ check({ files }) {
859
+ const findings = [];
860
+ for (const [fp, c] of files) {
861
+ if (!isCIFile(fp)) continue;
862
+ if (c.match(/workflow_call:/i) && c.match(/inputs:/i)) {
863
+ if (!c.match(/required:\s*true|type:\s*string|type:\s*boolean/i)) {
864
+ findings.push({ ruleId: 'DEPLOY-SEC-013', category: 'deployment', severity: 'medium', title: 'Reusable GitHub Actions workflow with untyped inputs', description: 'Add type: and required: to workflow_call inputs. Untyped inputs can receive unexpected values causing security issues or failures.', file: fp, fix: null });
865
+ }
866
+ }
867
+ }
868
+ return findings;
869
+ },
870
+ },
871
+
872
+ // DEPLOY-SEC-014: NPM publish without provenance
873
+ { id: 'DEPLOY-SEC-014', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'npm publish Without Provenance',
874
+ check({ files }) {
875
+ const findings = [];
876
+ for (const [fp, c] of files) {
877
+ if (!isCIFile(fp)) continue;
878
+ if (c.match(/npm\s+publish/i) && !c.match(/--provenance|provenance.*true/i)) {
879
+ findings.push({ ruleId: 'DEPLOY-SEC-014', category: 'deployment', severity: 'medium', title: 'npm publish without --provenance flag — published package not linked to source', description: 'Add --provenance flag: npm publish --provenance. Provenance links packages to their source repo via OIDC, allowing users to verify origin.', file: fp, fix: null });
880
+ }
881
+ }
882
+ return findings;
883
+ },
884
+ },
885
+
886
+ // DEPLOY-PROC-013: No rollback tested in pipeline
887
+ { id: 'DEPLOY-PROC-013', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Rollback Strategy Not Automated',
888
+ check({ files }) {
889
+ const findings = [];
890
+ const hasRollback = [...files.values()].some(c => c.match(/rollback|roll-back|revert.*deploy|deploy.*revert|previous.*version|downgrade/i));
891
+ if (!hasRollback) {
892
+ findings.push({ ruleId: 'DEPLOY-PROC-013', category: 'deployment', severity: 'medium', title: 'No automated rollback strategy — manual recovery from failed deploys', description: 'Implement automated rollback on health check failure. Use Helm rollback, ECS revert, or Kubernetes rollout undo. Mean time to recovery depends on fast rollback.', fix: null });
893
+ }
894
+ return findings;
895
+ },
896
+ },
897
+
898
+ // DEPLOY-PROC-014: Build artifact not reused across environments
899
+ { id: 'DEPLOY-PROC-014', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Build Artifact Not Promoted Across Environments',
900
+ check({ files }) {
901
+ const findings = [];
902
+ for (const [fp, c] of files) {
903
+ if (!isCIFile(fp)) continue;
904
+ if (c.match(/build.*production|npm.*build.*prod/i)) {
905
+ if (!c.match(/artifact|promote|registry|ecr|pull.*same.*image|image.*sha/i)) {
906
+ findings.push({ ruleId: 'DEPLOY-PROC-014', category: 'deployment', severity: 'medium', title: 'Build executed per environment instead of promoting artifacts', description: 'Build once, promote the same artifact: build → staging → production. Rebuilding per environment means staging tests a different binary than production.', file: fp, fix: null });
907
+ }
908
+ }
909
+ }
910
+ return findings;
911
+ },
912
+ },
913
+
914
+ // DEPLOY-PROC-015: No dependency pinning in Dockerfile
915
+ { id: 'DEPLOY-PROC-015', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Dockerfile Installs Unpinned System Packages',
916
+ check({ files }) {
917
+ const findings = [];
918
+ for (const [fp, c] of files) {
919
+ if (!fp.endsWith('Dockerfile') && !fp.match(/Dockerfile\./)) continue;
920
+ const lines = c.split('\n');
921
+ for (let i = 0; i < lines.length; i++) {
922
+ if (lines[i].match(/apt-get install|apk add|yum install/i) && !lines[i].match(/=\d|--no-install-recommends.*[=\d]/)) {
923
+ findings.push({ ruleId: 'DEPLOY-PROC-015', category: 'deployment', severity: 'medium', title: 'System package installed without version pinning — non-reproducible builds', description: 'Pin package versions: apt-get install curl=7.88.1. Unpinned packages produce different images on each build, making rollback unreliable.', file: fp, line: i + 1, fix: null });
924
+ }
925
+ }
926
+ }
927
+ return findings;
928
+ },
929
+ },
930
+
931
+ // DEPLOY-PROC-016: No GitOps for K8s deployments
932
+ { id: 'DEPLOY-PROC-016', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'Kubernetes Without GitOps',
933
+ check({ files }) {
934
+ const findings = [];
935
+ const hasK8s = [...files.values()].some(c => c.match(/kind:\s*Deployment|kubectl.*apply/i));
936
+ const hasGitOps = [...files.values()].some(c => c.match(/argocd|flux\b|gitops|FluxCD|ArgoCD/i));
937
+ if (hasK8s && !hasGitOps) {
938
+ findings.push({ ruleId: 'DEPLOY-PROC-016', category: 'deployment', severity: 'low', title: 'Kubernetes without GitOps (ArgoCD/Flux) — cluster state not version controlled', description: 'Use ArgoCD or FluxCD for declarative GitOps deployments. kubectl apply in CI is fragile; GitOps ensures cluster state matches git.', fix: null });
939
+ }
940
+ return findings;
941
+ },
942
+ },
943
+
944
+ // DEPLOY-PROC-017: No dependabot/renovate for CI dependencies
945
+ { id: 'DEPLOY-PROC-017', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'GitHub Actions Not Auto-Updated',
946
+ check({ files }) {
947
+ const findings = [];
948
+ const hasDependabot = [...files.keys()].some(f => f.match(/dependabot\.ya?ml|renovate\.json/));
949
+ const hasActions = [...files.keys()].some(f => f.match(/\.github\/workflows/));
950
+ if (hasActions && !hasDependabot) {
951
+ findings.push({ ruleId: 'DEPLOY-PROC-017', category: 'deployment', severity: 'medium', title: 'No Dependabot/Renovate for GitHub Actions — action vulnerabilities not auto-patched', description: 'Add .github/dependabot.yml with package-ecosystem: github-actions. GitHub Actions have CVEs too and need automated updates.', fix: null });
952
+ }
953
+ return findings;
954
+ },
955
+ },
956
+
957
+ // DEPLOY-SEC-006: Untrusted input in GitHub Actions run step
958
+ { id: 'DEPLOY-SEC-006', category: 'deployment', severity: 'high', confidence: 'likely', title: 'GitHub Actions run Step With Untrusted Input',
959
+ check({ files }) {
960
+ const findings = [];
961
+ for (const [fp, c] of files) {
962
+ if (!isCIFile(fp)) continue;
963
+ const lines = c.split('\n');
964
+ for (let i = 0; i < lines.length; i++) {
965
+ if (lines[i].match(/run:\s/) && lines.slice(i, i + 5).join('\n').match(/\$\{\{\s*(?:github\.event\.|inputs\.|env\.|matrix\.)/)) {
966
+ findings.push({ ruleId: 'DEPLOY-SEC-006', category: 'deployment', severity: 'high', title: 'GitHub Actions run step with expression from external context', description: 'Assign to env var first: env: TITLE: ${{ github.event.pull_request.title }}. Then use $TITLE in run:. Direct interpolation in run: enables script injection.', file: fp, line: i + 1, fix: null });
967
+ }
968
+ }
969
+ }
970
+ return findings;
971
+ },
972
+ },
973
+ // DEPLOY-PROC-018: No canary deployment strategy
974
+ { id: 'DEPLOY-PROC-018', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No Canary or Progressive Delivery',
975
+ check({ files }) {
976
+ const findings = [];
977
+ const allCode = [...files.values()].join('\n');
978
+ const hasCanary = allCode.match(/canary|blue.*green|progressive.*delivery|flagger|argo.*rollouts|weighted.*traffic/i);
979
+ if (!hasCanary) {
980
+ findings.push({ ruleId: 'DEPLOY-PROC-018', category: 'deployment', severity: 'low', title: 'No canary deployment or progressive delivery strategy', description: 'Use canary releases or blue-green deployments. Roll out to 5% of traffic first, validate, then increase. Reduces blast radius of bad deploys.', fix: null });
981
+ }
982
+ return findings;
983
+ },
984
+ },
985
+ // DEPLOY-PROC-019: No semantic-release or conventional commits
986
+ { id: 'DEPLOY-PROC-019', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No Conventional Commit Messages',
987
+ check({ files, stack }) {
988
+ const findings = [];
989
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
990
+ const hasConventional = ['semantic-release', '@commitlint/cli', 'commitizen', 'standard-version', 'release-please'].some(d => d in allDeps);
991
+ if (!hasConventional) {
992
+ findings.push({ ruleId: 'DEPLOY-PROC-019', category: 'deployment', severity: 'low', title: 'No commit message convention enforced', description: 'Add commitlint + semantic-release. Conventional commits enable auto-generated changelogs and semantic versioning based on commit messages.', fix: null });
993
+ }
994
+ return findings;
995
+ },
996
+ },
997
+ // DEPLOY-PROC-020: No infrastructure cost estimation in CI
998
+ { id: 'DEPLOY-PROC-020', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No Infrastructure Cost Estimation in PRs',
999
+ check({ files }) {
1000
+ const findings = [];
1001
+ const hasTerraform = [...files.keys()].some(f => f.match(/\.tf$/));
1002
+ const hasCostEstimate = [...files.values()].some(c => c.match(/infracost|cost.*estimate|terraform.*cost/i));
1003
+ if (hasTerraform && !hasCostEstimate) {
1004
+ findings.push({ ruleId: 'DEPLOY-PROC-020', category: 'deployment', severity: 'low', title: 'Terraform without Infracost — no cost visibility in pull requests', description: 'Add Infracost to CI to show cost changes in PRs. Prevents accidental addition of expensive resources (NAT Gateway, data transfer) without review.', fix: null });
1005
+ }
1006
+ return findings;
1007
+ },
1008
+ },
1009
+ // DEPLOY-PROC-021: Build without reproducibility
1010
+ { id: 'DEPLOY-PROC-021', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Non-Reproducible Builds',
1011
+ check({ files }) {
1012
+ const findings = [];
1013
+ for (const [fp, c] of files) {
1014
+ if (!fp.endsWith('Dockerfile') && !fp.match(/Dockerfile\./)) continue;
1015
+ if (c.match(/apt-get install|apk add/i) && !c.match(/apt-get install.*=\d|apk add.*=\d/i)) {
1016
+ findings.push({ ruleId: 'DEPLOY-PROC-021', category: 'deployment', severity: 'medium', title: 'Dockerfile installs unpinned system packages — builds not reproducible', description: 'Pin all apt/apk packages with exact versions. Unpinned packages produce different binaries on each build, making rollback verification unreliable.', file: fp, fix: null });
1017
+ }
1018
+ }
1019
+ return findings;
1020
+ },
1021
+ },
1022
+ // DEPLOY-SEC-007: GitHub Actions workflow with excessive permissions
1023
+ { id: 'DEPLOY-SEC-007', category: 'deployment', severity: 'high', confidence: 'likely', title: 'GitHub Actions Workflow with Top-Level write-all Permissions',
1024
+ check({ files }) {
1025
+ const findings = [];
1026
+ for (const [fp, c] of files) {
1027
+ if (!isCIFile(fp)) continue;
1028
+ if (c.match(/permissions:\s*write-all/) || (c.match(/permissions:/) && c.match(/:\s*write-all/))) {
1029
+ findings.push({ ruleId: 'DEPLOY-SEC-007', category: 'deployment', severity: 'high', title: 'Workflow has write-all permissions — follows least-privilege principle', description: 'Specify minimum required permissions per job. write-all grants the workflow token access to modify all repository resources, increasing blast radius of a compromised step.', file: fp, fix: null });
1030
+ }
1031
+ }
1032
+ return findings;
1033
+ },
1034
+ },
1035
+ // DEPLOY-PROC-022: No security scanning in PR checks
1036
+ { id: 'DEPLOY-PROC-022', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Pull Request Workflow Without Security Scanning Step',
1037
+ check({ files }) {
1038
+ const findings = [];
1039
+ for (const [fp, c] of files) {
1040
+ if (!isCIFile(fp)) continue;
1041
+ if (!c.match(/pull_request:|on:.*pull_request/)) continue;
1042
+ const hasSecurity = c.match(/trivy|snyk|grype|codeql|semgrep|checkov|trufflehog|gitleaks|bandit|gosec|npm audit|yarn audit/i);
1043
+ if (!hasSecurity) {
1044
+ findings.push({ ruleId: 'DEPLOY-PROC-022', category: 'deployment', severity: 'high', title: 'PR workflow missing security scanner — vulnerabilities not caught before merge', description: 'Add Trivy, Snyk, or CodeQL to PR checks. Security scanning on every PR catches vulnerabilities before they reach main branch and prevents regression.', file: fp, fix: null });
1045
+ }
1046
+ }
1047
+ return findings;
1048
+ },
1049
+ },
1050
+ // DEPLOY-PROC-023: Container image not scanned before deployment
1051
+ { id: 'DEPLOY-PROC-023', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Container Image Deployed Without Vulnerability Scan',
1052
+ check({ files }) {
1053
+ const findings = [];
1054
+ for (const [fp, c] of files) {
1055
+ if (!isCIFile(fp)) continue;
1056
+ if (!c.match(/docker.*build|docker.*push|Build.*image|build.*container/i)) continue;
1057
+ const hasScan = c.match(/trivy.*image|grype|snyk.*container|docker.*scout|aqua.*scanner|anchore/i);
1058
+ if (!hasScan) {
1059
+ findings.push({ ruleId: 'DEPLOY-PROC-023', category: 'deployment', severity: 'high', title: 'Docker image built and pushed without vulnerability scanning', description: 'Add Trivy or Grype image scan before pushing. Container images accumulate OS package vulnerabilities. Scanning before push prevents deploying images with known CVEs.', file: fp, fix: null });
1060
+ }
1061
+ }
1062
+ return findings;
1063
+ },
1064
+ },
1065
+ // DEPLOY-PROC-024: Deployment without blue-green strategy
1066
+ { id: 'DEPLOY-PROC-024', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Zero-Downtime Deployment Strategy',
1067
+ check({ files }) {
1068
+ const findings = [];
1069
+ for (const [fp, c] of files) {
1070
+ if (!isCIFile(fp)) continue;
1071
+ if (!c.match(/deploy|release|production/i)) continue;
1072
+ const hasZeroDowntime = c.match(/blue.?green|canary|rolling.*update|traffic.*shifting|weighted.*routing|deployment.*strategy/i);
1073
+ if (!hasZeroDowntime) {
1074
+ findings.push({ ruleId: 'DEPLOY-PROC-024', category: 'deployment', severity: 'medium', title: 'Deployment pipeline without zero-downtime strategy', description: 'Implement blue-green or rolling deployments. Direct cutover deployments cause downtime. Blue-green allows instant rollback; rolling updates maintain availability during deployment.', file: fp, fix: null });
1075
+ }
1076
+ }
1077
+ return findings;
1078
+ },
1079
+ },
1080
+ // DEPLOY-PROC-025: Missing artifact attestation
1081
+ { id: 'DEPLOY-PROC-025', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Build Artifacts Without SLSA Provenance Attestation',
1082
+ check({ files }) {
1083
+ const findings = [];
1084
+ for (const [fp, c] of files) {
1085
+ if (!isCIFile(fp)) continue;
1086
+ if (!c.match(/npm.*publish|docker.*push|release.*artifact/i)) continue;
1087
+ const hasAttestation = c.match(/slsa-github-generator|attest-build-provenance|cosign.*sign|in-toto/i);
1088
+ if (!hasAttestation) {
1089
+ findings.push({ ruleId: 'DEPLOY-PROC-025', category: 'deployment', severity: 'medium', title: 'Published artifact without provenance attestation (SLSA)', description: 'Add SLSA provenance using slsa-github-generator or GitHub attest-build-provenance action. Provenance attestation allows consumers to verify build integrity and origin.', file: fp, fix: null });
1090
+ }
1091
+ }
1092
+ return findings;
1093
+ },
1094
+ },
1095
+ // DEPLOY-PROC-026: Using latest Docker base image tag in production
1096
+ { id: 'DEPLOY-PROC-026', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Production Dockerfile Uses :latest Tag',
1097
+ check({ files }) {
1098
+ const findings = [];
1099
+ for (const [fp, c] of files) {
1100
+ if (!fp.endsWith('Dockerfile') && !fp.match(/Dockerfile\./)) continue;
1101
+ const lines = c.split('\n');
1102
+ for (let i = 0; i < lines.length; i++) {
1103
+ if (lines[i].match(/^FROM\s+\S+:latest\s*$|^FROM\s+\S+\s*$/) && !lines[i].match(/^FROM\s+\S+:\d|alpine|slim|AS\s+/i)) {
1104
+ findings.push({ ruleId: 'DEPLOY-PROC-026', category: 'deployment', severity: 'high', title: 'Dockerfile FROM uses :latest — non-deterministic builds', description: 'Pin base image to a specific digest (FROM node:20.11.0-alpine3.19@sha256:...). The :latest tag changes over time, causing builds to fail unexpectedly when the base image is updated.', file: fp, line: i + 1, fix: null });
1105
+ }
1106
+ }
1107
+ }
1108
+ return findings;
1109
+ },
1110
+ },
1111
+ // DEPLOY-SEC-008: Secrets accessible to all workflow jobs
1112
+ { id: 'DEPLOY-SEC-008', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Repository Secrets Exposed to Untrusted Workflow Steps',
1113
+ check({ files }) {
1114
+ const findings = [];
1115
+ for (const [fp, c] of files) {
1116
+ if (!isCIFile(fp)) continue;
1117
+ const lines = c.split('\n');
1118
+ for (let i = 0; i < lines.length; i++) {
1119
+ if (lines[i].match(/secrets\.\w+/) && !lines[i].match(/\/\/|#/)) {
1120
+ const ctx = lines.slice(Math.max(0, i - 20), i).join('\n');
1121
+ if (ctx.match(/uses:.*[^/]+\/[^@]+@\s*$|uses:.*[^/]+\/[^@]+$/) && !ctx.match(/@v\d|@[a-f0-9]{40}/)) {
1122
+ findings.push({ ruleId: 'DEPLOY-SEC-008', category: 'deployment', severity: 'high', title: 'Secret passed to Action using unpinned version — supply chain risk', description: 'Pin all Actions to a full commit SHA before passing secrets. Unpinned Actions can be updated maliciously to exfiltrate secrets.', file: fp, line: i + 1, fix: null });
1123
+ }
1124
+ }
1125
+ }
1126
+ }
1127
+ return findings;
1128
+ },
1129
+ },
1130
+ // DEPLOY-PROC-027: No dependency review in PRs
1131
+ { id: 'DEPLOY-PROC-027', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Dependency Review Action on Pull Requests',
1132
+ check({ files }) {
1133
+ const findings = [];
1134
+ const hasDependencyReview = [...files.values()].some(c => c.match(/dependency-review-action|actions\/dependency-review/i));
1135
+ const hasPRWorkflow = [...files.values()].some(c => c.match(/on:.*pull_request|on:.*\n\s+pull_request/));
1136
+ if (hasPRWorkflow && !hasDependencyReview) {
1137
+ findings.push({ ruleId: 'DEPLOY-PROC-027', category: 'deployment', severity: 'medium', title: 'PRs not scanned for vulnerable dependency additions', description: 'Add actions/dependency-review-action to PR workflows. This action blocks PRs that add dependencies with known vulnerabilities or incompatible licenses.', fix: null });
1138
+ }
1139
+ return findings;
1140
+ },
1141
+ },
1142
+ // DEPLOY-PROC-028: No infrastructure drift detection
1143
+ { id: 'DEPLOY-PROC-028', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Infrastructure Drift Detection in CI',
1144
+ check({ files }) {
1145
+ const findings = [];
1146
+ const hasTerraform = [...files.keys()].some(f => f.endsWith('.tf'));
1147
+ const hasDriftCheck = [...files.values()].some(c => c.match(/terraform.*plan.*detailed|drift.*detect|driftctl|infracost.*diff/i));
1148
+ if (hasTerraform && !hasDriftCheck) {
1149
+ findings.push({ ruleId: 'DEPLOY-PROC-028', category: 'deployment', severity: 'medium', title: 'Terraform used without drift detection in CI', description: 'Run terraform plan in CI to detect drift between code and real infrastructure. Undetected drift causes "works in code, broken in prod" failures during deployments.', fix: null });
1150
+ }
1151
+ return findings;
1152
+ },
1153
+ },
1154
+ // DEPLOY-PROC-029: No secrets rotation automation
1155
+ { id: 'DEPLOY-PROC-029', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Automated Secrets Rotation',
1156
+ check({ files }) {
1157
+ const findings = [];
1158
+ const hasSecrets = [...files.values()].some(c => c.match(/SecretsManager|getSecretValue|vault.*read|AWS_SECRET/i));
1159
+ const hasRotation = [...files.values()].some(c => c.match(/rotate.*secret|secret.*rotation|RotationLambda|rotateSecret/i));
1160
+ if (hasSecrets && !hasRotation) {
1161
+ findings.push({ ruleId: 'DEPLOY-PROC-029', category: 'deployment', severity: 'medium', title: 'Secrets Manager used without rotation configuration', description: 'Enable automatic secret rotation in AWS Secrets Manager. Long-lived credentials increase the blast radius of a credential leak. Rotate secrets at least every 90 days.', fix: null });
1162
+ }
1163
+ return findings;
1164
+ },
1165
+ },
1166
+ // DEPLOY-PROC-030: No performance regression testing in CI
1167
+ { id: 'DEPLOY-PROC-030', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No Performance Regression Tests in CI Pipeline',
1168
+ check({ files }) {
1169
+ const findings = [];
1170
+ const hasCICD = [...files.keys()].some(f => f.match(/\.github\/workflows|\.gitlab-ci/));
1171
+ const hasPerfTest = [...files.values()].some(c => c.match(/k6|artillery|ab\s+-n|wrk\s+-t|loadtest|lighthouse.*ci|perf.*budget/i));
1172
+ if (hasCICD && !hasPerfTest) {
1173
+ findings.push({ ruleId: 'DEPLOY-PROC-030', category: 'deployment', severity: 'medium', title: 'No performance tests in CI — regressions not caught before deployment', description: 'Add Lighthouse CI or k6 to your pipeline. Without performance tests, slow query additions, bundle size increases, and memory leaks reach production undetected.', fix: null });
1174
+ }
1175
+ return findings;
1176
+ },
1177
+ },
1178
+ // DEPLOY-PROC-031: No DB migration safety checks
1179
+ { id: 'DEPLOY-PROC-031', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Database Migration Without Safety Checks',
1180
+ check({ files }) {
1181
+ const findings = [];
1182
+ for (const [fp, c] of files) {
1183
+ if (!fp.match(/migration\.|migrations\//i)) continue;
1184
+ if (c.match(/DROP\s+COLUMN|DROP\s+TABLE|ALTER.*DROP|TRUNCATE/i)) {
1185
+ if (!c.match(/IF\s+EXISTS|reversible|down\s*\(\s*\)|async\s+down/i)) {
1186
+ findings.push({ ruleId: 'DEPLOY-PROC-031', category: 'deployment', severity: 'high', title: 'Destructive migration without reversible down() method', description: 'Always implement the down() method for destructive migrations. Without a reversal path, a failed deployment with a DROP statement requires manual database recovery.', file: fp, fix: null });
1187
+ }
1188
+ }
1189
+ }
1190
+ return findings;
1191
+ },
1192
+ },
1193
+ // DEPLOY-PROC-032: CI pipeline without caching
1194
+ { id: 'DEPLOY-PROC-032', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'CI Pipeline Without Dependency Caching',
1195
+ check({ files }) {
1196
+ const findings = [];
1197
+ for (const [fp, c] of files) {
1198
+ if (!isCIFile(fp)) continue;
1199
+ if (c.match(/npm install|npm ci|yarn install|pnpm install/i)) {
1200
+ if (!c.match(/cache:|actions\/cache|restore-keys|key:.*node_modules/i)) {
1201
+ findings.push({ ruleId: 'DEPLOY-PROC-032', category: 'deployment', severity: 'low', title: 'CI installs dependencies without caching — slow and costly pipeline', description: 'Add dependency caching to CI workflows. Caching node_modules reduces pipeline time by 60-80% and cuts CI costs proportionally.', file: fp, fix: null });
1202
+ }
1203
+ }
1204
+ }
1205
+ return findings;
1206
+ },
1207
+ },
1208
+ // DEPLOY-PROC-033: Using curl | sh pattern in CI
1209
+ { id: 'DEPLOY-PROC-033', category: 'deployment', severity: 'critical', confidence: 'definite', title: 'Downloading and Executing Scripts via curl | sh',
1210
+ check({ files }) {
1211
+ const findings = [];
1212
+ for (const [fp, c] of files) {
1213
+ if (!isCIFile(fp) && !fp.match(/Makefile|\.sh$/)) continue;
1214
+ const lines = c.split('\n');
1215
+ for (let i = 0; i < lines.length; i++) {
1216
+ if (lines[i].match(/curl.*\|\s*(?:sudo\s+)?(?:ba)?sh|wget.*\|\s*(?:sudo\s+)?(?:ba)?sh/i)) {
1217
+ findings.push({ ruleId: 'DEPLOY-PROC-033', category: 'deployment', severity: 'critical', title: 'curl | sh pattern in CI — arbitrary remote code execution', description: 'Never pipe curl output directly to sh. Download and verify the checksum before executing. Compromised servers or MITM attacks can inject malicious code into the piped script.', file: fp, line: i + 1, fix: null });
1218
+ }
1219
+ }
1220
+ }
1221
+ return findings;
1222
+ },
1223
+ },
1224
+ // DEPLOY-PROC-034: No branch protection rules
1225
+ { id: 'DEPLOY-PROC-034', category: 'deployment', severity: 'high', confidence: 'likely', title: 'No Branch Protection Rules for Main Branch',
1226
+ check({ files }) {
1227
+ const findings = [];
1228
+ const hasBranchProtection = [...files.keys()].some(f => f.match(/branch.*protection|CODEOWNERS/i)) || [...files.values()].some(c => c.match(/required_status_checks|required_pull_request_reviews|enforce_admins/i));
1229
+ const hasCI = [...files.keys()].some(f => f.match(/\.github\/workflows/));
1230
+ if (hasCI && !hasBranchProtection) {
1231
+ findings.push({ ruleId: 'DEPLOY-PROC-034', category: 'deployment', severity: 'high', title: 'No branch protection rules — direct push to main possible', description: 'Enable branch protection on main/master: require PR reviews, passing status checks, and no direct pushes. Unprotected main branches allow unreviewed code to bypass CI/CD gates.', fix: null });
1232
+ }
1233
+ return findings;
1234
+ },
1235
+ },
1236
+ // DEPLOY-SEC-009: Environment variables logged in CI output
1237
+ { id: 'DEPLOY-SEC-009', category: 'deployment', severity: 'high', confidence: 'likely', title: 'CI Step That May Print Environment Variables',
1238
+ check({ files }) {
1239
+ const findings = [];
1240
+ for (const [fp, c] of files) {
1241
+ if (!isCIFile(fp)) continue;
1242
+ const lines = c.split('\n');
1243
+ for (let i = 0; i < lines.length; i++) {
1244
+ if (lines[i].match(/printenv|env\s*>|set\s*>|echo\s+\$[A-Z_]+/i) && !lines[i].match(/#/)) {
1245
+ findings.push({ ruleId: 'DEPLOY-SEC-009', category: 'deployment', severity: 'high', title: 'CI step printing environment variables — secrets exposed in build logs', description: 'Remove printenv and bare echo $VAR from CI steps. CI build logs are often accessible to all team members and sometimes stored publicly. Printing env vars exposes secrets.', file: fp, line: i + 1, fix: null });
1246
+ }
1247
+ }
1248
+ }
1249
+ return findings;
1250
+ },
1251
+ },
1252
+ // DEPLOY-PROC-035: Missing CODEOWNERS file
1253
+ { id: 'DEPLOY-PROC-035', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No CODEOWNERS File Defined',
1254
+ check({ files }) {
1255
+ const findings = [];
1256
+ const hasCodeOwners = [...files.keys()].some(f => f.match(/CODEOWNERS/));
1257
+ const hasGitHubDir = [...files.keys()].some(f => f.match(/\.github\//));
1258
+ if (hasGitHubDir && !hasCodeOwners) {
1259
+ findings.push({ ruleId: 'DEPLOY-PROC-035', category: 'deployment', severity: 'low', title: 'No CODEOWNERS file — critical paths have no mandatory reviewer', description: 'Add a CODEOWNERS file to require review from domain experts for critical paths (auth, payments, migrations). Without it, changes to critical areas can be merged without appropriate review.', fix: null });
1260
+ }
1261
+ return findings;
1262
+ },
1263
+ },
1264
+ // DEPLOY-PROC-036: No semantic versioning enforced
1265
+ { id: 'DEPLOY-PROC-036', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'No Semantic Version Tagging in Release Process',
1266
+ check({ files }) {
1267
+ const findings = [];
1268
+ const hasRelease = [...files.values()].some(c => c.match(/release|publish|npm.*publish/i));
1269
+ const hasSemver = [...files.values()].some(c => c.match(/semantic-release|standard-version|release-please|changesets|release.*v\d+\.\d+\.\d+/i));
1270
+ if (hasRelease && !hasSemver) {
1271
+ findings.push({ ruleId: 'DEPLOY-PROC-036', category: 'deployment', severity: 'low', title: 'Release process without automated semantic versioning', description: 'Use semantic-release, release-please, or changesets for automated versioning. Manual versioning leads to inconsistent release numbers and missing changelog entries.', fix: null });
1272
+ }
1273
+ return findings;
1274
+ },
1275
+ },
1276
+ ];
1277
+
1278
// NOTE: the rules.push(...) calls below run after this statement during module
// evaluation; since the default export is a reference to the same `rules` array,
// importers still see every rule added below (module bodies finish executing
// before importers read the exported binding).
export default rules;
1279
+
1280
// DEPLOY-037: Missing readiness probe
rules.push({
  id: 'DEPLOY-037', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Kubernetes Deployment without readinessProbe',
  // Flags YAML Deployments/StatefulSets that declare containers but no readinessProbe.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/\.ya?ml$/.test(filePath)) continue;
      const isWorkload = /kind:\s*(?:Deployment|StatefulSet)/.test(content) && /containers:/.test(content);
      if (isWorkload && !/readinessProbe:/.test(content)) {
        findings.push({ ruleId: 'DEPLOY-037', category: 'deployment', severity: 'high', title: 'K8s Deployment without readinessProbe — traffic sent before app is ready', description: 'Without a readinessProbe, Kubernetes routes traffic to pods before they finish startup. Add a readinessProbe to ensure traffic only flows to healthy pods.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1294
+
1295
// DEPLOY-038: No rolling update strategy
rules.push({
  id: 'DEPLOY-038', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Kubernetes Deployment without rolling update strategy',
  // Flags YAML Deployments that declare no update strategy at all.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/\.ya?ml$/.test(filePath)) continue;
      if (/kind:\s*Deployment/.test(content) && !/strategy:/.test(content)) {
        findings.push({ ruleId: 'DEPLOY-038', category: 'deployment', severity: 'high', title: 'Deployment without rolling update strategy — deploys may cause downtime', description: 'Add strategy: type: RollingUpdate with maxUnavailable: 0 to ensure zero-downtime deployments. Without a strategy, Kubernetes may delete all old pods before new ones are ready.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1309
+
1310
// DEPLOY-039: Docker image using :latest tag
rules.push({
  id: 'DEPLOY-039', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Docker image with :latest tag — non-deterministic deployments',
  // Flags :latest image references in Dockerfiles and YAML manifests.
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!fp.match(/Dockerfile|\.ya?ml$/i)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*#/.test(lines[i])) continue;
        // Allow optional whitespace after "image:". The previous pattern required
        // the image name to follow the colon with no space, so the standard YAML
        // form "image: nginx:latest" was never matched (false negative).
        if (/(?:image:\s*|FROM\s+)\S+:latest\b/.test(lines[i])) {
          findings.push({ ruleId: 'DEPLOY-039', category: 'deployment', severity: 'high', title: 'Docker image uses :latest tag — unpredictable what version is deployed', description: 'Using :latest makes deployments non-reproducible. Pin to a specific version or SHA digest: image: nginx:1.25.3 or image: nginx@sha256:...', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
1328
+
1329
// DEPLOY-040: No non-root user in Dockerfile
rules.push({
  id: 'DEPLOY-040', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Dockerfile without USER instruction — container runs as root',
  // Flags Dockerfiles with no USER line naming a non-root account.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/Dockerfile/i.test(filePath)) continue;
      const hasNonRootUser = /^USER\s+(?!root)/m.test(content);
      if (!hasNonRootUser) {
        findings.push({ ruleId: 'DEPLOY-040', category: 'deployment', severity: 'high', title: 'Dockerfile without non-root USER — container runs as root', description: 'Add USER node (or appropriate non-root user) to Dockerfile. Running as root in a container is a security risk if the container is compromised.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1343
+
1344
// DEPLOY-041: No secrets rotation policy
rules.push({
  id: 'DEPLOY-041', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No secrets rotation configuration found',
  // Repo-wide check: a secrets store is referenced but no rotation config is.
  check({ files }) {
    const bodies = [...files.values()];
    const usesSecretsStore = bodies.some((c) => /secretsmanager|vault|AWS_SECRET|SECRET_ARN/i.test(c));
    const rotates = bodies.some((c) => /rotation|rotate.*secret|secret.*rotation|RotationRules/i.test(c));
    if (!usesSecretsStore || rotates) return [];
    return [{ ruleId: 'DEPLOY-041', category: 'deployment', severity: 'medium', title: 'Secrets manager used without rotation policy — static secrets', description: 'Enable automatic secret rotation in AWS Secrets Manager or HashiCorp Vault. Static secrets that never rotate are a persistent risk if compromised.', fix: null }];
  },
});
1357
+
1358
// DEPLOY-042: Missing resource quotas
rules.push({
  id: 'DEPLOY-042', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Kubernetes namespace without ResourceQuota',
  // Repo-wide check: a Namespace manifest exists but no ResourceQuota manifest does.
  check({ files }) {
    let namespaceDefined = false;
    for (const [filePath, content] of files) {
      if (/\.ya?ml$/.test(filePath) && /kind:\s*Namespace/.test(content)) {
        namespaceDefined = true;
        break;
      }
    }
    const quotaDefined = [...files.values()].some((c) => /kind:\s*ResourceQuota/.test(c));
    if (!namespaceDefined || quotaDefined) return [];
    return [{ ruleId: 'DEPLOY-042', category: 'deployment', severity: 'medium', title: 'Kubernetes namespace without ResourceQuota — no CPU/memory limit per namespace', description: 'Without ResourceQuotas, a single namespace can consume all cluster resources. Add ResourceQuota to cap CPU, memory, and pod count per namespace.', fix: null }];
  },
});
1371
+
1372
// DEPLOY-043: No canary or blue/green strategy
rules.push({
  id: 'DEPLOY-043', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No progressive delivery strategy (canary/blue-green)',
  // Repo-wide check: a Deployment mentioning critical domains exists but no
  // progressive-delivery tooling does.
  check({ files }) {
    const bodies = [...files.values()];
    const criticalWorkload = bodies.some((c) => /payment|checkout|auth|order/i.test(c) && /kind:\s*Deployment/.test(c));
    const progressiveDelivery = bodies.some((c) => /canary|blue.*green|argo.*rollout|flagger|spinnaker|progressive/i.test(c));
    if (!criticalWorkload || progressiveDelivery) return [];
    return [{ ruleId: 'DEPLOY-043', category: 'deployment', severity: 'medium', title: 'Critical service deployed without canary or blue/green strategy', description: 'For critical services (payments, auth), use Argo Rollouts or Flagger to gradually shift traffic to new versions, enabling instant rollback on metrics degradation.', fix: null }];
  },
});
1385
+
1386
// DEPLOY-044: Missing backup verification in CI
rules.push({
  id: 'DEPLOY-044', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No database backup verification step in CI/CD pipeline',
  // Repo-wide check: migrations are referenced but no backup verification is.
  check({ files }) {
    const bodies = [...files.values()];
    const migrates = bodies.some((c) => /migrate|migration|schema.*change/i.test(c));
    const verifiesBackups = bodies.some((c) => /backup.*verif|restore.*test|verify.*backup|backup.*test/i.test(c));
    if (!migrates || verifiesBackups) return [];
    return [{ ruleId: 'DEPLOY-044', category: 'deployment', severity: 'medium', title: 'Migrations deployed without backup verification step', description: 'Before running irreversible schema migrations, verify a recent backup exists and is restorable. Add a backup check step before migration stages.', fix: null }];
  },
});
1399
+
1400
// DEPLOY-045: Dockerfile with no HEALTHCHECK
rules.push({
  id: 'DEPLOY-045', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Dockerfile without HEALTHCHECK instruction',
  // Flags Dockerfiles that never declare a HEALTHCHECK.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/Dockerfile/i.test(filePath)) continue;
      if (!/^HEALTHCHECK\s/m.test(content)) {
        findings.push({ ruleId: 'DEPLOY-045', category: 'deployment', severity: 'medium', title: 'Dockerfile without HEALTHCHECK — Docker cannot detect unhealthy containers', description: 'Add HEALTHCHECK instruction to enable Docker to detect and restart unhealthy containers: HEALTHCHECK --interval=30s --timeout=3s CMD curl -f http://localhost/health || exit 1', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1414
+
1415
// DEPLOY-046: No multi-stage Docker build
rules.push({
  id: 'DEPLOY-046', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'Single-stage Dockerfile — production image includes build tools',
  // Flags Dockerfiles with exactly one FROM that also run an install/build step.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/Dockerfile/i.test(filePath)) continue;
      const fromMatches = content.match(/^FROM\s+/gm) || [];
      const buildsInPlace = /npm\s+install|yarn\s+install|RUN.*build/i.test(content);
      if (fromMatches.length === 1 && buildsInPlace) {
        findings.push({ ruleId: 'DEPLOY-046', category: 'deployment', severity: 'low', title: 'Single-stage Dockerfile — dev dependencies included in production image', description: 'Use multi-stage builds: FROM node:18 AS builder / RUN npm ci / FROM node:18-alpine / COPY --from=builder. This reduces image size and attack surface.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1430
+
1431
// DEPLOY-047: No .dockerignore file
rules.push({
  id: 'DEPLOY-047', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Dockerfile present without .dockerignore — sensitive files may be copied',
  // Repo-wide check: a Dockerfile exists but no .dockerignore does.
  check({ files }) {
    const paths = [...files.keys()];
    const dockerfilePresent = paths.some((f) => /Dockerfile/i.test(f));
    const dockerignorePresent = paths.some((f) => /\.dockerignore$/.test(f));
    if (!dockerfilePresent || dockerignorePresent) return [];
    return [{ ruleId: 'DEPLOY-047', category: 'deployment', severity: 'medium', title: 'No .dockerignore file — .env, node_modules, and secrets may be copied into image', description: 'Create a .dockerignore file to exclude .env files, node_modules, .git, and other sensitive/unnecessary files from Docker build context.', fix: null }];
  },
});
1444
+
1445
// DEPLOY-048: Missing PodDisruptionBudget
rules.push({
  id: 'DEPLOY-048', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No PodDisruptionBudget for critical services',
  // Repo-wide check: a Deployment manifest exists but no PodDisruptionBudget does.
  check({ files }) {
    const bodies = [...files.values()];
    const deploymentPresent = bodies.some((c) => /kind:\s*Deployment/.test(c));
    const pdbPresent = bodies.some((c) => /kind:\s*PodDisruptionBudget/.test(c));
    if (!deploymentPresent || pdbPresent) return [];
    return [{ ruleId: 'DEPLOY-048', category: 'deployment', severity: 'medium', title: 'No PodDisruptionBudget — node maintenance can take down all pods simultaneously', description: 'Add a PodDisruptionBudget with minAvailable: 1 to ensure at least one pod stays running during node maintenance and voluntary disruptions.', fix: null }];
  },
});
1458
+
1459
// DEPLOY-049: Helm chart without resource limits
rules.push({
  id: 'DEPLOY-049', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Helm values without resource limits defined',
  // Flags Helm values/Chart files that declare resources but no limits section.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/values.*\.ya?ml$|Chart\.ya?ml$/i.test(filePath)) continue;
      if (/resources:/.test(content) && !/limits:/.test(content)) {
        findings.push({ ruleId: 'DEPLOY-049', category: 'deployment', severity: 'high', title: 'Helm values.yaml has resources section without limits', description: 'Define resources.limits.cpu and resources.limits.memory in Helm values to prevent containers from consuming unlimited cluster resources.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1473
+
1474
// DEPLOY-050: No environment-specific config separation
rules.push({
  id: 'DEPLOY-050', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No environment-specific configuration separation',
  // Repo-wide check: environment-dependent config is referenced but no
  // per-environment config files exist.
  check({ files }) {
    const envConfigUsed = [...files.values()].some((c) => /NODE_ENV.*production|DATABASE_URL|API_URL/i.test(c));
    const envFilesSeparated = [...files.keys()].some((f) => /\.env\.(?:production|staging|development)|config.*prod.*\.js|environments\/prod/i.test(f));
    if (!envConfigUsed || envFilesSeparated) return [];
    return [{ ruleId: 'DEPLOY-050', category: 'deployment', severity: 'medium', title: 'No environment-specific config files — production and dev settings mixed', description: 'Separate configuration by environment using .env.production, .env.staging, and .env.development files. Load the appropriate file based on NODE_ENV.', fix: null }];
  },
});
1487
+
1488
// DEPLOY-051: Missing Terraform state locking
rules.push({
  id: 'DEPLOY-051', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Terraform without remote state locking',
  // Flags .tf files that configure a backend with no visible locking setup.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/\.tf$/.test(filePath)) continue;
      const backendConfigured = /terraform\s*\{/.test(content) && /backend\s*["']/.test(content);
      if (backendConfigured && !/dynamodb_table|lockFilePath|lock/i.test(content)) {
        findings.push({ ruleId: 'DEPLOY-051', category: 'deployment', severity: 'high', title: 'Terraform backend without state locking — concurrent applies cause state corruption', description: 'Configure state locking using DynamoDB (for S3 backend) to prevent concurrent terraform applies from corrupting state.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1502
+
1503
// DEPLOY-052: No automated rollback on deployment failure
rules.push({
  id: 'DEPLOY-052', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'CI/CD pipeline without automated rollback on failure',
  // Repo-wide check: deploy commands are present but no rollback commands are.
  check({ files }) {
    const bodies = [...files.values()];
    const deploys = bodies.some((c) => /deploy|kubectl.*apply|helm.*upgrade|serverless.*deploy/i.test(c));
    const rollsBack = bodies.some((c) => /rollback|helm.*rollback|kubectl.*rollout.*undo|revert.*deploy/i.test(c));
    if (!deploys || rollsBack) return [];
    return [{ ruleId: 'DEPLOY-052', category: 'deployment', severity: 'medium', title: 'Deployment pipeline without rollback step — manual intervention required on failure', description: 'Add an automated rollback step to your CI/CD pipeline: kubectl rollout undo or helm rollback. Automatic rollback minimizes time-to-recovery on bad deploys.', fix: null }];
  },
});
1516
+
1517
// DEPLOY-053 through DEPLOY-072: Additional deployment rules

// DEPLOY-053: No container registry vulnerability scanning in CI
rules.push({
  id: 'DEPLOY-053', category: 'deployment', severity: 'high', confidence: 'likely', title: 'CI/CD pipeline without container image vulnerability scan',
  // Repo-wide check: CI config and a Dockerfile exist, but no image scanner
  // (Trivy/Grype/etc.) is referenced anywhere.
  check({ files }) {
    const paths = [...files.keys()];
    const ciPresent = paths.some((f) => /\.github\/workflows|\.gitlab-ci|\.circleci|Jenkinsfile/i.test(f));
    const dockerfilePresent = paths.some((f) => /Dockerfile/i.test(f));
    const scannerPresent = [...files.values()].some((c) => /trivy|grype|snyk.*container|clair|anchore|dockle/i.test(c));
    if (!ciPresent || !dockerfilePresent || scannerPresent) return [];
    return [{ ruleId: 'DEPLOY-053', category: 'deployment', severity: 'high', title: 'Docker image built without vulnerability scan in CI', description: 'Add Trivy or Grype to scan container images for CVEs before pushing to registry: trivy image myapp:latest', fix: null }];
  },
});
1533
+
1534
// DEPLOY-054: Missing terraform plan before apply
rules.push({
  id: 'DEPLOY-054', category: 'deployment', severity: 'high', confidence: 'likely', title: 'Terraform apply in CI without prior plan review',
  // Flags CI YAML files that run terraform apply without a terraform plan step.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      const isCiYaml = /\.ya?ml$/.test(filePath) && /\.github|\.gitlab|ci\//i.test(filePath);
      if (!isCiYaml) continue;
      if (/terraform.*apply/.test(content) && !/terraform.*plan/.test(content)) {
        findings.push({ ruleId: 'DEPLOY-054', category: 'deployment', severity: 'high', title: 'terraform apply without plan step — no review of infrastructure changes', description: 'Always run terraform plan before terraform apply and require approval for the plan output. Blindly applying can cause unexpected infrastructure changes.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1548
+
1549
// DEPLOY-055: Environment variables hardcoded in CI config
rules.push({
  id: 'DEPLOY-055', category: 'deployment', severity: 'critical', confidence: 'definite', title: 'Hardcoded secret in CI/CD configuration file',
  // Flags secret-like literal values in CI YAML that are not variable references.
  check({ files }) {
    const findings = [];
    const SECRET_ASSIGNMENT = /(?:PASSWORD|SECRET|TOKEN|API_KEY|PRIVATE_KEY)\s*:\s*[A-Za-z0-9+/=_\-]{10,}/i;
    const VARIABLE_REF = /\$\{|\$\(|\$secrets|\${{/i;
    for (const [filePath, content] of files) {
      const isCiYaml = /\.ya?ml$/.test(filePath) && /\.github|\.gitlab|jenkins|circleci/i.test(filePath);
      if (!isCiYaml) continue;
      content.split('\n').forEach((line, idx) => {
        if (/^\s*#/.test(line)) return;
        if (SECRET_ASSIGNMENT.test(line) && !VARIABLE_REF.test(line)) {
          findings.push({ ruleId: 'DEPLOY-055', category: 'deployment', severity: 'critical', title: 'Hardcoded secret in CI configuration', description: 'Store secrets in CI/CD secrets management (GitHub Secrets, GitLab CI Variables) and reference them as ${{ secrets.MY_SECRET }} or $MY_SECRET.', file: filePath, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
1567
+
1568
// DEPLOY-056: No dependency caching in CI
rules.push({
  id: 'DEPLOY-056', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'CI pipeline without dependency caching — slow builds',
  // Flags CI YAML files that install dependencies with no cache configuration.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      const isCiYaml = /\.ya?ml$/.test(filePath) && /\.github\/workflows|\.gitlab-ci|\.circleci/i.test(filePath);
      if (!isCiYaml) continue;
      if (/npm\s+ci|yarn\s+install|pip\s+install/.test(content) && !/cache:/.test(content)) {
        findings.push({ ruleId: 'DEPLOY-056', category: 'deployment', severity: 'low', title: 'CI builds dependencies without caching — slow pipeline', description: 'Cache node_modules or pip packages in CI to speed up builds: use actions/cache@v3 (GitHub Actions) or cache: npm key in GitLab CI.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1582
+
1583
// DEPLOY-057: Docker build without BuildKit
rules.push({
  id: 'DEPLOY-057', category: 'deployment', severity: 'low', confidence: 'suggestion', title: 'Docker build without BuildKit — slower builds',
  // Flags docker build invocations in YAML/Makefiles that do not enable BuildKit.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/\.ya?ml$|Makefile/.test(filePath)) continue;
      const buildsImage = /docker\s+build\b/.test(content);
      const usesBuildKit = /DOCKER_BUILDKIT=1|buildx\s+build/.test(content);
      if (buildsImage && !usesBuildKit) {
        findings.push({ ruleId: 'DEPLOY-057', category: 'deployment', severity: 'low', title: 'Docker build without BuildKit — use DOCKER_BUILDKIT=1 for faster builds', description: 'BuildKit provides better caching, parallel builds, and security for build secrets. Enable with: DOCKER_BUILDKIT=1 docker build or use docker buildx build.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1597
+
1598
// DEPLOY-058: No image tag immutability in ECR
rules.push({
  id: 'DEPLOY-058', category: 'deployment', severity: 'high', confidence: 'likely', title: 'ECR repository without image tag immutability',
  // Flags Terraform ECR repositories that do not set IMMUTABLE tag mutability.
  check({ files }) {
    const findings = [];
    for (const [filePath, content] of files) {
      if (!/\.tf$/.test(filePath)) continue;
      const declaresEcr = /aws_ecr_repository/.test(content);
      const immutableTags = /image_tag_mutability\s*=\s*["']IMMUTABLE["']/.test(content);
      if (declaresEcr && !immutableTags) {
        findings.push({ ruleId: 'DEPLOY-058', category: 'deployment', severity: 'high', title: 'ECR without IMMUTABLE tag — existing image tags can be overwritten', description: 'Set image_tag_mutability = "IMMUTABLE" to prevent overwriting deployed image tags. Mutable tags can lead to deploying different code than expected.', file: filePath, fix: null });
      }
    }
    return findings;
  },
});
1612
+
1613
// DEPLOY-059: Missing artifact integrity check
rules.push({
  id: 'DEPLOY-059', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'No artifact integrity verification in deployment pipeline',
  // Repo-wide check: deploy steps exist but no signing/checksum verification does.
  check({ files }) {
    const bodies = [...files.values()];
    const deploys = bodies.some((c) => /deploy.*stage|kubectl.*apply|helm.*upgrade/i.test(c));
    const verifiesArtifacts = bodies.some((c) => /cosign|sigstore|sha256sum|checksum|verify.*sig|signature.*verify/i.test(c));
    if (!deploys || verifiesArtifacts) return [];
    return [{ ruleId: 'DEPLOY-059', category: 'deployment', severity: 'medium', title: 'Deployment without artifact integrity verification', description: 'Sign artifacts with Cosign or verify SHA checksums to ensure what you deploy matches what was built. Prevents supply chain attacks on deployment artifacts.', fix: null }];
  },
});
1626
+
1627
// DEPLOY-060: a Deployment pinned to one replica is a single point of failure.
rules.push({
  id: 'DEPLOY-060',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'Kubernetes Deployment with replicas: 1 — single point of failure',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!/\.ya?ml$/.test(path)) continue;
      const isDeployment = /kind:\s*Deployment/.test(text);
      const singleReplica = /replicas:\s*1\b/.test(text);
      if (!isDeployment || !singleReplica) continue;
      out.push({
        ruleId: 'DEPLOY-060',
        category: 'deployment',
        severity: 'medium',
        title: 'Deployment with replicas: 1 — pod failure causes full service outage',
        description: 'Set replicas: 2 or more for any service that requires availability. A single replica means any pod restart (update, node failure, OOM kill) causes downtime.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1641
+
1642
+ // DEPLOY-061 through DEPLOY-075
1643
+
1644
// DEPLOY-061: a production-looking service with no known monitoring/alerting
// vendor referenced anywhere in the project; one project-wide finding.
rules.push({
  id: 'DEPLOY-061',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'No monitoring or alerting configuration found',
  check({ files }) {
    const contents = [...files.values()];
    const looksProduction = contents.some((text) => /NODE_ENV.*production|app\.listen/i.test(text));
    const monitored = contents.some((text) => /cloudwatch|datadog|newrelic|prometheus|grafana|pagerduty|opsgenie|sentry|rollbar|honeybadger/i.test(text));
    if (!looksProduction || monitored) return [];
    return [{
      ruleId: 'DEPLOY-061',
      category: 'deployment',
      severity: 'high',
      title: 'Production service without monitoring — outages go undetected',
      description: 'Configure monitoring and alerting (CloudWatch, Datadog, Sentry) to detect errors, high latency, and availability issues in production.',
      fix: null,
    }];
  },
});
1657
+
1658
// DEPLOY-062: a web app with no log-aggregation backend referenced anywhere;
// one project-wide finding.
rules.push({
  id: 'DEPLOY-062',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'No log aggregation service configured',
  check({ files }) {
    const contents = [...files.values()];
    const hasWebApp = contents.some((text) => /express|fastify|koa/i.test(text));
    const aggregatesLogs = contents.some((text) => /elasticsearch|kibana|logstash|splunk|sumologic|cloudwatch.*logs|fluentd|loki|papertrail/i.test(text));
    if (!hasWebApp || aggregatesLogs) return [];
    return [{
      ruleId: 'DEPLOY-062',
      category: 'deployment',
      severity: 'medium',
      title: 'No centralized log aggregation — logs only on individual pods/instances',
      description: 'Configure log aggregation (CloudWatch, ELK, Loki) to collect logs from all instances centrally. Otherwise logs are lost when pods restart.',
      fix: null,
    }];
  },
});
1671
+
1672
// DEPLOY-063: Node projects should pin their Node.js version via .nvmrc
// or .node-version; one project-wide finding.
rules.push({
  id: 'DEPLOY-063',
  category: 'deployment',
  severity: 'low',
  confidence: 'suggestion',
  title: 'No .nvmrc or .node-version file — inconsistent Node.js version',
  check({ files }) {
    const paths = [...files.keys()];
    const isNodeProject = paths.some((p) => /package\.json$/.test(p));
    const pinsNodeVersion = paths.some((p) => /\.nvmrc$|\.node-version$/.test(p));
    if (!isNodeProject || pinsNodeVersion) return [];
    return [{
      ruleId: 'DEPLOY-063',
      category: 'deployment',
      severity: 'low',
      title: 'No .nvmrc — Node.js version inconsistent across developer machines',
      description: 'Create an .nvmrc file with the required Node.js version to ensure consistent environments: echo "18.19.0" > .nvmrc',
      fix: null,
    }];
  },
});
1685
+
1686
// DEPLOY-064: Terraform module without pinned version.
// Fix: dropped the dead `!ctx.match(/\?ref=/)` condition — the `ref=`
// alternative in the preceding regex already matches every `?ref=`, so the
// extra test could never change the outcome.
rules.push({
  id: 'DEPLOY-064', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Terraform module without pinned version',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!/\.tf$/.test(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        // Skip commented-out lines.
        if (/^\s*#/.test(lines[i])) continue;
        // GitHub-hosted module source using the `//subdir` syntax.
        if (!/source\s*=\s*["'].*github\.com.*\/\//.test(lines[i])) continue;
        // Look at the source line plus the next four lines for a version
        // pin (version = "1...") or a git ref (?ref=...).
        const ctx = lines.slice(i, Math.min(lines.length, i + 5)).join('\n');
        if (!/version\s*=\s*["'][0-9]|ref=/.test(ctx)) {
          findings.push({ ruleId: 'DEPLOY-064', category: 'deployment', severity: 'medium', title: 'Terraform module without pinned version/ref — breaking changes auto-applied', description: 'Pin Terraform module versions: source = "module?ref=v1.2.3". Without pinning, changes to the module are automatically pulled on next init.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
1707
+
1708
// DEPLOY-065: Terraform-managed infrastructure with no backup/DR-related
// keywords anywhere in the project; one project-wide finding.
rules.push({
  id: 'DEPLOY-065',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'No disaster recovery or backup strategy found',
  check({ files }) {
    const usesTerraform = [...files.keys()].some((p) => /\.tf$/.test(p));
    const mentionsDR = [...files.values()].some((text) => /backup|aws_db_instance.*backup|snapshot|cross_region_replica|RTO|RPO|disaster.*recovery/i.test(text));
    if (!usesTerraform || mentionsDR) return [];
    return [{
      ruleId: 'DEPLOY-065',
      category: 'deployment',
      severity: 'medium',
      title: 'No backup or disaster recovery configuration found in Terraform',
      description: 'Configure automated backups for databases, define RTO and RPO targets, and document disaster recovery procedures. Enable automated snapshots on RDS and S3 versioning.',
      fix: null,
    }];
  },
});
1721
+
1722
// DEPLOY-066: a livenessProbe without a startupProbe can kill slow-starting
// containers before they finish initializing.
rules.push({
  id: 'DEPLOY-066',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'Kubernetes container without startup probe for slow-starting apps',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!/\.ya?ml$/.test(path)) continue;
      if (!/kind:\s*Deployment/.test(text)) continue;
      if (!/livenessProbe:/.test(text) || /startupProbe:/.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-066',
        category: 'deployment',
        severity: 'medium',
        title: 'Deployment with livenessProbe but no startupProbe — slow apps killed prematurely',
        description: 'Add a startupProbe for applications that take more than 30 seconds to initialize. Without it, the livenessProbe may kill the pod before it finishes starting.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1736
+
1737
// DEPLOY-067: .tf files declaring AWS resources but containing no tags block
// at all (file-level check, not per-resource).
rules.push({
  id: 'DEPLOY-067',
  category: 'deployment',
  severity: 'low',
  confidence: 'suggestion',
  title: 'Terraform resources without cost allocation tags',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!/\.tf$/.test(path)) continue;
      const declaresAws = /resource\s+["']aws_/.test(text);
      const hasTags = /tags\s*=\s*\{/.test(text);
      if (!declaresAws || hasTags) continue;
      out.push({
        ruleId: 'DEPLOY-067',
        category: 'deployment',
        severity: 'low',
        title: 'AWS resources without tags — no cost allocation or resource management',
        description: 'Tag all AWS resources with environment, team, and cost-center tags for cost allocation and operational management: tags = { Environment = "production", Team = "platform" }',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1751
+
1752
// DEPLOY-068: Missing Kubernetes HorizontalPodAutoscaler.
// Fix: the dirname extraction `fp.replace(/\/[^/]+$/, '')` left a bare
// filename unchanged when the path had no '/', so a root-level Deployment
// could never match a root-level HPA (false positive). Paths without a
// directory now normalize to '' on both sides of the comparison.
rules.push({
  id: 'DEPLOY-068', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Kubernetes Deployment without HorizontalPodAutoscaler',
  check({ files }) {
    const findings = [];
    // Directory of a path; '' for root-level files.
    const dirOf = (p) => (p.includes('/') ? p.replace(/\/[^/]+$/, '') : '');
    const deployFiles = [];
    const hpaDirs = new Set();
    for (const [fp, c] of files) {
      if (!fp.endsWith('.yaml') && !fp.endsWith('.yml')) continue;
      if (/kind:\s*Deployment/.test(c)) deployFiles.push(fp);
      if (/kind:\s*HorizontalPodAutoscaler/.test(c)) hpaDirs.add(dirOf(fp));
    }
    for (const fp of deployFiles) {
      // Report only Deployments with no HPA manifest in the same directory.
      if (hpaDirs.has(dirOf(fp))) continue;
      findings.push({ ruleId: 'DEPLOY-068', category: 'deployment', severity: 'medium', title: 'Kubernetes Deployment without HorizontalPodAutoscaler — no auto-scaling', description: 'Create an HPA to automatically scale pods based on CPU/memory metrics.', file: fp, fix: null });
    }
    return findings;
  },
});
1771
+
1772
// DEPLOY-069: Dockerfile not using multi-stage build.
// Fix: also recognize `npm ci` — the standard reproducible install command
// used in CI images — so single-stage builds using it are flagged too.
rules.push({
  id: 'DEPLOY-069', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Dockerfile without multi-stage build — large final image',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!fp.endsWith('Dockerfile') && !fp.endsWith('.dockerfile')) continue;
      // Multi-stage Dockerfiles have two or more FROM lines.
      const fromCount = (c.match(/^FROM\s/gm) || []).length;
      if (fromCount === 1 && /npm install|npm ci|yarn install/.test(c)) {
        findings.push({ ruleId: 'DEPLOY-069', category: 'deployment', severity: 'medium', title: 'Single-stage Dockerfile includes build tools in final image — increases attack surface and image size', description: 'Use multi-stage builds: one stage for building, another for the final minimal runtime image.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
1785
+
1786
// DEPLOY-070: CI pipeline definitions that never run a linter.
rules.push({
  id: 'DEPLOY-070',
  category: 'deployment',
  severity: 'low',
  confidence: 'suggestion',
  title: 'CI/CD pipeline without linting step',
  check({ files }) {
    const out = [];
    const isCiConfig = (p) => p.includes('.github/workflows') || p.includes('.gitlab-ci') || p.includes('Jenkinsfile') || p.includes('circleci');
    for (const path of [...files.keys()].filter(isCiConfig)) {
      const text = files.get(path) || '';
      if (/lint|eslint|tslint|prettier/.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-070',
        category: 'deployment',
        severity: 'low',
        title: 'CI pipeline without linting — code quality issues may reach production',
        description: 'Add a lint step to your CI pipeline to catch code quality issues before deployment.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1799
+
1800
// DEPLOY-071: CI pipeline definitions with no security-scanning tool.
rules.push({
  id: 'DEPLOY-071',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'CI/CD pipeline without security scanning',
  check({ files }) {
    const out = [];
    const isCiConfig = (p) => p.includes('.github/workflows') || p.includes('.gitlab-ci') || p.includes('Jenkinsfile');
    for (const path of [...files.keys()].filter(isCiConfig)) {
      const text = files.get(path) || '';
      if (/trivy|snyk|grype|clair|anchore|owasp|security.scan|npm.audit|semgrep/i.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-071',
        category: 'deployment',
        severity: 'high',
        title: 'CI pipeline without security scanning — vulnerabilities not caught before deployment',
        description: 'Add security scanning (Trivy, Snyk, OWASP) to your CI pipeline.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1813
+
1814
// DEPLOY-072: Deployments that never mention rollback/revisionHistoryLimit.
rules.push({
  id: 'DEPLOY-072',
  category: 'deployment',
  severity: 'medium',
  confidence: 'suggestion',
  title: 'No rollback strategy in deployment configuration',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!path.endsWith('.yaml') && !path.endsWith('.yml')) continue;
      if (!/kind:\s*Deployment/.test(text)) continue;
      if (/rollback|revisionHistoryLimit/.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-072',
        category: 'deployment',
        severity: 'medium',
        title: 'Kubernetes Deployment without revisionHistoryLimit — rollback history not preserved',
        description: 'Set revisionHistoryLimit in Deployment spec to retain rollback history.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1826
+
1827
// DEPLOY-073: Missing container image tag pinning (SHA digest).
// Fix: removed the original first guard (`if image:tag && !@sha256 &&
// !latest → continue`) — it was effect-free dead code, since every line it
// skipped contained no "latest" and therefore could never match the
// `:latest` pattern below anyway. Behavior is unchanged: only `:latest`
// references are reported, one finding per offending line.
rules.push({
  id: 'DEPLOY-073', category: 'deployment', severity: 'medium', confidence: 'likely', title: 'Container image referenced by tag rather than digest',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!fp.endsWith('.yaml') && !fp.endsWith('.yml') && !fp.endsWith('Dockerfile') && !fp.endsWith('.dockerfile')) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/image:\s*\S+:latest/.test(lines[i])) {
          findings.push({ ruleId: 'DEPLOY-073', category: 'deployment', severity: 'medium', title: 'Container image using :latest tag — not reproducible', description: 'Pin container images to specific tags or SHA digests for reproducible deployments.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
1843
+
1844
// DEPLOY-074: CI pipeline definitions with no post-deploy smoke/health check.
rules.push({
  id: 'DEPLOY-074',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'CI/CD pipeline without post-deployment smoke tests',
  check({ files }) {
    const out = [];
    const isCiConfig = (p) => p.includes('.github/workflows') || p.includes('.gitlab-ci');
    for (const path of [...files.keys()].filter(isCiConfig)) {
      const text = files.get(path) || '';
      if (/smoke.test|post.deploy|healthcheck|curl.*health|wget.*health/i.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-074',
        category: 'deployment',
        severity: 'medium',
        title: 'No post-deployment smoke test — deployment failures not detected automatically',
        description: 'Add smoke tests after deployment to verify the application is responding correctly.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1857
+
1858
// DEPLOY-075: Helm chart/values files that define workloads but declare no
// resource requests or limits.
rules.push({
  id: 'DEPLOY-075',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'Helm chart values without resource requests defined',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!path.endsWith('.yaml') && !path.endsWith('.yml')) continue;
      // Restrict to files that look Helm-related by path.
      if (!path.includes('helm') && !path.includes('chart') && !path.includes('values')) continue;
      const definesWorkload = /replicaCount|image:|service:/.test(text);
      const definesResources = /resources:|requests:|cpu:|memory:/.test(text);
      if (!definesWorkload || definesResources) continue;
      out.push({
        ruleId: 'DEPLOY-075',
        category: 'deployment',
        severity: 'high',
        title: 'Helm chart without resource requests — pods may be over/under-provisioned',
        description: 'Define resource requests and limits in Helm chart values for predictable scheduling.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1871
+
1872
// DEPLOY-076: `COPY . .` pulls the whole build context into the image unless
// a .dockerignore exists somewhere in the project.
rules.push({
  id: 'DEPLOY-076',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'Dockerfile COPY . . without .dockerignore — copies unnecessary files',
  check({ files }) {
    const out = [];
    const hasDockerignore = [...files.keys()].some((p) => p.endsWith('.dockerignore'));
    if (hasDockerignore) return out;
    for (const [path, text] of files) {
      if (!path.endsWith('Dockerfile') && !path.endsWith('.dockerfile')) continue;
      if (!/^COPY\s+\.\s+\./m.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-076',
        category: 'deployment',
        severity: 'medium',
        title: 'Dockerfile COPY . . without .dockerignore — includes .git, node_modules, secrets',
        description: 'Create a .dockerignore file to exclude unnecessary files from the Docker build context.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1885
+
1886
// DEPLOY-077: no per-environment config files and no NODE_ENV usage anywhere;
// reported once against package.json if present.
rules.push({
  id: 'DEPLOY-077',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'No environment configuration management strategy',
  check({ files }) {
    const byName = [...files.keys()].some((p) => /\.env\.(?:production|staging|development)|config\.(?:prod|staging)/.test(p));
    const byContent = [...files.values()].some((text) => /NODE_ENV.*production|process\.env\.NODE_ENV/.test(text));
    if (byName || byContent) return [];
    const pkgJson = [...files.keys()].find((p) => p.endsWith('package.json'));
    if (!pkgJson) return [];
    return [{
      ruleId: 'DEPLOY-077',
      category: 'deployment',
      severity: 'medium',
      title: 'No environment-specific configuration strategy found',
      description: 'Use environment variables or per-environment config files to manage configuration across deployments.',
      file: pkgJson,
      fix: null,
    }];
  },
});
1899
+
1900
// DEPLOY-078: CI configs that push to a registry and mention a password
// variable without referencing a secrets store.
// NOTE(review): the exemption regex's `vault\.\$\{` branch matches the
// literal text "vault.${" — possibly intended as two alternatives
// (`vault\.` or `\$\{`); confirm intent before changing it.
rules.push({
  id: 'DEPLOY-078',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'Container registry credentials not using CI secrets',
  check({ files }) {
    const out = [];
    const isCiConfig = (p) => p.includes('.github/workflows') || p.includes('.gitlab-ci');
    for (const path of [...files.keys()].filter(isCiConfig)) {
      const text = files.get(path) || '';
      const touchesRegistry = /docker.login|docker.push|ecr|gcr|registry/i.test(text);
      const hasPassword = /password:|DOCKER_PASSWORD|REGISTRY_PASSWORD/i.test(text);
      const usesSecrets = /secrets\.|vault\.\$\{/i.test(text);
      if (!touchesRegistry || !hasPassword || usesSecrets) continue;
      out.push({
        ruleId: 'DEPLOY-078',
        category: 'deployment',
        severity: 'high',
        title: 'Container registry credentials may be hardcoded in CI config',
        description: 'Store registry credentials in CI/CD secrets variables, not hardcoded in pipeline configuration.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1915
+
1916
// DEPLOY-079: Deployments exist but no NetworkPolicy manifest anywhere;
// anchored to the first Deployment manifest found.
rules.push({
  id: 'DEPLOY-079',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'No Kubernetes NetworkPolicy — all pods can communicate freely',
  check({ files }) {
    const contents = [...files.values()];
    const hasDeployment = contents.some((text) => /kind:\s*Deployment/.test(text));
    const hasNetPolicy = contents.some((text) => /kind:\s*NetworkPolicy/.test(text));
    if (!hasDeployment || hasNetPolicy) return [];
    const anchor = [...files.keys()].find((p) => (p.endsWith('.yaml') || p.endsWith('.yml')) && /Deployment/.test(files.get(p) || ''));
    if (!anchor) return [];
    return [{
      ruleId: 'DEPLOY-079',
      category: 'deployment',
      severity: 'high',
      title: 'No Kubernetes NetworkPolicy found — unrestricted pod-to-pod communication',
      description: 'Define NetworkPolicies to restrict ingress/egress between pods following least-privilege networking.',
      file: anchor,
      fix: null,
    }];
  },
});
1930
+
1931
// DEPLOY-080: no changelog-like file anywhere; reported once against
// package.json if present.
rules.push({
  id: 'DEPLOY-080',
  category: 'deployment',
  severity: 'low',
  confidence: 'suggestion',
  title: 'No CHANGELOG file — release history not documented',
  check({ files }) {
    const paths = [...files.keys()];
    const hasChangelog = paths.some((p) => /CHANGELOG|CHANGES|HISTORY|RELEASES/i.test(p));
    if (hasChangelog) return [];
    const pkgJson = paths.find((p) => p.endsWith('package.json'));
    if (!pkgJson) return [];
    return [{
      ruleId: 'DEPLOY-080',
      category: 'deployment',
      severity: 'low',
      title: 'No CHANGELOG file — changes between releases not documented',
      description: 'Maintain a CHANGELOG.md using Keep a Changelog format to document changes for each release.',
      file: pkgJson,
      fix: null,
    }];
  },
});
1944
+
1945
// DEPLOY-081: GitHub Actions without pinned action versions.
// Fix: the original pattern `\w+\/\w+@v\d` could not match hyphenated or
// dotted action names (e.g. actions/upload-artifact, github/codeql-action),
// silently missing most real-world actions. Owner/repo segments now allow
// `[\w.-]`. Still reports at most one finding per workflow file.
rules.push({
  id: 'DEPLOY-081', category: 'deployment', severity: 'high', confidence: 'likely', title: 'GitHub Actions using mutable version tags instead of SHA',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!fp.includes('.github/workflows')) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        // `uses: owner/repo@vN` with no 40-hex-char commit SHA on the line.
        if (/uses:\s*[\w.-]+\/[\w.-]+@v\d/.test(lines[i]) && !/@[0-9a-f]{40}/.test(lines[i])) {
          findings.push({ ruleId: 'DEPLOY-081', category: 'deployment', severity: 'high', title: 'GitHub Action using mutable tag — supply chain attack risk', description: 'Pin GitHub Actions to full commit SHAs (e.g. actions/checkout@abc123...) to prevent supply chain attacks.', file: fp, line: i + 1, fix: null });
          break; // One finding per workflow file is enough.
        }
      }
    }
    return findings;
  },
});
1963
+
1964
// DEPLOY-082: Deployments exist but no ResourceQuota manifest anywhere;
// anchored to the first Deployment manifest found.
rules.push({
  id: 'DEPLOY-082',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'No Kubernetes ResourceQuota on namespace',
  check({ files }) {
    const contents = [...files.values()];
    const hasDeployment = contents.some((text) => /kind:\s*Deployment/.test(text));
    const hasQuota = contents.some((text) => /kind:\s*ResourceQuota/.test(text));
    if (!hasDeployment || hasQuota) return [];
    const anchor = [...files.keys()].find((p) => (p.endsWith('.yaml') || p.endsWith('.yml')) && /Deployment/.test(files.get(p) || ''));
    if (!anchor) return [];
    return [{
      ruleId: 'DEPLOY-082',
      category: 'deployment',
      severity: 'medium',
      title: 'No ResourceQuota defined — namespace can consume unlimited cluster resources',
      description: 'Define ResourceQuotas per namespace to prevent resource starvation across workloads.',
      file: anchor,
      fix: null,
    }];
  },
});
1978
+
1979
// DEPLOY-083: the project has migration files, but a CI pipeline never runs
// them before deploying.
rules.push({
  id: 'DEPLOY-083',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'Deployment without pre-migration step for database changes',
  check({ files }) {
    const out = [];
    const hasMigrations = [...files.keys()].some((p) => /migration|migrate/.test(p));
    if (!hasMigrations) return out;
    const isCiConfig = (p) => p.includes('.github/workflows') || p.includes('.gitlab-ci');
    for (const path of [...files.keys()].filter(isCiConfig)) {
      const text = files.get(path) || '';
      if (/migrate|migration/.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-083',
        category: 'deployment',
        severity: 'high',
        title: 'CI/CD pipeline does not run database migrations before deployment',
        description: 'Run database migrations as part of your deployment pipeline before updating application pods.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
1994
+
1995
// DEPLOY-084: liveness and readiness probes that hit the same HTTP path
// (compared within a 200-char window after each probe key).
rules.push({
  id: 'DEPLOY-084',
  category: 'deployment',
  severity: 'medium',
  confidence: 'likely',
  title: 'Kubernetes Deployment with same endpoint for liveness and readiness',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!path.endsWith('.yaml') && !path.endsWith('.yml')) continue;
      if (!/kind:\s*Deployment/.test(text)) continue;
      const liveness = text.match(/livenessProbe:[\s\S]{0,200}path:\s*(\S+)/);
      const readiness = text.match(/readinessProbe:[\s\S]{0,200}path:\s*(\S+)/);
      if (!liveness || !readiness || liveness[1] !== readiness[1]) continue;
      out.push({
        ruleId: 'DEPLOY-084',
        category: 'deployment',
        severity: 'medium',
        title: 'Liveness and readiness probes use same endpoint — liveness should check process, readiness checks dependencies',
        description: 'Use different endpoints: liveness checks if the process is alive, readiness checks if it can serve traffic.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
2010
+
2011
// DEPLOY-085: CI configs that build a Docker image but run no image scanner.
rules.push({
  id: 'DEPLOY-085',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'CI pipeline builds Docker image without vulnerability scan',
  check({ files }) {
    const out = [];
    const isCiConfig = (p) => p.includes('.github/workflows') || p.includes('.gitlab-ci');
    for (const path of [...files.keys()].filter(isCiConfig)) {
      const text = files.get(path) || '';
      const buildsImage = /docker.build|docker build/i.test(text);
      const scansImage = /trivy|grype|anchore|clair|snyk container/i.test(text);
      if (!buildsImage || scansImage) continue;
      out.push({
        ruleId: 'DEPLOY-085',
        category: 'deployment',
        severity: 'high',
        title: 'Docker image built in CI without vulnerability scan',
        description: 'Add Trivy or Grype scanning after docker build to detect vulnerabilities before pushing.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});
2024
+
2025
// DEPLOY-086: neither a Dependabot config nor a Renovate config exists;
// reported once against package.json if present.
rules.push({
  id: 'DEPLOY-086',
  category: 'deployment',
  severity: 'low',
  confidence: 'suggestion',
  title: 'No automated dependency update configuration (Dependabot/Renovate)',
  check({ files }) {
    const paths = [...files.keys()];
    const hasUpdater = paths.some((p) => p.includes('.github/dependabot.yml') || p.includes('renovate.json'));
    if (hasUpdater) return [];
    const pkgJson = paths.find((p) => p.endsWith('package.json'));
    if (!pkgJson) return [];
    return [{
      ruleId: 'DEPLOY-086',
      category: 'deployment',
      severity: 'low',
      title: 'No Dependabot or Renovate configuration — dependencies not automatically updated',
      description: 'Add .github/dependabot.yml or renovate.json to automate dependency update PRs.',
      file: pkgJson,
      fix: null,
    }];
  },
});
2038
+
2039
// DEPLOY-087: workload manifests (Deployment/Pod/StatefulSet) that never
// declare a securityContext anywhere in the file.
rules.push({
  id: 'DEPLOY-087',
  category: 'deployment',
  severity: 'high',
  confidence: 'likely',
  title: 'Kubernetes Pod without securityContext settings',
  check({ files }) {
    const out = [];
    for (const [path, text] of files) {
      if (!path.endsWith('.yaml') && !path.endsWith('.yml')) continue;
      const isWorkload = /kind:\s*(?:Deployment|Pod|StatefulSet)/.test(text);
      if (!isWorkload || /securityContext/.test(text)) continue;
      out.push({
        ruleId: 'DEPLOY-087',
        category: 'deployment',
        severity: 'high',
        title: 'Kubernetes Pod without securityContext — runs with default insecure settings',
        description: 'Add securityContext to pods: set runAsNonRoot: true, readOnlyRootFilesystem: true.',
        file: path,
        fix: null,
      });
    }
    return out;
  },
});