sentinel-ci 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +235 -0
- data/bin/gh-workflow-scanner +1 -0
- data/bin/sentinel +57 -0
- data/lib/auto_fix.rb +485 -0
- data/lib/cli/bot.rb +53 -0
- data/lib/cli/fix.rb +50 -0
- data/lib/cli/scan.rb +145 -0
- data/lib/clone_client.rb +64 -0
- data/lib/finding.rb +27 -0
- data/lib/formatter/json.rb +18 -0
- data/lib/formatter/terminal.rb +47 -0
- data/lib/github_client.rb +98 -0
- data/lib/local_client.rb +33 -0
- data/lib/rule_engine.rb +39 -0
- data/lib/rules/allow_forks_artifact.rb +22 -0
- data/lib/rules/base.rb +33 -0
- data/lib/rules/build_publish_same_job.rb +39 -0
- data/lib/rules/credential_window.rb +43 -0
- data/lib/rules/curl_pipe_shell.rb +29 -0
- data/lib/rules/dangerous_triggers.rb +43 -0
- data/lib/rules/docker_build_arg_secrets.rb +30 -0
- data/lib/rules/git_config_global.rb +25 -0
- data/lib/rules/missing_env_protection.rb +37 -0
- data/lib/rules/missing_frozen_lockfile.rb +28 -0
- data/lib/rules/missing_permissions.rb +18 -0
- data/lib/rules/missing_persist_creds.rb +51 -0
- data/lib/rules/missing_timeouts.rb +25 -0
- data/lib/rules/overly_broad_triggers.rb +31 -0
- data/lib/rules/shell_injection_expr.rb +57 -0
- data/lib/rules/shell_injection_jq.rb +59 -0
- data/lib/rules/static_aws_credentials.rb +33 -0
- data/lib/rules/unpinned_actions.rb +35 -0
- data/lib/rules/unpinned_docker_image.rb +25 -0
- data/lib/rules/unscoped_app_token.rb +31 -0
- data/lib/scanner.rb +95 -0
- data/lib/sha_resolver.rb +60 -0
- data/lib/version.rb +3 -0
- data/lib/workflow.rb +100 -0
- metadata +84 -0
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
module Rules
  # Flags secrets passed as Docker build-args. Build-args are baked into
  # image layer metadata and can be recovered later with `docker history`.
  class DockerBuildArgSecrets < Base
    def name = "docker-build-arg-secrets"
    def description = "Secrets passed as Docker build-args (visible in image layers)"
    def severity = :high

    # How many lines after `build-args:` we inspect before assuming the
    # mapping has ended.
    SCAN_WINDOW = 20

    def check(workflow)
      findings = []

      workflow.lines_of(/build-args:/).each do |line_num|
        # Inline form: `build-args: TOKEN=${{ secrets.X }}` on the key line.
        findings << secret_finding(workflow, line_num) if secret_on_line?(workflow, line_num)

        ((line_num + 1)..(line_num + SCAN_WINDOW)).each do |i|
          break if i > workflow.raw_lines.length

          line = workflow.line_content(i)
          # BUG FIX: the previous break regex (^\s*\w+:) could not match
          # hyphenated YAML keys such as `cache-from:`, so the scan ran past
          # the build-args mapping and could flag secrets under unrelated
          # keys. `[\w-]+` ends the block on any sibling key — unless the
          # line is a KEY=value arg entry, which is part of the mapping.
          break if line&.match?(/^\s*[\w-]+:/) && !line.match?(/^\s+["']?[A-Z_]+=/)

          findings << secret_finding(workflow, i) if secret_on_line?(workflow, i)
        end
      end

      findings
    end

    private

    # True when the given line references the `secrets.` context.
    def secret_on_line?(workflow, line_num)
      workflow.line_content(line_num)&.match?(/secrets\./)
    end

    def secret_finding(workflow, line_num)
      finding(workflow,
        line: line_num,
        code: workflow.line_content(line_num).strip,
        message: "Secret in Docker build-arg — extractable via docker history",
        fix: "Use --secret flag or RUN --mount=type=secret instead of build-arg"
      )
    end
  end
end
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
module Rules
  # Flags `git config --global` invocations that write credential-related
  # settings (insteadOf / url rewrites / credential helpers) into
  # ~/.gitconfig, where they outlive the repository clone.
  class GitConfigGlobal < Base
    def name = "git-config-global"
    def description = "git config --global persists credentials beyond the repo clone"
    def severity = :medium

    def check(workflow)
      workflow.lines_of(/git config --global/).filter_map do |line_num|
        content = workflow.line_content(line_num)
        # Only credential-bearing settings are a risk; plain identity config
        # (user.name etc.) is ignored.
        next unless content&.match?(/insteadOf|url\.|credential/)

        finding(workflow,
          line: line_num,
          code: content.strip,
          message: "git config --global writes credentials to ~/.gitconfig — accessible to all subsequent git operations",
          fix: "Use --local instead of --global to scope to the repo clone"
        )
      end
    end
  end
end
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
module Rules
  # Flags jobs that publish/deploy (or hold OIDC publish permissions)
  # without a GitHub Environment, i.e. with no human approval gate.
  class MissingEnvProtection < Base
    def name = "missing-env-protection"
    def description = "Publish/deploy job without GitHub Environment protection"
    def severity = :medium

    PUBLISH_INDICATORS = /npm publish|pnpm publish|twine upload|gem push|docker push|railway up|cdk deploy/
    OIDC_PUBLISH = /id-token:\s*write/

    def check(workflow)
      findings = []

      workflow.jobs.each do |job_id, job|
        next if job.key?("environment")
        next unless publishes?(workflow, job) || oidc_enabled?(workflow, job)

        findings << finding(workflow,
          line: workflow.line_of(/^\s+#{Regexp.escape(job_id)}:/) || 0,
          code: "#{job_id}:",
          message: "Publish/deploy job without environment protection — no human gate before publication",
          fix: "Add environment: <name> with required reviewers"
        )
      end

      findings
    end

    private

    # Any step whose run: script invokes a known publish/deploy command.
    def publishes?(workflow, job)
      workflow.steps(job).any? { |step| step["run"]&.match?(PUBLISH_INDICATORS) }
    end

    # id-token: write at either job or workflow scope implies OIDC publishing.
    # NOTE(review): the workflow-scope check is not job-specific, so a
    # workflow-level id-token grant flags every unprotected job — presumably
    # intentional breadth; confirm.
    def oidc_enabled?(workflow, job)
      workflow.permissions(scope: :job, job: job)&.to_s&.match?(OIDC_PUBLISH) ||
        workflow.permissions(scope: :workflow)&.to_s&.match?(OIDC_PUBLISH)
    end
  end
end
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
module Rules
  # Flags npm/pnpm/yarn installs that do not enforce the lockfile,
  # allowing dependency resolution to drift from the tested versions.
  class MissingFrozenLockfile < Base
    def name = "missing-frozen-lockfile"
    def description = "Package install without lockfile enforcement"
    def severity = :medium

    # `install` not immediately followed by a global or lockfile-enforcing flag.
    INSTALL_WITHOUT_LOCK = /(?:npm|pnpm|yarn)\s+install(?!\s+(-g|--global|--frozen-lockfile|--ci|--immutable))/

    def check(workflow)
      findings = []

      workflow.raw_lines.each_with_index do |content, idx|
        next unless flaggable?(content)

        findings << finding(workflow,
          line: idx + 1,
          code: content.strip,
          message: "Package install without --frozen-lockfile — dependency resolution may differ from tested versions",
          fix: "Use pnpm install --frozen-lockfile or npm ci"
        )
      end

      findings
    end

    private

    # A line is flagged when it matches the unsafe install pattern, carries
    # no lockfile-enforcing flag anywhere on the line, and is not a comment.
    def flaggable?(content)
      return false unless content.match?(INSTALL_WITHOUT_LOCK)
      return false if content.match?(/--frozen-lockfile|--ci|--immutable|npm ci/)
      return false if content.strip.start_with?("#")

      true
    end
  end
end
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
module Rules
  # Flags workflows lacking a top-level permissions: block; without one,
  # every job inherits the repository's default token permissions.
  class MissingPermissions < Base
    def name = "missing-permissions"
    def description = "No top-level permissions block"
    def severity = :medium

    def check(workflow)
      return [] if workflow.permissions(scope: :workflow)

      # Anchor the finding at the jobs: key, falling back to line 1.
      anchor = workflow.line_of(/^jobs:/) || 1
      [
        finding(workflow,
          line: anchor,
          message: "No top-level permissions block — jobs inherit broad default token permissions",
          fix: "Add permissions: contents: read at the workflow level"
        )
      ]
    end
  end
end
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
module Rules
  # Flags actions/checkout steps that leave the default credential
  # persistence enabled: the workflow token is written into .git/config and
  # remains readable by every later step in the job.
  #
  # Jobs that legitimately push back to the repo (git push / PR-creation
  # actions) are excused when they explicitly set persist-credentials: true.
  class MissingPersistCreds < Base
    def name = "missing-persist-credentials"
    def description = "actions/checkout without persist-credentials: false"
    def severity = :high

    def check(workflow)
      results = []
      # Counts how many times each exact `uses:` string has been reported so
      # repeated identical checkout steps map to successive source lines.
      occurrence = Hash.new(0)

      workflow.jobs.each do |_job_id, job|
        pushes = job_does_push?(job, workflow)

        workflow.steps(job).each do |step|
          uses = step["uses"]
          next unless uses&.match?(/actions\/checkout[@\s]|actions\/checkout$/)

          persist = (step["with"] || {})["persist-credentials"]
          # Explicit opt-out (boolean or quoted string) is fine.
          next if [false, "false"].include?(persist)
          # Explicit opt-in is acceptable when the job genuinely pushes.
          next if pushes && persist == true

          candidates = workflow.lines_of(/uses:\s*#{Regexp.escape(uses)}/)
          line = candidates[occurrence[uses]] || candidates.last
          occurrence[uses] += 1

          results << finding(workflow,
            line: line || 0,
            code: "uses: #{uses}",
            message: "Checkout without persist-credentials: false — token persists in .git/config",
            fix: "Add persist-credentials: false to the with: block"
          )
        end
      end

      results
    end

    private

    # True when any step in the job pushes commits or opens a pull request.
    def job_does_push?(job, workflow)
      workflow.steps(job).any? { |s|
        run = s["run"]&.to_s
        run&.match?(/git push|gh pr create|peter-evans\/create-pull-request/) ||
          s["uses"]&.match?(/create-pull-request|yaml-update-action/)
      }
    end
  end
end
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
module Rules
  # Flags jobs without timeout-minutes; GitHub's default is 360 minutes,
  # so a hung job can burn six hours of runner time.
  class MissingTimeouts < Base
    def name = "missing-timeouts"
    def description = "Job without timeout-minutes"
    def severity = :medium

    def check(workflow)
      workflow.jobs.filter_map do |job_id, job|
        next if job.key?("timeout-minutes")

        finding(workflow,
          line: workflow.line_of(/^\s+#{Regexp.escape(job_id)}:/) || 0,
          code: "#{job_id}:",
          message: "Job '#{job_id}' has no timeout-minutes — default is 360 minutes (6 hours)",
          fix: "Add timeout-minutes: appropriate for the job type (5-30 min)"
        )
      end
    end
  end
end
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
module Rules
  # Flags push / pull_request triggers that carry no branch, tag, or path
  # filter and therefore fire on every branch.
  class OverlyBroadTriggers < Base
    def name = "overly-broad-triggers"
    def description = "Push or pull_request trigger without branch filter"
    def severity = :low

    # Any one of these keys scopes the trigger.
    FILTER_KEYS = %w[branches branches-ignore tags tags-ignore paths paths-ignore].freeze

    def check(workflow)
      triggers = workflow.triggers
      return [] unless triggers.is_a?(Hash)

      %w[push pull_request].filter_map do |trigger|
        next unless triggers.key?(trigger)
        next unless unfiltered?(triggers[trigger])

        finding(workflow,
          line: workflow.line_of(/^\s+#{trigger}:/) || 0,
          code: "#{trigger}:",
          message: "'#{trigger}' trigger with no branch filter — runs on all branches",
          fix: "Add branches: [main] to scope the trigger"
        )
      end
    end

    private

    # A bare trigger (nil / true) or a mapping with none of the scoping keys.
    def unfiltered?(config)
      return true if config.nil? || config == true

      config.is_a?(Hash) && FILTER_KEYS.none? { |key| config.key?(key) }
    end
  end
end
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
module Rules
  # Flags attacker-controllable ${{ }} expressions interpolated directly
  # into run: scripts — the classic GitHub Actions shell-injection vector.
  class ShellInjectionExpr < Base
    def name = "shell-injection-expr"
    def description = "Attacker-controllable ${{ }} expression in run: block"
    def severity = :critical

    # Event payload fields an external contributor can influence.
    DANGEROUS_CONTEXTS = %w[
      github.event.pull_request.title
      github.event.pull_request.body
      github.event.pull_request.head.ref
      github.event.pull_request.head.label
      github.event.issue.title
      github.event.issue.body
      github.event.comment.body
      github.event.review.body
      github.event.discussion.title
      github.event.discussion.body
      github.event.workflow_run.head_branch
      github.head_ref
      github.actor
      github.triggering_actor
    ].freeze

    PATTERN = /\$\{\{\s*(#{DANGEROUS_CONTEXTS.map { |c| Regexp.escape(c) }.join('|')})/

    def check(workflow)
      workflow.lines_of(PATTERN).filter_map do |line_num|
        content = workflow.line_content(line_num)
        next unless in_run_block?(workflow, line_num)

        match = content.match(PATTERN)
        next unless match

        finding(workflow,
          line: line_num,
          code: content.strip,
          message: "Attacker-controllable expression ${{ #{match[1]} }} in run: block — shell injection risk",
          fix: "Move to env: block and reference as $ENV_VAR in the shell"
        )
      end
    end

    private

    # Walks up to 20 lines backwards looking for the run: key that opens the
    # current block. raw_lines appears 0-indexed while line numbers are
    # 1-based, so the scan starts on the flagged line itself. Encountering
    # any other step key first means the expression is not in a run: script.
    def in_run_block?(workflow, target_line)
      (target_line - 1).downto([target_line - 20, 0].max) do |i|
        content = workflow.raw_lines[i]
        return true if content&.match?(/^\s+run:\s*[\|>]?\s*$/) || content&.match?(/^\s+run:\s+\S/)
        return true if content&.match?(/^\s+-\s+run:\s*[\|>]?\s*$/) || content&.match?(/^\s+-\s+run:\s+\S/)
        return false if content&.match?(/^\s+(uses|with|if|id|name|env):/) || content&.match?(/^\s+-\s+name:/)
      end
      false
    end
  end
end
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
module Rules
  # Flags shell variables interpolated inside double-quoted jq / curl JSON
  # arguments: in a double-quoted string bash performs command substitution,
  # so $(...) inside an attacker-controlled value executes.
  class ShellInjectionJq < Base
    def name = "shell-injection-jq"
    def description = "Shell variable interpolated in double-quoted jq/curl JSON argument"
    def severity = :critical

    # Env var names conventionally fed from attacker-controlled event fields.
    ATTACKER_ENV_VARS = %w[
      PR_TITLE PR_BODY PR_AUTHOR HEAD_REF ISSUE_TITLE ISSUE_BODY COMMENT_BODY
      PR_HEAD_REF BRANCH_NAME
    ].freeze

    JQ_PATTERN = /jq\s+([a-zA-Z-]+\s+)*--arg\s+\w+\s+"[^"]*\$\{/
    CURL_JSON_PATTERN = /curl\s.*-d\s+"[^"]*\$\{/

    def check(workflow)
      findings = []

      workflow.raw_lines.each_with_index do |line, i|
        line_num = i + 1

        # BUG FIX: the two patterns are now evaluated independently.
        # Previously a `next` inside the jq branch (no ${VAR} capture, or a
        # benign variable) aborted the whole iteration and silently skipped
        # the curl check for the same line.
        if line.match?(JQ_PATTERN) && (var_name = attacker_var(line))
          findings << finding(workflow,
            line: line_num,
            code: line.strip,
            message: "${#{var_name}} interpolated in double-quoted jq argument — $(command) executes via bash substitution",
            fix: "Use jq --arg: jq -nc --arg #{var_name.downcase} \"$#{var_name}\" '{text: $#{var_name.downcase}}'"
          )
        end

        if line.match?(CURL_JSON_PATTERN) && (var_name = attacker_var(line))
          findings << finding(workflow,
            line: line_num,
            code: line.strip,
            message: "${#{var_name}} interpolated in double-quoted curl JSON — command substitution risk",
            fix: "Build JSON payload with jq -nc --arg instead of string interpolation"
          )
        end
      end

      findings
    end

    private

    # Returns the first ${VAR} name on the line when it looks attacker
    # controlled; nil otherwise.
    def attacker_var(line)
      name = line[/\$\{(\w+)\}/, 1]
      name if name && potentially_attacker_controlled?(name)
    end

    def potentially_attacker_controlled?(var_name)
      ATTACKER_ENV_VARS.any? { |v| var_name.upcase == v } ||
        var_name.match?(/^(PR_|ISSUE_|COMMENT_)?(TITLE|BODY|HEAD_REF|BRANCH_NAME|COMMENT_BODY|AUTHOR)$/i)
    end
  end
end
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
module Rules
  # Flags configure-aws-credentials steps that authenticate with static
  # access keys rather than short-lived OIDC role assumption.
  class StaticAwsCredentials < Base
    def name = "static-aws-credentials"
    def description = "AWS credentials using static keys instead of OIDC"
    def severity = :high

    def check(workflow)
      findings = []

      workflow.jobs.each do |_job_id, job|
        workflow.steps(job).each do |step|
          next unless step["uses"]&.include?("configure-aws-credentials")

          inputs = step["with"] || {}
          # Static keys without role-to-assume means no OIDC federation.
          next unless inputs.key?("aws-access-key-id") && !inputs.key?("role-to-assume")

          findings << finding(workflow,
            line: workflow.line_of(/aws-access-key-id/) || 0,
            code: "aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}",
            message: "Static AWS access keys — long-lived credentials that don't auto-expire",
            fix: "Use OIDC federation: role-to-assume with id-token: write permission"
          )
        end
      end

      findings
    end
  end
end
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
module Rules
  # Flags third-party (and, at lower severity, first-party) actions
  # referenced by mutable tag/branch instead of an immutable commit SHA.
  class UnpinnedActions < Base
    def name = "unpinned-actions"
    def description = "Action referenced by tag instead of SHA pin"
    def severity = :critical

    # A full 40-hex-char commit SHA after the @.
    SHA_PATTERN = /@[0-9a-f]{40}\b/
    # Prefixes GitHub itself maintains; unpinned use is less risky there.
    FIRST_PARTY = %w[actions/ github/].freeze

    def check(workflow)
      findings = []
      workflow.uses_actions.each do |action|
        uses = action[:uses]
        next if uses.nil?
        next if uses.start_with?("./")          # local composite action
        next if uses.start_with?("docker://")   # image pinning is a separate rule
        next if uses.match?(SHA_PATTERN)

        first_party = FIRST_PARTY.any? { |prefix| uses.start_with?(prefix) }
        sev = first_party ? :medium : :critical

        # BUG FIX: when `uses` had no @ref, split('@').last returned the
        # whole string, yielding a nonsense trailing comment in the fix
        # suggestion. Only append the "# <ref>" comment when a ref exists.
        base, ref = uses.split("@", 2)
        suggested = "uses: #{base}@<commit-sha>"
        suggested += " # #{ref}" if ref

        findings << Finding.new(
          rule: name,
          severity: sev,
          file: workflow.filename,
          line: action[:line] || 0,
          code: "uses: #{uses}",
          message: "Action '#{uses}' is not SHA-pinned — tag references are mutable",
          fix: "Pin to a commit SHA: #{suggested}"
        )
      end
      findings
    end
  end
end
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
module Rules
  # Flags container/image references using the mutable :latest tag.
  class UnpinnedDockerImage < Base
    def name = "unpinned-docker-image"
    def description = "Docker image referenced by :latest tag"
    def severity = :low

    # Contexts in which a :latest reference denotes a Docker image.
    IMAGE_CONTEXT = /docker:\/\/.*:latest|image:.*:latest|uses:.*:latest|docker:.*:latest|container:.*:latest/

    def check(workflow)
      workflow.lines_of(/:latest\b/).filter_map do |line_num|
        content = workflow.line_content(line_num)
        next unless content&.match?(IMAGE_CONTEXT)

        finding(workflow,
          line: line_num,
          code: content.strip,
          message: "Docker image uses :latest tag — mutable, not reproducible",
          fix: "Pin to a specific digest: image@sha256:..."
        )
      end
    end
  end
end
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
module Rules
  # Flags create-github-app-token steps that request no permission-*
  # inputs, yielding a token with the App's full installation scope.
  class UnscopedAppToken < Base
    def name = "unscoped-app-token"
    def description = "GitHub App token without scoped permissions"
    def severity = :high

    def check(workflow)
      findings = []

      workflow.jobs.each do |_job_id, job|
        workflow.steps(job).each do |step|
          next unless step["uses"]&.include?("create-github-app-token")

          inputs = step["with"] || {}
          scoped = inputs.keys.any? { |key| key.start_with?("permission-") }
          next if scoped

          findings << finding(workflow,
            line: workflow.line_of(/create-github-app-token/) || 0,
            message: "App token inherits blanket installation permissions",
            fix: "Add permission-<name>: write inputs to scope the token"
          )
        end
      end

      findings
    end
  end
end
|
data/lib/scanner.rb
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
require_relative "finding"
|
|
2
|
+
require_relative "workflow"
|
|
3
|
+
require_relative "rule_engine"
|
|
4
|
+
require_relative "github_client"
|
|
5
|
+
require_relative "local_client"
|
|
6
|
+
require_relative "clone_client"
|
|
7
|
+
require_relative "formatter/terminal"
|
|
8
|
+
require_relative "formatter/json"
|
|
9
|
+
|
|
10
|
+
# Orchestrates a scan: fetches workflows via the injected client, runs the
# rule engine over each parseable workflow, appends repo-level hygiene
# findings (Dependabot / zizmor coverage), filters by minimum severity, and
# renders the result through the injected formatter.
class Scanner
  # client:       fetch_workflows / fetch_dependabot_config / fetch_repos source
  # formatter:    Formatter::Terminal or Formatter::Json
  # min_severity: least-severe finding level to keep (default :low keeps all)
  def initialize(client:, formatter:, min_severity: :low)
    @client = client
    @formatter = formatter
    @min_severity = min_severity
    @engine = RuleEngine.new
  end

  # Scans a single repository.
  # Returns { output:, findings:, workflow_count: }.
  def scan(repo)
    workflows = @client.fetch_workflows(repo).map { |w|
      Workflow.new(filename: w[:filename], content: w[:content])
    }

    # Gather repo-level facts before rule scanning (preserves client call order).
    dependabot_covers_actions = dependabot_has_actions?(@client.fetch_dependabot_config(repo))
    zizmor_present = workflows.any? { |w| w.filename.match?(/zizmor/i) }

    findings = []
    workflows.reject(&:parse_error?).each { |wf| findings.concat(@engine.scan(wf)) }

    findings << missing_dependabot_finding unless dependabot_covers_actions
    findings << missing_zizmor_finding unless zizmor_present

    findings.select! { |f| severity_passes?(f.severity) }

    output = @formatter.format(
      repo: repo,
      workflow_count: workflows.length,
      findings: findings
    )

    { output: output, findings: findings, workflow_count: workflows.length }
  end

  # Scans every repository in an organization; returns an array of scan results.
  def scan_org(org)
    @client.fetch_repos(org).map { |repo|
      # Progress goes to stderr only in terminal mode so JSON output stays clean.
      $stderr.puts "Scanning #{repo}..." if @formatter.is_a?(Formatter::Terminal)
      scan(repo)
    }
  end

  private

  def missing_dependabot_finding
    Finding.new(
      rule: "missing-dependabot",
      severity: :low,
      file: "dependabot.yml",
      line: 0,
      code: nil,
      message: "No Dependabot configuration for github-actions ecosystem",
      fix: "Add package-ecosystem: github-actions to .github/dependabot.yml"
    )
  end

  def missing_zizmor_finding
    Finding.new(
      rule: "missing-zizmor",
      severity: :low,
      file: "(missing)",
      line: 0,
      code: nil,
      message: "No zizmor static analysis workflow found",
      fix: "Add a security_zizmor.yml workflow for GitHub Actions static analysis"
    )
  end

  # True when the dependabot config declares a github-actions update entry.
  def dependabot_has_actions?(config)
    return false unless config.is_a?(Hash)
    updates = config["updates"]
    return false unless updates.is_a?(Array)
    updates.any? { |u| u["package-ecosystem"] == "github-actions" }
  end

  # Unknown severities rank last (99) and only pass an unknown min_severity.
  def severity_passes?(sev)
    (Finding::SEVERITY_ORDER[sev] || 99) <= (Finding::SEVERITY_ORDER[@min_severity] || 99)
  end
end
|
data/lib/sha_resolver.rb
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
require "net/http"
|
|
2
|
+
require "json"
|
|
3
|
+
require "uri"
|
|
4
|
+
|
|
5
|
+
# Resolves a GitHub Action ref (tag/branch) to its commit SHA via the
# GitHub REST API "get a commit" endpoint. Results — including failures —
# are memoized per instance so repeated lookups don't consume API quota.
class ShaResolver
  API_BASE = "https://api.github.com"

  # token: GitHub API token; falls back to the GITHUB_TOKEN env var.
  def initialize(token: nil)
    @token = token || ENV["GITHUB_TOKEN"]
    @cache = {}
  end

  # owner_action: e.g. "actions/checkout" or "actions/checkout/subdir"
  # tag:          tag, branch, or ref to resolve
  # Returns the commit SHA string, or nil when resolution fails.
  def resolve(owner_action, tag)
    repo = extract_repo(owner_action)
    key = "#{repo}@#{tag}"
    # BUG FIX: `@cache[key] ||= fetch_sha(...)` never memoized nil results,
    # so every failed lookup (404 / 403 / network error) was retried on each
    # call — worst exactly when we are being rate limited. Explicit
    # key?-check caches failures too.
    return @cache[key] if @cache.key?(key)

    @cache[key] = fetch_sha(repo, tag)
  end

  private

  # "owner/repo[/path]" -> "owner/repo"
  def extract_repo(owner_action)
    parts = owner_action.split("/")
    "#{parts[0]}/#{parts[1]}"
  end

  # Performs the API request. Returns the SHA on 200, nil on any failure
  # (logging a diagnostic to stderr in each failure branch).
  def fetch_sha(repo, tag)
    encoded_repo = repo.split("/").map { |p| URI.encode_www_form_component(p) }.join("/")
    encoded_tag = URI.encode_www_form_component(tag)
    uri = URI("#{API_BASE}/repos/#{encoded_repo}/commits/#{encoded_tag}")
    req = Net::HTTP::Get.new(uri)
    req["Accept"] = "application/vnd.github+json"
    req["Authorization"] = "Bearer #{@token}" if @token
    req["X-GitHub-Api-Version"] = "2022-11-28"

    http = Net::HTTP.new(uri.host, uri.port)
    http.use_ssl = true
    http.open_timeout = 10
    http.read_timeout = 30

    resp = http.request(req)

    case resp.code.to_i
    when 200
      JSON.parse(resp.body)["sha"]
    when 404
      $stderr.puts "ShaResolver: tag '#{tag}' not found for #{repo}"
      nil
    when 403
      $stderr.puts "ShaResolver: rate limited or forbidden for #{repo}"
      nil
    else
      $stderr.puts "ShaResolver: API error #{resp.code} for #{repo}@#{tag}"
      nil
    end
  rescue JSON::ParserError
    # BUG FIX: a 200 response with a malformed body previously raised out of
    # the resolver; treat it like any other failed resolution.
    $stderr.puts "SHA resolve failed for #{repo}@#{tag}: invalid JSON response"
    nil
  rescue Net::OpenTimeout, Net::ReadTimeout, SocketError, Errno::ECONNREFUSED => e
    $stderr.puts "SHA resolve failed for #{repo}@#{tag}: #{e.message}"
    nil
  end
end
|
data/lib/version.rb
ADDED