sentinel-ci 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +235 -0
- data/bin/gh-workflow-scanner +1 -0
- data/bin/sentinel +57 -0
- data/lib/auto_fix.rb +485 -0
- data/lib/cli/bot.rb +53 -0
- data/lib/cli/fix.rb +50 -0
- data/lib/cli/scan.rb +145 -0
- data/lib/clone_client.rb +64 -0
- data/lib/finding.rb +27 -0
- data/lib/formatter/json.rb +18 -0
- data/lib/formatter/terminal.rb +47 -0
- data/lib/github_client.rb +98 -0
- data/lib/local_client.rb +33 -0
- data/lib/rule_engine.rb +39 -0
- data/lib/rules/allow_forks_artifact.rb +22 -0
- data/lib/rules/base.rb +33 -0
- data/lib/rules/build_publish_same_job.rb +39 -0
- data/lib/rules/credential_window.rb +43 -0
- data/lib/rules/curl_pipe_shell.rb +29 -0
- data/lib/rules/dangerous_triggers.rb +43 -0
- data/lib/rules/docker_build_arg_secrets.rb +30 -0
- data/lib/rules/git_config_global.rb +25 -0
- data/lib/rules/missing_env_protection.rb +37 -0
- data/lib/rules/missing_frozen_lockfile.rb +28 -0
- data/lib/rules/missing_permissions.rb +18 -0
- data/lib/rules/missing_persist_creds.rb +51 -0
- data/lib/rules/missing_timeouts.rb +25 -0
- data/lib/rules/overly_broad_triggers.rb +31 -0
- data/lib/rules/shell_injection_expr.rb +57 -0
- data/lib/rules/shell_injection_jq.rb +59 -0
- data/lib/rules/static_aws_credentials.rb +33 -0
- data/lib/rules/unpinned_actions.rb +35 -0
- data/lib/rules/unpinned_docker_image.rb +25 -0
- data/lib/rules/unscoped_app_token.rb +31 -0
- data/lib/scanner.rb +95 -0
- data/lib/sha_resolver.rb +60 -0
- data/lib/version.rb +3 -0
- data/lib/workflow.rb +100 -0
- metadata +84 -0
data/lib/cli/scan.rb
ADDED
@@ -0,0 +1,145 @@
require "optparse"
require_relative "../scanner"

options = {
  format: "terminal",
  severity: :low,
}

parser = OptionParser.new do |opts|
  opts.banner = "Usage: sentinel scan [options] [REPO]"
  opts.separator ""
  opts.separator "Scan GitHub Actions workflows for security issues."
  opts.separator ""

  opts.on("--format FORMAT", %w[terminal json], "Output format: terminal (default) or json") do |f|
    options[:format] = f
  end

  opts.on("--severity LEVEL", %i[critical high medium low],
          "Minimum severity: critical, high, medium, low (default: low)") do |s|
    options[:severity] = s
  end

  opts.on("--local PATH", "Scan a local directory instead of GitHub API") do |p|
    options[:local] = p
  end

  opts.on("--org ORG", "Scan all repos in a GitHub organization") do |o|
    options[:org] = o
  end

  opts.on("--token TOKEN", "GitHub API token (default: GITHUB_TOKEN env var)") do |t|
    options[:token] = t
  end

  opts.on("-h", "--help", "Show this help message") do
    puts opts
    exit 0
  end
end

begin
  parser.parse!
rescue OptionParser::InvalidArgument, OptionParser::InvalidOption => e
  $stderr.puts e.message
  $stderr.puts parser
  exit 2
end

repo = ARGV.shift

modes = [options[:local], options[:org], repo].compact
if modes.empty?
  $stderr.puts "Error: must specify --local PATH, --org ORG, or a REPO argument"
  $stderr.puts parser
  exit 2
elsif modes.length > 1
  $stderr.puts "Error: specify only one of --local, --org, or REPO"
  $stderr.puts parser
  exit 2
end

def resolve_token(options)
  return options[:token] if options[:token]
  return ENV["GITHUB_TOKEN"] if ENV["GITHUB_TOKEN"]

  gh_path = `which gh 2>/dev/null`.strip
  if !gh_path.empty? && system("gh", "auth", "status", [:out, :err] => File::NULL)
    token = `gh auth token 2>/dev/null`.strip
    return token unless token.empty?
  end

  nil
end

token = resolve_token(options)

client = if options[:local]
  LocalClient.new(options[:local])
elsif options[:org]
  unless token
    $stderr.puts "Error: --org requires a GitHub token to list repos."
    $stderr.puts ""
    $stderr.puts "  export GITHUB_TOKEN=$(gh auth token)"
    $stderr.puts "  sentinel scan --org #{options[:org]}"
    exit 2
  end
  GitHubClient.new(token: token)
else
  if token
    GitHubClient.new(token: token)
  else
    CloneClient.new
  end
end

formatter = case options[:format]
when "json" then Formatter::Json.new
else Formatter::Terminal.new
end

scanner = Scanner.new(client: client, formatter: formatter, min_severity: options[:severity])

all_findings = []

begin
  if options[:local]
    result = scanner.scan(options[:local])
    puts result[:output]
    all_findings.concat(result[:findings])
  elsif options[:org]
    results = scanner.scan_org(options[:org])

    if options[:format] == "json"
      combined = results.map { |r| JSON.parse(r[:output]) }
      puts JSON.pretty_generate(combined)
    else
      results.each { |r| puts r[:output] }

      totals = Hash.new(0)
      results.each do |r|
        r[:findings].each { |f| totals[f.severity] += 1 }
      end

      summary = Finding::SEVERITIES
        .select { |s| totals[s] > 0 }
        .map { |s| "#{totals[s]} #{s}" }
        .join(", ")

      total = results.sum { |r| r[:findings].length }
      $stderr.puts "\nOrg scan complete: #{results.length} repos, #{total} findings (#{summary})"
    end

    results.each { |r| all_findings.concat(r[:findings]) }
  else
    result = scanner.scan(repo)
    puts result[:output]
    all_findings.concat(result[:findings])
  end

  has_critical_or_high = all_findings.any? { |f| f.critical? || f.high? }
  exit(has_critical_or_high ? 1 : 0)
ensure
  client.cleanup if client.respond_to?(:cleanup)
end
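The CLI encodes scan results in its exit status: 2 for usage errors, 1 when any critical or high finding was reported, 0 otherwise. A minimal sketch of gating CI on that contract (the `sentinel` executable on PATH and the repo name are assumptions):

# Hypothetical CI gate over the exit codes defined in scan.rb above.
system("sentinel", "scan", "my-org/my-repo")
case $?.exitstatus
when 0 then puts "workflow hygiene OK"
when 1 then abort "blocking: critical/high findings reported"
else abort "sentinel usage or invocation error"
end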
data/lib/clone_client.rb
ADDED
@@ -0,0 +1,64 @@
require "tmpdir"
require "fileutils"
require_relative "local_client"

class CloneClient
  REPO_FORMAT = %r{\A[A-Za-z0-9\-_.]+/[A-Za-z0-9\-_.]+\z}

  def initialize
    @tmpdir = nil
  end

  def fetch_workflows(repo)
    unless repo.match?(REPO_FORMAT)
      $stderr.puts "Invalid repo format: #{repo} (expected owner/repo)"
      return []
    end

    @tmpdir = Dir.mktmpdir("sentinel-")

    # Shallow sparse clone — only .github/ directory
    success = system(
      "git", "clone", "--depth", "1", "--filter=blob:none", "--sparse",
      "https://github.com/#{repo}.git", @tmpdir,
      [:out, :err] => File::NULL
    )

    unless success
      $stderr.puts ""
      $stderr.puts "ERROR: Could not access #{repo}"
      $stderr.puts ""
      $stderr.puts "This repo may be private. To scan private repos:"
      $stderr.puts ""
      $stderr.puts "  export GITHUB_TOKEN=$(gh auth token)"
      $stderr.puts "  sentinel scan #{repo}"
      $stderr.puts ""
      $stderr.puts "Or pass a token directly:"
      $stderr.puts ""
      $stderr.puts "  sentinel scan --token ghp_xxx #{repo}"
      $stderr.puts ""
      exit 2
    end

    system(
      "git", "-C", @tmpdir, "sparse-checkout", "set", ".github",
      [:out, :err] => File::NULL
    )

    LocalClient.new(@tmpdir).fetch_workflows(repo)
  end

  def fetch_dependabot_config(repo)
    return nil unless @tmpdir
    LocalClient.new(@tmpdir).fetch_dependabot_config(repo)
  end

  def file_exists?(repo, path)
    return false unless @tmpdir
    File.exist?(File.join(@tmpdir, path))
  end

  def cleanup
    FileUtils.rm_rf(@tmpdir) if @tmpdir
  end
end
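A sketch of the tokenless fallback path this client provides, mirroring the `ensure` cleanup in scan.rb (the repo name is a placeholder):

client = CloneClient.new
begin
  workflows = client.fetch_workflows("rails/rails") # placeholder public repo
  puts "fetched #{workflows.length} workflow files"
ensure
  client.cleanup # always remove the sparse-clone tmpdir
end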
data/lib/finding.rb
ADDED
@@ -0,0 +1,27 @@
Finding = Struct.new(:rule, :severity, :file, :line, :code, :message, :fix, keyword_init: true)

class Finding
  SEVERITIES = %i[critical high medium low].freeze
  SEVERITY_ORDER = SEVERITIES.each_with_index.to_h.freeze

  def <=>(other)
    (SEVERITY_ORDER[severity] || 99) <=> (SEVERITY_ORDER[other.severity] || 99)
  end

  def critical? = severity == :critical
  def high? = severity == :high
  def medium? = severity == :medium
  def low? = severity == :low

  def to_h
    {
      rule: rule,
      severity: severity.to_s,
      file: file,
      line: line,
      code: code,
      message: message,
      fix: fix
    }
  end
end
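Because SEVERITY_ORDER maps :critical to 0, a plain `findings.sort` lists the most severe findings first. A self-contained check (assuming it is run from inside data/lib/):

require_relative "finding"

findings = [
  Finding.new(rule: "missing-timeouts", severity: :low, file: "ci.yml", line: 3),
  Finding.new(rule: "dangerous-triggers", severity: :critical, file: "ci.yml", line: 1),
]
puts findings.sort.map(&:severity).inspect # => [:critical, :low]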
data/lib/formatter/json.rb
ADDED
@@ -0,0 +1,18 @@
require "json"

module Formatter
  class Json
    def format(repo:, workflow_count:, findings:)
      summary = Finding::SEVERITIES.each_with_object({}) { |s, h|
        h[s.to_s] = findings.count { |f| f.severity == s }
      }

      JSON.pretty_generate({
        repo: repo,
        workflows: workflow_count,
        findings: findings.sort.map(&:to_h),
        summary: summary
      })
    end
  end
end
data/lib/formatter/terminal.rb
ADDED
@@ -0,0 +1,47 @@
module Formatter
  class Terminal
    COLORS = {
      critical: "\e[31m", # red
      high: "\e[33m",     # yellow
      medium: "\e[36m",   # cyan
      low: "\e[90m",      # dim
      reset: "\e[0m",
      bold: "\e[1m",
      green: "\e[32m",
    }.freeze

    def format(repo:, workflow_count:, findings:)
      lines = []
      lines << ""
      lines << "#{c(:bold)}=== #{repo} (#{workflow_count} workflows) ===#{c(:reset)}"
      lines << ""

      if findings.empty?
        lines << "  #{c(:green)}No findings.#{c(:reset)}"
      else
        findings.sort.each do |f|
          sev = f.severity.to_s.upcase.ljust(10)
          lines << "  #{c(f.severity)}#{sev}#{c(:reset)} #{c(:bold)}#{f.rule}#{c(:reset)} #{f.file}:#{f.line}"
          lines << "    #{f.message}"
          lines << "    #{c(:green)}Fix: #{f.fix}#{c(:reset)}" if f.fix
          lines << ""
        end

        summary = Finding::SEVERITIES.map { |s|
          count = findings.count { |f| f.severity == s }
          next nil if count == 0
          "#{c(s)}#{count} #{s}#{c(:reset)}"
        }.compact.join(", ")

        lines << "  --- Summary: #{summary} ---"
      end

      lines << ""
      lines.join("\n")
    end

    private

    def c(name) = COLORS[name] || ""
  end
end
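Both formatters expose the same keyword interface, so the CLI can swap them freely. A quick sketch, run from inside data/lib/ (the finding's message and fix text are invented for illustration):

require_relative "finding"
require_relative "formatter/terminal"
require_relative "formatter/json"

finding = Finding.new(
  rule: "unpinned-actions", severity: :high, file: "release.yml", line: 12,
  message: "action pinned to a mutable tag",  # illustrative text
  fix: "pin the action to a full commit SHA"  # illustrative text
)
puts Formatter::Terminal.new.format(repo: "demo/repo", workflow_count: 1, findings: [finding])
puts Formatter::Json.new.format(repo: "demo/repo", workflow_count: 1, findings: [finding])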
data/lib/github_client.rb
ADDED
@@ -0,0 +1,98 @@
require "net/http"
require "json"
require "uri"
require "base64"
require "yaml"

class GitHubClient
  API_BASE = "https://api.github.com"

  def initialize(token: nil)
    @token = token || ENV["GITHUB_TOKEN"]
  end

  def fetch_workflows(repo)
    workflows = []
    files = api_get("/repos/#{repo}/contents/.github/workflows")
    return workflows unless files.is_a?(Array)

    files.each do |f|
      next unless f["name"].end_with?(".yml", ".yaml")
      content = fetch_file_content(repo, f["path"])
      next unless content
      workflows << { filename: f["name"], content: content }
    end

    workflows
  end

  def fetch_file_content(repo, path)
    data = api_get("/repos/#{repo}/contents/#{path}")
    return nil unless data.is_a?(Hash) && data["content"]
    Base64.decode64(data["content"])
  end

  def fetch_repos(org)
    repos = []
    page = 1
    loop do
      batch = api_get("/orgs/#{org}/repos?per_page=100&page=#{page}&type=all")
      break unless batch.is_a?(Array) && !batch.empty?
      batch.each do |r|
        next if r["archived"]
        repos << r["full_name"]
      end
      page += 1
      break if batch.length < 100
    end
    repos.sort
  end

  def file_exists?(repo, path)
    # api_get returns nil on 404 rather than raising, so test the result directly
    !api_get("/repos/#{repo}/contents/#{path}").nil?
  rescue StandardError
    false
  end

  def fetch_dependabot_config(repo)
    content = fetch_file_content(repo, ".github/dependabot.yml")
    content ||= fetch_file_content(repo, ".github/dependabot.yaml")
    return nil unless content
    begin
      YAML.safe_load(content)
    rescue StandardError
      nil
    end
  end

  private

  def api_get(path)
    uri = URI("#{API_BASE}#{path}")
    req = Net::HTTP::Get.new(uri)
    req["Accept"] = "application/vnd.github+json"
    req["Authorization"] = "Bearer #{@token}" if @token
    req["X-GitHub-Api-Version"] = "2022-11-28"

    http = Net::HTTP.new(uri.host, uri.port)
    http.use_ssl = true
    http.open_timeout = 10
    http.read_timeout = 30

    resp = http.request(req)

    case resp.code.to_i
    when 200
      JSON.parse(resp.body)
    when 404
      nil
    when 403
      $stderr.puts "Rate limited or forbidden: #{path}"
      nil
    else
      $stderr.puts "API error #{resp.code}: #{path}"
      nil
    end
  end
end
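A minimal sketch of driving the API client directly; it needs network access, and a token for private repos (the repo name is a placeholder):

client = GitHubClient.new(token: ENV["GITHUB_TOKEN"])
client.fetch_workflows("rails/rails").each do |wf| # placeholder repo
  puts "#{wf[:filename]} (#{wf[:content].lines.count} lines)"
end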
data/lib/local_client.rb
ADDED
@@ -0,0 +1,33 @@
require "yaml"

class LocalClient
  def initialize(path)
    @path = File.expand_path(path)
    @workflows_dir = File.join(@path, ".github", "workflows")
  end

  def fetch_workflows(_repo = nil)
    workflows = []
    return workflows unless File.directory?(@workflows_dir)

    Dir[File.join(@workflows_dir, "*.{yml,yaml}")].sort.each do |f|
      content = File.read(f)
      workflows << { filename: File.basename(f), content: content }
    end

    workflows
  end

  def file_exists?(_repo, path)
    File.exist?(File.join(@path, path))
  end

  def fetch_dependabot_config(_repo)
    path = File.join(@path, ".github", "dependabot.yml")
    path = File.join(@path, ".github", "dependabot.yaml") unless File.exist?(path)
    return nil unless File.exist?(path)
    begin
      YAML.safe_load(File.read(path))
    rescue StandardError
      nil
    end
  end
end
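LocalClient also backs CloneClient's sparse checkout, so any checkout root on disk can be enumerated the same way:

client = LocalClient.new(".") # any repo root containing .github/workflows
client.fetch_workflows.each { |wf| puts wf[:filename] }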
data/lib/rule_engine.rb
ADDED
@@ -0,0 +1,39 @@
class RuleEngine
  attr_reader :rules

  def initialize
    @rules = []
    load_rules
  end

  def scan(workflow)
    findings = []
    @rules.each do |rule|
      begin
        findings.concat(rule.check(workflow))
      rescue => e
        $stderr.puts "Rule #{rule.name} failed on #{workflow.filename}: #{e.message}"
      end
    end
    findings.sort
  end

  private

  def load_rules
    rules_dir = File.join(__dir__, "rules")
    require File.join(rules_dir, "base.rb")
    Dir[File.join(rules_dir, "*.rb")].sort.each do |file|
      next if File.basename(file) == "base.rb"
      require file
    end

    Rules.constants.each do |const|
      klass = Rules.const_get(const)
      next unless klass.is_a?(Class) && klass < Rules::Base
      @rules << klass.new
    end

    @rules.sort_by! { |r| Finding::SEVERITY_ORDER[r.severity] || 99 }
  end
end
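Rules are discovered from the Rules namespace and pre-sorted by severity, so the loaded rule set can be inspected standalone. A sketch, assuming it is run from inside data/lib/ (finding.rb must be loaded first, since ordering uses Finding::SEVERITY_ORDER):

require_relative "finding"
require_relative "rule_engine"

engine = RuleEngine.new
engine.rules.each { |r| puts format("%-8s %s", r.severity, r.name) } # critical rules first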
data/lib/rules/allow_forks_artifact.rb
ADDED
@@ -0,0 +1,22 @@
module Rules
  class AllowForksArtifact < Base
    def name = "allow-forks-artifact"
    def description = "Artifact download with allow_forks: true in privileged context"
    def severity = :medium

    def check(workflow)
      findings = []

      workflow.lines_of(/allow_forks:\s*true/).each do |line_num|
        findings << finding(workflow,
          line: line_num,
          code: workflow.line_content(line_num).strip,
          message: "Downloading fork-produced artifacts in a privileged workflow_run context",
          fix: "Ensure fork-produced artifact content is not executed or processed unsafely"
        )
      end

      findings
    end
  end
end
data/lib/rules/base.rb
ADDED
@@ -0,0 +1,33 @@
module Rules
  class Base
    def name
      raise NotImplementedError
    end

    def description
      raise NotImplementedError
    end

    def severity
      raise NotImplementedError
    end

    def check(workflow)
      raise NotImplementedError
    end

    private

    def finding(workflow, line:, code: nil, message: nil, fix: nil)
      Finding.new(
        rule: name,
        severity: severity,
        file: workflow.filename,
        line: line,
        code: code || workflow.line_content(line)&.strip,
        message: message || description,
        fix: fix
      )
    end
  end
end
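New rules plug in by subclassing Rules::Base under data/lib/rules/; RuleEngine#load_rules picks them up automatically. A hypothetical rule sketch (the rule name and pattern are invented; lines_of and line_content are the Workflow helpers the bundled rules use):

# Hypothetical file data/lib/rules/self_hosted_runner.rb; Base is already
# required by RuleEngine before rule files are loaded.
module Rules
  class SelfHostedRunner < Base
    def name = "self-hosted-runner"
    def description = "Job runs on a self-hosted runner"
    def severity = :low

    def check(workflow)
      workflow.lines_of(/runs-on:\s*.*self-hosted/).map do |line_num|
        finding(workflow,
          line: line_num,
          code: workflow.line_content(line_num).strip,
          fix: "Review isolation of self-hosted runners for public repos"
        ) # message defaults to description via Base#finding
      end
    end
  end
end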
data/lib/rules/build_publish_same_job.rb
ADDED
@@ -0,0 +1,39 @@
module Rules
  class BuildPublishSameJob < Base
    def name = "build-publish-same-job"
    def description = "Build and publish in same job with publish secrets available during build"
    def severity = :high

    INSTALL_PATTERNS = /npm install|pnpm install|yarn install|pip install|bundle install/
    PUBLISH_PATTERNS = /npm publish|pnpm publish|npx pkg-pr-new|twine upload|gem push/
    PUBLISH_SECRETS = /NPM_TOKEN|PYPI_TOKEN|GEM_HOST_API_KEY|NUGET_API_KEY/

    def check(workflow)
      findings = []

      workflow.jobs.each do |job_id, job|
        steps = workflow.steps(job)
        has_install = steps.any? { |s| s["run"]&.match?(INSTALL_PATTERNS) }
        has_publish = steps.any? { |s| s["run"]&.match?(PUBLISH_PATTERNS) }

        next unless has_install && has_publish

        job_env = job["env"]&.to_s || ""
        step_envs = steps.map { |s| (s["env"] || {}).to_s }.join(" ")
        all_env = job_env + step_envs

        if all_env.match?(PUBLISH_SECRETS) || all_env.match?(/secrets\./)
          line = workflow.line_of(/#{job_id}:/)
          findings << finding(workflow,
            line: line || 0,
            code: "job: #{job_id}",
            message: "Build and publish in same job — a compromised dependency could exfiltrate publish credentials",
            fix: "Split into separate build (read-only) and publish (with secrets) jobs connected via artifacts"
          )
        end
      end

      findings
    end
  end
end
data/lib/rules/credential_window.rb
ADDED
@@ -0,0 +1,43 @@
module Rules
  class CredentialWindow < Base
    def name = "credential-window"
    def description = "Git credentials configured far before push step"
    def severity = :high

    MAX_STEPS_BETWEEN = 5

    def check(workflow)
      findings = []

      workflow.jobs.each do |_job_id, job|
        steps = workflow.steps(job)
        cred_step = nil
        push_step = nil

        steps.each_with_index do |step, i|
          run = step["run"]&.to_s
          if run&.match?(/git config.*insteadOf|git remote set-url/)
            cred_step = i if cred_step.nil?
          end
          if run&.match?(/git push/)
            push_step = i
          end
        end

        next unless cred_step && push_step
        gap = push_step - cred_step

        if gap > MAX_STEPS_BETWEEN
          line = workflow.line_of(/git config.*insteadOf|git remote set-url/)
          findings << finding(workflow,
            line: line || 0,
            message: "Git credentials configured #{gap} steps before push — #{gap - 1} steps have access to the token",
            fix: "Move credential configuration to immediately before the push step"
          )
        end
      end

      findings
    end
  end
end
data/lib/rules/curl_pipe_shell.rb
ADDED
@@ -0,0 +1,29 @@
module Rules
  class CurlPipeShell < Base
    def name = "curl-pipe-shell"
    def description = "Remote script piped directly to shell without integrity check"
    def severity = :high

    PIPE_PATTERN = /curl\s.*\|\s*(sudo\s+)?(sh|bash|zsh|source|\.)/
    WGET_PIPE = /wget\s.*-O\s*-\s*\|\s*(sudo\s+)?(sh|bash|zsh)/

    def check(workflow)
      findings = []

      workflow.raw_lines.each_with_index do |line, i|
        next if line.strip.start_with?("#")

        if line.match?(PIPE_PATTERN) || line.match?(WGET_PIPE)
          findings << finding(workflow,
            line: i + 1,
            code: line.strip,
            message: "Remote script piped to shell — no integrity verification, mutable endpoint",
            fix: "Download first, verify checksum, then execute; or use a pinned GitHub Action instead"
          )
        end
      end

      findings
    end
  end
end
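Detection here is purely line-based, so the patterns can be exercised without a Workflow object (the commands are made-up examples):

PIPE_PATTERN = /curl\s.*\|\s*(sudo\s+)?(sh|bash|zsh|source|\.)/
puts "curl -fsSL https://example.com/install.sh | sudo bash".match?(PIPE_PATTERN)   # => true
puts "curl -fsSL https://example.com/install.sh -o install.sh".match?(PIPE_PATTERN) # => false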
data/lib/rules/dangerous_triggers.rb
ADDED
@@ -0,0 +1,43 @@
module Rules
  class DangerousTriggers < Base
    def name = "dangerous-triggers"
    def description = "pull_request_target with fork code checkout"
    def severity = :critical

    def check(workflow)
      findings = []
      triggers = workflow.triggers

      has_prt = case triggers
                when Hash then triggers.key?("pull_request_target")
                when Array then triggers.include?("pull_request_target")
                when String then triggers == "pull_request_target"
                else false
                end

      return findings unless has_prt

      workflow.jobs.each do |_job_id, job|
        workflow.steps(job).each do |step|
          next unless step["uses"]&.include?("checkout")

          with = step["with"] || {}
          ref = with["ref"]&.to_s || ""

          if ref.match?(/\bgithub\.event\.pull_request\.head\b|\.head_ref\b|pull_request\.head\.sha/i) ||
             ref.match?(/\$\{\{\s*github\.head_ref\s*\}\}/)
            line = workflow.line_of(/ref:.*head/i) || workflow.line_of(/checkout/)
            findings << finding(workflow,
              line: line || 0,
              code: "ref: #{ref}",
              message: "pull_request_target + checkout of PR head — fork code runs with base repo secrets",
              fix: "Use pull_request trigger instead, or don't checkout PR head code"
            )
          end
        end
      end

      findings
    end
  end
end
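The case analysis over `triggers` has to cope with a YAML 1.1 quirk: Psych resolves a bare `on` key to the boolean true, so workflow triggers land under `true` rather than "on". A self-contained sketch of the same normalization (assuming Workflow#triggers returns the parsed value of the `on` key):

require "yaml"

doc = YAML.safe_load(<<~YML)
  on:
    pull_request_target:
      types: [opened]
YML

# Psych parses the unquoted key `on` as the boolean true (YAML 1.1)
triggers = doc["on"] || doc[true]

has_prt = case triggers
          when Hash then triggers.key?("pull_request_target")
          when Array then triggers.include?("pull_request_target")
          when String then triggers == "pull_request_target"
          else false
          end
puts has_prt # => true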