oss-stats 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +5 -0
- data/CONTRIBUTING.md +32 -0
- data/Gemfile +11 -0
- data/LICENSE +201 -0
- data/README.md +110 -0
- data/bin/meeting_stats +450 -0
- data/bin/pipeline_visibility_stats +636 -0
- data/bin/promise_stats +312 -0
- data/bin/repo_stats +113 -0
- data/docs/MeetingStats.md +69 -0
- data/docs/PipelineVisibilityStats.md +51 -0
- data/docs/PromiseStats.md +56 -0
- data/docs/RepoStats.md +130 -0
- data/examples/meeting_stats_config.rb +22 -0
- data/examples/promise_stats_config.rb +23 -0
- data/examples/repo_stats_config.rb +49 -0
- data/initialization_data/Gemfile +3 -0
- data/initialization_data/README.md +20 -0
- data/initialization_data/rubocop.yml +2 -0
- data/lib/oss_stats/buildkite_client.rb +252 -0
- data/lib/oss_stats/buildkite_token.rb +15 -0
- data/lib/oss_stats/config/meeting_stats.rb +36 -0
- data/lib/oss_stats/config/promise_stats.rb +22 -0
- data/lib/oss_stats/config/repo_stats.rb +47 -0
- data/lib/oss_stats/config/shared.rb +43 -0
- data/lib/oss_stats/github_client.rb +55 -0
- data/lib/oss_stats/github_token.rb +23 -0
- data/lib/oss_stats/log.rb +25 -0
- data/lib/oss_stats/repo_stats.rb +1048 -0
- data/lib/oss_stats/version.rb +3 -0
- data/oss-stats.gemspec +39 -0
- data/spec/buildkite_client_spec.rb +171 -0
- data/spec/repo_stats_spec.rb +1242 -0
- metadata +181 -0
@@ -0,0 +1,636 @@
|
|
1
|
+
#!/usr/bin/env ruby

require 'base64'
require 'date'
require 'fileutils'
require 'json'
require 'mixlib/shellout'
require 'net/http'
require 'optparse'
require 'set'
require 'uri'
require 'yaml'

require_relative '../lib/oss_stats/buildkite_client'
require_relative '../lib/oss_stats/buildkite_token'
require_relative '../lib/oss_stats/github_client'
require_relative '../lib/oss_stats/github_token'
require_relative '../lib/oss_stats/log'
|
18
|
+
|
19
|
+
# Fetches and decodes the `.expeditor/config.yml` of the given repository
# via the GitHub contents API.
#
# @param org [String] GitHub organization name
# @param repo [String] repository name
# @param client [#get] GitHub client responding to `get(path)` and
#   returning the API response hash (with a base64 'content' field)
# @return [String, nil] the decoded config file, or nil when the file
#   cannot be fetched (e.g. it does not exist) — callers treat nil as
#   "repo has no Expeditor config"
def get_expeditor_config(org, repo, client)
  config_path = "/repos/#{org}/#{repo}/contents/.expeditor/config.yml"
  response = client.get(config_path)
  Base64.decode64(response['content'])
rescue StandardError
  # Best-effort: any fetch/decode failure means "no config".
  nil
end
|
26
|
+
|
27
|
+
# Textually patches an Expeditor config so the named pipelines carry
# `public: true`, preserving all other formatting (a YAML round-trip
# would reformat the whole file, hence line surgery).
#
# @param text [String] raw contents of .expeditor/config.yml
# @param pipeline_names [Array<String>] pipelines to mark public
# @return [String, nil] the patched text, or nil when nothing changed
#
# NOTE(review): this inserts into `lines` while `each_with_index` is
# iterating it. That appears safe only because the inserted
# "public: true" line can never match the `- name` regex below, so it
# is skipped on the next iteration — confirm before restructuring.
def patch_yaml_public_flag!(text, pipeline_names)
  lines = text.lines
  current_pipeline = nil
  modified = false

  lines.each_with_index do |line, idx|
    # Matches a pipeline list entry: "- name" or "- name:" (with block).
    match = line.match(/^\s*-\s+(\S+?)(:)?\s*$/)
    next unless match
    log.trace("Processing line #{line}")
    current_pipeline = Regexp.last_match(1)
    # Non-nil when the entry already opens a settings block (":").
    has_block = Regexp.last_match(2)
    indent = line[/^\s*/] + ' '

    log.trace("Line is for #{current_pipeline}")
    next unless pipeline_names.include?(current_pipeline)
    log.trace("... which is a pipeline from #{pipeline_names}")
    if has_block
      # Already has a block: skip if `public: true` is present within
      # the next few lines (heuristic window of 5 lines).
      next if lines[idx + 1..idx + 5].any? { |l| l =~ /^\s*public:\s*true/ }
    else
      # Bare entry: turn "- name" into "- name:" so it can hold settings.
      # NOTE(review): the replacement hard-codes the leading indent
      # rather than reusing `indent` — verify against real configs with
      # non-default nesting.
      lines[idx] = " #{line.strip}:\n"
    end
    lines.insert(idx + 1, indent + " public: true\n")
    modified = true
  end

  modified ? lines.join : nil
end
|
54
|
+
|
55
|
+
# Runs an external command and raises on failure.
#
# @param args [Array<String>] command and arguments
# @param cwd [String, nil] working directory for the command
# @param echo [Boolean] print the command's stdout when true
# @param retries [Integer] number of retries (5s apart) on failure
# @return [String] the command's stdout
# @raise [Mixlib::ShellOut::ShellCommandFailed] once retries are exhausted
def run_cmd!(args, cwd: nil, echo: true, retries: 0)
  log.debug("Running: #{args.join(' ')}")
  shellout = Mixlib::ShellOut.new(args, cwd: cwd)
  shellout.run_command
  shellout.error!
  puts shellout.stdout if echo
  shellout.stdout
rescue Mixlib::ShellOut::ShellCommandFailed
  # No retries left: let the failure propagate.
  raise unless retries > 0

  log.warn("Retrying command: #{args.join(' ')}")
  retries -= 1
  sleep(5)
  retry
end
|
71
|
+
|
72
|
+
# Emits one line of report output: to the file handle (with a trailing
# newline) when one is given, otherwise to the info log.
#
# @param fh [IO, nil] report file handle, or nil to log instead
# @param msg [String] the line to emit
def output(fh, msg)
  return log.info(msg) unless fh

  fh.write("#{msg}\n")
end
|
79
|
+
|
80
|
+
# Inspects a repository's Expeditor config and reports which of its
# pipelines are not marked `public: true`. Optionally (when
# options[:make_prs_for] and options[:source_dir] are set) patches the
# config and drives the `sj`/git workflow to open a PR making them public.
#
# @param repo_info [Hash] GitHub repo API response (must include 'name');
#   assumed already fetched and confirmed public by the caller
# @param options [Hash] CLI options (:github_org, :verify_only,
#   :skip_patterns, :make_prs_for, :source_dir, :assume_yes)
# @param gh_client [OssStats::GitHubClient] GitHub API client
# @param log [#debug,#warn] logger
# @return [Hash] :pipelines (private pipeline names), :total_processed,
#   :private_found, :skipped_counts
def process_expeditor_pipelines(repo_info, options, gh_client, log)
  total_pipeline_count = 0
  private_pipeline_count = 0
  skipped_by_pattern = Hash.new(0)

  # Assuming repo_info is already fetched and confirmed public
  repo = repo_info['name'] # Get repo name from repo_info
  content = get_expeditor_config(options[:github_org], repo, gh_client)
  unless content
    log.debug("No expeditor config for #{repo}")
    return { pipelines: [], total_processed: 0, private_found: 0,
             skipped_counts: skipped_by_pattern }
  end

  begin
    config = YAML.safe_load(content)
  rescue Psych::SyntaxError => e
    log.warn("Skipping #{repo} due to YAML error: #{e}")
    return { pipelines: [], total_processed: 0, private_found: 0,
             skipped_counts: skipped_by_pattern }
  end

  pipelines = config['pipelines'] || []
  repo_missing_public = []

  # Entries are either bare strings or single-key hashes; collect all names
  # so the `_private`-variant check below can see every pipeline.
  pipeline_names = pipelines.map do |pl|
    pl.is_a?(String) ? pl : pl.keys
  end.flatten

  pipelines.each do |pipeline_block|
    # Normalize bare-string entries to { name => {} } so both forms share
    # one code path.
    pipeline_block = { pipeline_block => {} } if pipeline_block.is_a?(String)

    pipeline_block.each do |pipeline_name, pipeline_details|
      if options[:verify_only] && !pipeline_name.start_with?('verify')
        log.debug("Skipping non-verify pipeline #{pipeline_name}")
        next
      end

      # A foo_private pipeline is fine as long as a public foo also exists.
      if pipeline_name.end_with?('_private')
        pubname = pipeline_name.gsub('_private', '')
        if pipeline_names.include?(pubname)
          log.debug("Skipping #{pipeline_name}, #{pubname} exists")
          skipped_by_pattern[pipeline_name] += 1
          next
        end
        log.warn("There is a #{pipeline_name} pipeline but no #{pubname}")
      end

      skip_matched = options[:skip_patterns].find do |pat|
        pipeline_name.include?(pat)
      end

      if skip_matched
        skipped_by_pattern[pipeline_name] += 1
        next
      end

      # Ad-hoc pipelines don't gate PRs; skip them (they should be named
      # accordingly, hence the warning).
      env = pipeline_details['env'] || []
      if env.any? { |i| i['ADHOC'] }
        log.warn("#{pipeline_name} is marked as adhoc but not named so")
        skipped_by_pattern[pipeline_name] += 1
        next
      end

      total_pipeline_count += 1
      if !pipeline_details.is_a?(Hash) || pipeline_details['public'] != true
        repo_missing_public << pipeline_name
        private_pipeline_count += 1
      end
    end
  end

  # PR creation logic (specific to Chef Expeditor) - remains within this method
  if !repo_missing_public.empty? && options[:make_prs_for].any? &&
     options[:source_dir]
    pipelines_to_fix = repo_missing_public & options[:make_prs_for]
    unless pipelines_to_fix.empty?
      patched = patch_yaml_public_flag!(content, pipelines_to_fix)
      if patched
        repo_path = File.join(options[:source_dir], repo)
        unless Dir.exist?(repo_path)
          run_cmd!(
            ['sj', 'sclone', "#{options[:github_org]}/#{repo}"],
            cwd: options[:source_dir],
          )
        end

        run_cmd!(%w{sj feature expeditor-public}, cwd: repo_path, retries: 2)
        expeditor_path = File.join(repo_path, '.expeditor', 'config.yml')
        FileUtils.mkdir_p(File.dirname(expeditor_path))
        File.write(expeditor_path, patched)

        run_cmd!(['git', 'add', expeditor_path], cwd: repo_path)
        commit_message = "make pipelines public: #{pipelines_to_fix.join(', ')}"
        run_cmd!(
          ['git', 'commit', '-sm', commit_message],
          cwd: repo_path,
        )

        unless options[:assume_yes]
          puts "\nDiff for #{repo}:"
          run_cmd!(
            ['git', '--no-pager', 'diff', 'HEAD~1'], cwd: repo_path
          )
          print 'Create PR? [y/N] '
          confirm = $stdin.gets.strip.downcase
          # BUGFIX: this used to `return repo_missing_public` (a bare
          # Array) when the user declined, but every other exit returns a
          # result Hash and main() calls result.fetch(:pipelines, ...),
          # which raises TypeError on an Array. Abort the PR workflow but
          # return the standard result shape.
          unless confirm == 'y'
            return { pipelines: repo_missing_public,
                     total_processed: total_pipeline_count,
                     private_found: private_pipeline_count,
                     skipped_counts: skipped_by_pattern }
          end
        end

        run_cmd!(%w{sj spush}, cwd: repo_path, retries: 2)
        run_cmd!(%w{sj spr}, cwd: repo_path, retries: 2)
      end
    end
  end

  {
    pipelines: repo_missing_public,
    total_processed: total_pipeline_count,
    private_found: private_pipeline_count,
    skipped_counts: skipped_by_pattern,
  }
end
|
203
|
+
|
204
|
+
# Determines which Buildkite pipelines tied to a repository are not public.
#
# Pipelines are discovered two ways:
#   1. Directly: from the org-wide scan results in
#      primary_org_pipelines_map_data, keyed by repository URL.
#   2. Indirectly: by scanning commit statuses on recent non-draft PRs for
#      buildkite.com target URLs — this catches pipelines we cannot list
#      directly (other Buildkite orgs, or ones hidden from our token).
#
# @param repo_info [Hash] GitHub repo API response ('name', 'html_url')
# @param options [Hash] CLI options (:buildkite_org, :github_org,
#   :skip_patterns)
# @param gh_client [OssStats::GitHubClient] GitHub API client
# @param log [#debug,#warn,#error] logger
# @param bk_client [OssStats::BuildkiteClient] Buildkite API client (only
#   queried for pipelines not covered by the initial scan)
# @param buildkite_slug_visibility_lookup [Hash] slug => {visibility:, repo_url:}
# @param primary_org_pipelines_map_data [Hash] repo_url => [{slug:, visibility:}]
# @return [Hash] :pipelines, :total_processed, :private_found, :skipped_counts
def process_buildkite_pipelines(
  repo_info,
  options,
  gh_client,
  log,
  bk_client,
  buildkite_slug_visibility_lookup,
  primary_org_pipelines_map_data
)
  total_pipeline_count = 0
  private_pipeline_count = 0
  skipped_by_pattern = Hash.new(0)

  repo_url = repo_info['html_url']
  # repo name for logging, consistent with process_expeditor_pipelines
  repo_name = repo_info['name']

  if repo_name.nil? || repo_name.empty?
    log.error('Repository name is nil or empty for ' \
              "#{repo_info['html_url'] || 'unknown URL'}. " +
              'Skipping Buildkite processing for this repo.')
    return { pipelines: [], total_processed: 0, private_found: 0,
             skipped_counts: skipped_by_pattern }
  end

  repo_missing_public = []
  # This set tracks unique "#{org}/#{slug}" for this repo to avoid duplicates
  reported_slugs = Set.new
  seen_slugs = Set.new

  # First, check to see if any pipelines in BK report being associated
  # with this repo
  pipelines_for_this_repo_direct =
    primary_org_pipelines_map_data.fetch(repo_url, [])
  pipelines_for_this_repo_direct.each do |p_info|
    slug = p_info[:slug]
    visibility = p_info[:visibility] # Already known from initial scan
    log.debug("Direct pipeline: #{options[:buildkite_org]}/#{slug}, " +
              "vis: #{visibility}")

    # BUGFIX: report_key must be computed *before* the skip-pattern branch
    # below uses it; previously it was assigned only after that branch, so
    # a matching skip pattern raised NameError.
    report_key = "#{options[:buildkite_org]}/#{slug}"

    skip = options[:skip_patterns].find { |pat| slug.include?(pat) }
    if skip
      log.debug("Skipping #{slug} due to pattern: #{skip}")
      skipped_by_pattern[report_key] += 1
      next
    end

    next unless seen_slugs.add?(report_key)

    total_pipeline_count += 1
    next if visibility.casecmp('public').zero?

    if reported_slugs.add?(report_key)
      log.debug("Pipeline #{report_key} is #{visibility} (direct)")
      repo_missing_public << slug
      private_pipeline_count += 1
    end
  end

  # However, more likely, we don't have access to see the pipeline or
  # even the BK org. So walk the most recent PRs
  log.debug("Starting BK PR analysis for #{repo_name} (URL: #{repo_url})")
  recent_prs = gh_client.recent_prs(options[:github_org], repo_name)
  recent_prs.reject! { |pr| pr['draft'] }
  recent_prs.each do |pr_data|
    pr_number = pr_data['number']

    log.debug("Analyzing PR ##{pr_number}")
    statuses = gh_client.pr_statuses(pr_data)

    next if statuses.empty?

    # walk statuses and pulls all relevant buildkite URLs and uniqueify them
    unique_org_slug_pairs_from_statuses = Set.new
    statuses.each do |status|
      target_url = status['target_url']
      next unless target_url.is_a?(String) && !target_url.empty?
      bk_url_match = target_url.match(
        %r{https://buildkite\.com/([^/]+)/([^/]+)},
      )
      next unless bk_url_match && bk_url_match.captures.length == 2
      unique_org_slug_pairs_from_statuses.add(
        [bk_url_match[1], bk_url_match[2]],
      )
    end

    # walk buildkite URLs, and if we haven't seen them already, check
    # if they're public
    unique_org_slug_pairs_from_statuses.each do |bk_org, bk_slug|
      report_key = "#{bk_org}/#{bk_slug}"
      log.debug("Processing #{report_key} from PR")

      skip = options[:skip_patterns].find { |pat| bk_slug.include?(pat) }
      if skip
        log.debug("Skipping #{bk_slug} due to pattern: #{skip}")
        skipped_by_pattern[report_key] += 1
        next
      end

      # check if we already reported in this.
      next unless seen_slugs.add?(report_key)
      total_pipeline_count += 1

      visibility = nil
      discovery_method = ''

      if bk_org == options[:buildkite_org]
        details_from_lookup = buildkite_slug_visibility_lookup[bk_slug]
        if details_from_lookup
          # It was in our initial scan of all pipelines, but was not
          # reported as being associated with this repo, which is odd
          # but not impossible (for pipelines that people kick off
          # from some other source
          log.warn(
            "A PR has a pipeline we already knew about, but didn't" +
            'report on, wat?',
          )
          visibility = details_from_lookup[:visibility]
          discovery_method = 'from initial scan'
        end
      end

      # Not in the scan: ask the Buildkite API directly (returns nil if we
      # cannot see it at all).
      unless visibility
        pipeline_data = bk_client.get_pipeline(bk_org, bk_slug)
        discovery_method = 'queried directly'
        visibility = pipeline_data&.dig('visibility')
      end

      next if visibility&.downcase == 'public'

      source_info = "(via PR ##{pr_number}"
      source_info += ", Org: #{bk_org}" if bk_org != options[:buildkite_org]
      source_info += ", #{discovery_method})"

      if reported_slugs.add?(report_key)
        repo_missing_public << report_key
        log.debug("#{report_key} source info: #{source_info}")
        private_pipeline_count += 1
      else
        log.debug(
          "Pipeline #{report_key} (via PR) already reported as private.",
        )
      end
    end
  end

  {
    pipelines: repo_missing_public,
    total_processed: total_pipeline_count,
    private_found: private_pipeline_count,
    skipped_counts: skipped_by_pattern,
  }
end
|
358
|
+
|
359
|
+
# Generates the pipeline visibility report.
#
# Discovers the public repositories of options[:github_org] (unless an
# explicit options[:repos] list was given), asks the configured CI
# provider ('buildkite' or 'expeditor') which pipelines are private, and
# writes a Markdown report to options[:output] (or to the info log when
# no output file is requested).
#
# @param options [Hash] parsed CLI options
# @raise [ArgumentError] for an unsupported provider or a missing
#   Buildkite org when the buildkite provider is selected
def main(options)
  fh = nil
  if options[:output]
    # File.open (not Kernel#open) so a filename starting with '|' can
    # never be interpreted as a command to spawn.
    fh = File.open(options[:output], 'w')
    log.info("Generating report and writing to #{options[:output]}")
  end

  github_token = get_github_token!(options)
  gh_client = OssStats::GitHubClient.new(github_token)
  bk_client = nil
  if options[:provider] == 'buildkite'
    unless options[:buildkite_org]
      raise ArgumentError, 'buildkite org required for buildkite provider'
    end
    buildkite_token = get_buildkite_token!(options)
    bk_client = OssStats::BuildkiteClient.new(buildkite_token)
    log.debug('Fetching all Buildkite pipelines...')
    buildkite_pipelines_data = bk_client.pipelines_by_repo(
      options[:buildkite_org],
    )
    bk_pipeline_count = buildkite_pipelines_data.values.flatten.count
    bk_repo_count = buildkite_pipelines_data.keys.count
    log.debug(
      "Found #{bk_pipeline_count} Buildkite pipelines across " +
      "#{bk_repo_count} repositories.",
    )

    # Create a global lookup for slug -> {visibility:, repo_url:} for efficiency
    buildkite_slug_visibility_lookup = {}
    buildkite_pipelines_data.each do |repo_url, pipelines|
      pipelines.each do |p_info|
        # Keyed by slug only, slugs are unique across one org's pipelines
        buildkite_slug_visibility_lookup[p_info[:slug]] = {
          visibility: p_info[:visibility],
          repo_url:,
        }
      end
    end
  elsif options[:provider] != 'expeditor'
    raise ArgumentError, "Unsupported provider: #{options[:provider]}"
  end

  total_pipeline_count = 0
  private_pipeline_count = 0
  repos_with_private = 0
  skipped_by_pattern = Hash.new(0)

  name = options[:github_org].capitalize
  output(fh, "# #{name} Pipeline Visibility Report #{Date.today}\n")
  # No explicit repo list: walk every public repo in the org (paginated).
  if options[:repos].empty?
    log.debug("Fetching repos under '#{options[:github_org]}'...")
    page = 1
    loop do
      list = gh_client.get(
        "/orgs/#{options[:github_org]}/repos?per_page=100&page=#{page}",
      )
      break if list.empty?

      priv = list.select { |r| r['private'] }.map { |r| r['name'] }
      unless priv.empty?
        log.debug("Found private repos: #{priv.join(', ')}")
      end

      options[:repos].concat(
        list.select { |r| !r['private'] }.map { |r| r['name'] },
      )
      page += 1
    end
    log.debug("Discovered these public repos: #{options[:repos].join(', ')}")
  end

  options[:repos].sort.each do |repo|
    next if options[:skip_repos].include?(repo)

    begin
      repo_info = gh_client.get("/repos/#{options[:github_org]}/#{repo}")
    rescue StandardError => e
      log.error(
        "Error fetching repo info for #{options[:github_org]}/#{repo}: " +
        e.message,
      )
      log.error('Skipping this repository.')
      next
    end

    if repo_info['private']
      log.debug("Skipping private repo: #{repo}")
      next
    end

    # Progress dot when writing the report to a file instead of the log.
    print('.') if fh

    result = {}
    if options[:provider] == 'expeditor'
      result = process_expeditor_pipelines(
        repo_info, options, gh_client, log
      )
    elsif options[:provider] == 'buildkite'
      result = process_buildkite_pipelines(
        repo_info,
        options,
        gh_client,
        log,
        bk_client,
        buildkite_slug_visibility_lookup,
        buildkite_pipelines_data,
      )
    end

    # Accumulate results from the processed provider
    private_pipelines_for_this_repo = result.fetch(:pipelines, [])
    total_pipeline_count += result.fetch(:total_processed, 0)
    private_pipeline_count += result.fetch(:private_found, 0)
    result.fetch(:skipped_counts, {}).each do |pattern, count|
      skipped_by_pattern[pattern] += count
    end

    next if private_pipelines_for_this_repo.empty?
    # Construct repo identifier using html_url from repo_info if available
    # 'repo' is the loop variable for the current repository name.
    # 'repo_info' is the hash of details for that specific repository.
    name = "#{options[:github_org]}/#{repo}"
    repo_html_url = repo_info['html_url']
    if repo_html_url
      name = "[#{name}](#{repo_html_url})"
    end
    output(fh, "* #{name}")
    private_pipelines_for_this_repo.sort.each do |pipeline_entry|
      output(fh, " * #{pipeline_entry}")
    end
    repos_with_private += 1
  end

  if total_pipeline_count > 0
    percentage_private = (
      (private_pipeline_count.to_f / total_pipeline_count.to_f) * 100
    ).round(2)
    output(
      fh, "\nTotal percentage of private pipelines: #{percentage_private}%"
    )
    summary_line = format(
      ' --> %<private>d out of %<total>d across %<repos>d repos',
      private: private_pipeline_count,
      total: total_pipeline_count,
      repos: repos_with_private,
    )
    output(fh, summary_line)

    if skipped_by_pattern.any?
      output(fh, ' --> Skipped pipelines:')
      skipped_by_pattern.each_key do |pipeline|
        output(fh, " - #{pipeline}")
      end
      output(fh, ' -> The following skip patterns were specified:')
      options[:skip_patterns].each do |pat|
        output(fh, " - #{pat}")
      end
    end
  else
    output(fh, 'No pipelines found (excluding skipped patterns).')
  end

  puts if fh
ensure
  # Close the report file even if an exception interrupted the run
  # (previously the handle leaked on any error raised above).
  fh&.close
end
|
523
|
+
|
524
|
+
# Command-line options
# Defaults for the CLI; the OptionParser below mutates this hash in
# place (the list options append, so the empty arrays act as
# accumulators across repeated flags).
options = {
  assume_yes: false,
  log_level: :info,
  make_prs_for: [],
  pipeline_format: '%{github_org}-%{repo}-%{branch}-verify',
  provider: 'buildkite',
  # [] instead of the non-idiomatic empty %w{} literals used before —
  # equivalent values, consistent with skip_repos below.
  repos: [],
  skip_patterns: [],
  skip_repos: [],
  verify_only: true,
}
|
536
|
+
|
537
|
+
# CLI definition. Parses ARGV in place, mutating the `options` hash
# defined above. Array-typed flags (+=) accumulate, so they may be
# repeated and/or given comma-separated values.
OptionParser.new do |opts|
  opts.banner = 'Usage: check_gh_pipelines.rb [options]'

  opts.on(
    '--assume-yes',
    'If set, do not prompt before making PRs.',
  ) { options[:assume_yes] = true }

  opts.on(
    '--github-token TOKEN',
    'GitHub personal access token (or use GITHUB_TOKEN env var)',
  ) { |val| options[:github_token] = val }

  opts.on(
    '-l LEVEL',
    '--log-level LEVEL',
    'Set logging level to LEVEL. [default: info]',
  ) { |level| options[:log_level] = level.to_sym }

  opts.on(
    '--make-prs-for NAMES',
    Array,
    'Comma-separated list of pipeline names to make public if found private.',
  ) { |v| options[:make_prs_for] += v }

  opts.on(
    '--github-org ORG',
    'GitHub org name to look at all repos for. Required.',
  ) { |v| options[:github_org] = v }

  opts.on(
    '-o FILE',
    '--output FILE',
    'Write the output to FILE',
  ) { |v| options[:output] = v }

  opts.on(
    '--repos REPO',
    Array,
    'GitHub repositories name. Can specify comma-separated list and/or use ' +
    'the option multiple times. Leave blank for all repos in the org.',
  ) { |v| options[:repos] += v }

  opts.on(
    '--skip PATTERN',
    Array,
    'Pipeline name substring to skip. Can specify a comma-separated list ' +
    'and/or use the option multiple times. ' +
    "[default: #{options[:skip_patterns].join(',')}]",
  ) { |v| options[:skip_patterns] += v }

  opts.on(
    '--skip-repos REPOS',
    Array,
    'Comma-separated list of repos to skip even if they are public.',
  ) { |v| options[:skip_repos] += v }

  opts.on(
    '--source-dir DIR',
    'Directory to look for or clone the repo into.',
  ) { |v| options[:source_dir] = v }

  opts.on(
    '--[no-]verify-only',
    'By default we only look at verify pipelines as those are the only ones ' +
    'that run on PRs. Use --no-verify-only to change this.',
  ) { |v| options[:verify_only] = v }

  opts.on(
    '--provider PROVIDER',
    %w{expeditor buildkite},
    'CI provider to use: buildkite or expeditor. Default: ' +
    options[:provider].to_s, # Ensure string for concatenation
  ) { |v| options[:provider] = v }

  opts.on(
    '--buildkite-token TOKEN',
    'Buildkite API token (or use BUILDKITE_TOKEN env var)',
  ) { |v| options[:buildkite_token] = v }

  opts.on(
    '--buildkite-org ORG',
    'Buildkite organization slug',
  ) { |v| options[:buildkite_org] = v }

  opts.on(
    '--pipeline-format FORMAT',
    'Expected pipeline name format string. Default: ' +
    options[:pipeline_format].to_s, # Ensure string for concatenation
  ) { |v| options[:pipeline_format] = v }
end.parse!
|
628
|
+
|
629
|
+
# Apply global settings derived from the parsed CLI options, then run.
log.level = options[:log_level] if options[:log_level]
# Repeatable list options may contain duplicates; de-duplicate them all.
%i{skip_patterns repos make_prs_for}.each { |key| options[key].uniq! }

raise ArgumentError, 'GitHub org is required' unless options[:github_org]

main(options) if __FILE__ == $PROGRAM_NAME
|