github_repo_statistics 2.3.7 → 2.3.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 722258cfcbda685136e97c5b844f38b4da2937dbf3693b5131c628aad5131672
- data.tar.gz: e0377225ed1e088b3eef9ff283ebf254225b36546eae619378f28df4f64cd800
+ metadata.gz: 45f9b41fdeece55840918ee51ada62d6c23509620e50c19e5a495df7bea0d0a8
+ data.tar.gz: 1a128c270392e20ed6b0d48db13cac15f3d1dfd41ab5b5f8b0f97d46c263df9b
  SHA512:
- metadata.gz: 37cc51e5dd9c5c40ee4b816fcbfa39e0c288fe6fa17c298890e6f6dbc508923dd094f2bd75969bc906695c69a6e5bec6d36ccdb380b8c1d9a455247b86ce2af4
- data.tar.gz: 355b86a47bb3f55ac5426256a840e3e4a77320a093524f77c93627d73b08c53df3762a5c9c8bab34992f182ba7a74575806442933855858a4f17d245d3986cf2
+ metadata.gz: 00c4d3572b32f831af53cec591c415828f1326875794da0df1b1009880ffd90cd1c99c01e8a6cc56b8d40ccd113fae09ecbb65d826ab273e635d1e036d782d29
+ data.tar.gz: ceadd24b9ccc16eedaf41a99e9dc1ef7f7843994b162c0c4a89ba829528197f85715bc29305a0f6a3bbde6436f5aed7ef2ce7a716ff74168f2f72ef79e15554a
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- github_repo_statistics (2.3.7)
+ github_repo_statistics (2.3.8)
  date
  faraday-retry
  google-cloud-bigquery
@@ -78,6 +78,11 @@ OptionParser.new do |opts|
  options[:code_extension] = code_extension
  end
 
+ opts.on('--percentile STRING',
+ 'The percentile of file changes that are considered to be hotspot [default: "100"]') do |percentile|
+ options[:percentile] = percentile
+ end
+
  opts.on('--team-to-focus STRING',
  'The team identifier to focus for the metric collection [default: ""]') do |team_to_focus|
  options[:team_to_focus] = team_to_focus
@@ -93,6 +98,11 @@ OptionParser.new do |opts|
  options[:file_output] = true
  end
 
+ opts.on('--hotspot-check',
+ 'Enables a hotspot check [default:false]') do
+ options[:hotspot_check] = true
+ end
+
  opts.on('-v', '--version', 'Display the version of the gem') do
  puts "github_repo_statistics version #{GithubRepoStatistics::VERSION}"
  exit
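
For reference, the two new switches land in the options hash like any other OptionParser flag. A minimal, self-contained sketch (simulated ARGV; everything unrelated to the new flags is omitted):

    require 'optparse'

    options = {}
    OptionParser.new do |opts|
      # Same switch definitions as in the diff above, without the help text.
      opts.on('--percentile STRING') { |percentile| options[:percentile] = percentile }
      opts.on('--hotspot-check') { options[:hotspot_check] = true }
    end.parse!(['--percentile', '90', '--hotspot-check'])

    options # => {:percentile=>"90", :hotspot_check=>true}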
@@ -124,6 +134,7 @@ EXCLUDED_FILES = options[:excluded_files]
  EXCLUDED_PRS = options[:excluded_prs]
  TEAM_TO_FOCUS = options[:team_to_focus]
  CODEOWNER_TO_FOCUS = options[:codeowner_to_focus]
+ PERCENTILE = options[:percentile] || '100'
 
  unless CI
  puts "\nDirectory: #{REPO_PATH}\n"
@@ -142,8 +153,20 @@ unless CI
  puts "Debug mode is: #{options[:debug] ? 'on' : 'off'}\n\n"
  end
 
- system("git checkout #{DEFAULT_BRANCH}", [:out] => File::NULL)
- system('git pull', %i[out err] => File::NULL)
-
- GithubRepoStatistics.new(duration_in_days: options[:duration_in_days] || 30, directory_path: REPO_PATH,
- begin_time: DateTime.now, steps: options[:steps].to_i, debug: options[:debug]).contribution_message
+ if options[:hotspot_check]
+ feature_branch = `git rev-parse --abbrev-ref HEAD`.chomp
+ changed_files = `git diff --name-only master...#{feature_branch} | grep #{REPO_PATH} | grep '\.swift'`.split
+ branch_hotspot_files = GithubRepoStatistics.new(duration_in_days: options[:duration_in_days] || 30, directory_path: REPO_PATH,
+ begin_time: DateTime.now, steps: options[:steps].to_i, debug: options[:debug]).hotspot_check(files: changed_files, branch: feature_branch)
+ system("git checkout #{DEFAULT_BRANCH}", [:out] => File::NULL)
+ system('git pull', %i[out err] => File::NULL)
+ master_hotspot_files = GithubRepoStatistics.new(duration_in_days: options[:duration_in_days] || 30, directory_path: REPO_PATH,
+ begin_time: DateTime.now, steps: options[:steps].to_i, debug: options[:debug]).hotspot_check(files: changed_files, branch: DEFAULT_BRANCH)
+
+ raise 'New hotspot was introduced, contact foundation to unblock the PR' if branch_hotspot_files > master_hotspot_files
+ else
+ system("git checkout #{DEFAULT_BRANCH}", [:out] => File::NULL)
+ system('git pull', %i[out err] => File::NULL)
+ GithubRepoStatistics.new(duration_in_days: options[:duration_in_days] || 30, directory_path: REPO_PATH,
+ begin_time: DateTime.now, steps: options[:steps].to_i, debug: options[:debug]).contribution_message
+ end
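
The new branch above shells out to git. As an illustration, with made-up branch and file names, the first two backtick calls would yield something like:

    feature_branch = `git rev-parse --abbrev-ref HEAD`.chomp
    # => "my-feature"
    `git diff --name-only master...#{feature_branch}`.split
    # => ["Sources/Home/HomeViewModel.swift", "Sources/Home/HomeView.swift"]

The gate then counts hotspot files among those changed files twice, once on the feature branch and once on the default branch after checking it out, and raises only if the feature branch reports more.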
@@ -7,7 +7,6 @@ require 'pry'
  require_relative '../lib/github_repo_statistics/version'
  require_relative '../lib/github_repo_statistics/release_merge_report'
 
-
  options = {}
  OptionParser.new do |opts|
  opts.banner = 'Usage: github_repo_statistics [options]'
@@ -54,4 +53,4 @@ raise 'Please provide GitHub token using --github-token flag' if options[:github
  raise 'Please provide GitHub repo name using --github-repo flag' if options[:github_repo].nil?
  raise 'Please provide default GitHub branch using --default-branch flag' if DEFAULT_BRANCH.nil?
 
- ReleaseMergeReport.new(token: options[:github_token], repo: options[:github_repo], branch_prefix: "release/").report
+ ReleaseMergeReport.new(token: options[:github_token], repo: options[:github_repo], branch_prefix: 'release/').report
@@ -25,8 +25,8 @@ Gem::Specification.new do |spec|
  spec.require_paths = ['lib']
 
  spec.add_dependency 'date'
- spec.add_dependency 'octokit'
- spec.add_dependency 'pry'
  spec.add_dependency 'faraday-retry'
  spec.add_dependency 'google-cloud-bigquery'
+ spec.add_dependency 'octokit'
+ spec.add_dependency 'pry'
  end
@@ -22,9 +22,7 @@ class ForceMergeReport
  # Sort PRs into monthly chunks
  weekly_pull_requests = pull_requests.group_by { |pr| pr.closed_at.strftime('%Y-%W') }
 
- if weekly_pull_requests[Time.now.strftime('%Y-%W')].nil?
- weekly_pull_requests[Time.now.strftime('%Y-%W')] = []
- end
+ weekly_pull_requests[Time.now.strftime('%Y-%W')] = [] if weekly_pull_requests[Time.now.strftime('%Y-%W')].nil?
 
  # Initialize a hash to store monthly summaries
  weekly_summaries = Hash.new { |hash, key| hash[key] = { total: 0, failed: 0, workflows: Hash.new(0) } }
@@ -70,15 +68,15 @@ class ForceMergeReport
  # Print details of merged pull requests without meeting the required criteria for the last 6 months
  next if failed_checks.empty?
 
- unless @ci
- puts "PR ##{pr.number} - Merged at: #{pr.merged_at}"
- puts 'Failed Checks:'
- failed_checks.each do |check|
- puts "- Workflow: #{check.name || check.context}, Conclusion: #{check.conclusion || check.state}"
- end
- puts '---'
- puts
+ next if @ci
+
+ puts "PR ##{pr.number} - Merged at: #{pr.merged_at}"
+ puts 'Failed Checks:'
+ failed_checks.each do |check|
+ puts "- Workflow: #{check.name || check.context}, Conclusion: #{check.conclusion || check.state}"
  end
+ puts '---'
+ puts
  end
  end
 
@@ -93,43 +91,43 @@ class ForceMergeReport
 
  # ENV['BQ_CREDENTIALS'] = `cat /Users/serghei.moret/.config/gcloud/application_default_credentials.json`
 
- if ENV['BQ_CREDENTIALS']
- require "google/cloud/bigquery"
- require "json"
- creds = JSON.parse(ENV['BQ_CREDENTIALS'])
- bigquery = Google::Cloud::Bigquery.new(
- project_id: "hellofresh-android",
- credentials: creds
- )
- dataset = bigquery.dataset "github_data"
-
- failed_count = summary[:failed]
- size_check = summary[:workflows]["Mergeable: Size check"]
- sonarqube_check = summary[:workflows]['SonarQube Code Analysis']
- ui_tests_check = summary[:workflows]['UI Tests']
- unit_tests_check = summary[:workflows]['Unit Tests']
- total_prs = summary[:total]
-
- query = <<~SQL
- MERGE INTO force_merges AS target
- USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
- ON target.calendar_week = source.calendar_week AND target.platform = source.platform
- WHEN MATCHED THEN
- UPDATE SET
- target.force_merges_count = #{failed_count},
- target.ui_tests_count = #{ui_tests_check},
- target.unit_tests_count = #{unit_tests_check},
- target.size_check_count = #{size_check},
- target.sonarqube_count = #{sonarqube_check},
- target.total_prs = #{total_prs},
- target.platform = '#{@repo}'
- WHEN NOT MATCHED THEN
- INSERT (calendar_week, force_merges_count, ui_tests_count, unit_tests_count, size_check_count, sonarqube_count, total_prs, platform)
- VALUES ('#{week}', #{failed_count}, #{ui_tests_check}, #{unit_tests_check}, #{size_check}, #{sonarqube_check}, #{total_prs}, '#{@repo}');
- SQL
-
- dataset.query(query)
- end
+ next unless ENV['BQ_CREDENTIALS']
+
+ require 'google/cloud/bigquery'
+ require 'json'
+ creds = JSON.parse(ENV['BQ_CREDENTIALS'])
+ bigquery = Google::Cloud::Bigquery.new(
+ project_id: 'hellofresh-android',
+ credentials: creds
+ )
+ dataset = bigquery.dataset 'github_data'
+
+ failed_count = summary[:failed]
+ size_check = summary[:workflows]['Mergeable: Size check']
+ sonarqube_check = summary[:workflows]['SonarQube Code Analysis']
+ ui_tests_check = summary[:workflows]['UI Tests']
+ unit_tests_check = summary[:workflows]['Unit Tests']
+ total_prs = summary[:total]
+
+ query = <<~SQL
+ MERGE INTO force_merges AS target
+ USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
+ ON target.calendar_week = source.calendar_week AND target.platform = source.platform
+ WHEN MATCHED THEN
+ UPDATE SET
+ target.force_merges_count = #{failed_count},
+ target.ui_tests_count = #{ui_tests_check},
+ target.unit_tests_count = #{unit_tests_check},
+ target.size_check_count = #{size_check},
+ target.sonarqube_count = #{sonarqube_check},
+ target.total_prs = #{total_prs},
+ target.platform = '#{@repo}'
+ WHEN NOT MATCHED THEN
+ INSERT (calendar_week, force_merges_count, ui_tests_count, unit_tests_count, size_check_count, sonarqube_count, total_prs, platform)
+ VALUES ('#{week}', #{failed_count}, #{ui_tests_check}, #{unit_tests_check}, #{size_check}, #{sonarqube_check}, #{total_prs}, '#{@repo}');
+ SQL
+
+ dataset.query(query)
  end
  end
  end
@@ -183,23 +183,23 @@ class GithubRepoStatistics
  `git ls-tree -r --name-only $(git rev-list -1 HEAD) -- "#{directory_path}"`
  end
 
- def files_with_changes(directory_path:, start_date:, end_date:)
- `git log origin/master --name-only --pretty=format:"" --since="#{start_date}" --until="#{end_date}" "#{directory_path}"`
+ def files_with_changes(directory_path:, start_date:, end_date:, branch: DEFAULT_BRANCH)
+ `git log origin/#{branch} --name-only --pretty=format:"" --since="#{start_date}" --until="#{end_date}" "#{directory_path}"`
  end
 
- def git_commit_count(file:, start_date:, end_date:)
- `git log origin/master --since="#{start_date}" --until="#{end_date}" --follow -- "#{file}" | grep -c '^commit'`
+ def git_commit_count(file:, start_date:, end_date:, branch: DEFAULT_BRANCH)
+ `git log origin/#{branch} --since="#{start_date}" --until="#{end_date}" --follow -- "#{file}" | grep -c '^commit'`
  end
 
- def git_commit_info(file:, start_date:, end_date:)
- `git log origin/master --pretty=format:"%s" --since="#{start_date}" --until="#{end_date}" -- "#{file}"`
+ def git_commit_info(file:, start_date:, end_date:, branch: DEFAULT_BRANCH)
+ `git log origin/#{branch} --pretty=format:"%s" --since="#{start_date}" --until="#{end_date}" -- "#{file}"`
  end
 
  def new_changes?(file:)
- git_commit_info(file:, start_date: DateTime.now - 7, end_date: DateTime.now) == "" ? false : true
+ git_commit_info(file:, start_date: DateTime.now - 7, end_date: DateTime.now) != ''
  end
 
- def analyze_changed_files(uniq_code_files_with_changes:, start_date:, end_date:)
+ def analyze_changed_files(uniq_code_files_with_changes:, start_date:, end_date:, branch: DEFAULT_BRANCH)
  all_teams = []
  cross_teams_count = 0
  single_ownership_teams_count = 0
@@ -208,8 +208,8 @@ class GithubRepoStatistics
  file_team_map = {}
  uniq_code_files_with_changes.each do |file|
  filename = File.basename(file)
- commit_count = git_commit_count(file:, start_date:, end_date:).to_i
- git_log = git_commit_info(file:, start_date:, end_date:).split("\n")
+ commit_count = git_commit_count(file:, start_date:, end_date:, branch:).to_i
+ git_log = git_commit_info(file:, start_date:, end_date:, branch:).split("\n")
 
  if EXCLUDED_PRS
  excluded_prs = EXCLUDED_PRS.split(',')
@@ -218,16 +218,14 @@ class GithubRepoStatistics
 
  prs = git_log.map do |pr|
  match = pr.match(/#(\d+)/)
- if match
- match[0]
- end
+ match[0] if match
  end.uniq
 
  teams = git_log.map do |team|
  team.match(/#{TEAM_REGEX}/)[0].upcase
  end.reject { |e| EXCLUSIONS&.include?(e) }
 
- teams = calculate_percentile(teams, 90)
+ teams = calculate_percentile(teams, PERCENTILE.to_i)
 
  total_changes += commit_count
  all_teams << teams
@@ -251,7 +249,7 @@ class GithubRepoStatistics
  counts = arr.each_with_object(Hash.new(0)) { |item, hash| hash[item] += 1 }
 
  # Sort elements by their counts in descending order
- sorted_counts = counts.sort_by { |k, v| -v }.to_h
+ sorted_counts = counts.sort_by { |_k, v| -v }.to_h
 
  # Calculate the cut-off for the percentile
  total_count = arr.size
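
Only a fragment of calculate_percentile is visible in this hunk. A self-contained sketch of one plausible cut-off implementation (an assumption, not the gem's actual method body) shows why the new PERCENTILE default of 100 keeps every team:

    # Hypothetical reconstruction: keep the most frequent items until their
    # cumulative count reaches the requested percentile of all occurrences.
    def calculate_percentile(arr, percentile)
      counts = arr.each_with_object(Hash.new(0)) { |item, hash| hash[item] += 1 }
      sorted_counts = counts.sort_by { |_k, v| -v }.to_h

      cutoff = arr.size * percentile / 100.0
      kept = []
      running = 0
      sorted_counts.each do |item, count|
        break if running >= cutoff
        running += count
        kept << item
      end
      kept
    end

    calculate_percentile(%w[A A A B B C], 50)  # => ["A"]
    calculate_percentile(%w[A A A B B C], 100) # => ["A", "B", "C"]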
@@ -295,7 +293,7 @@ class GithubRepoStatistics
  occurrences = all_teams.flatten.compact.tally
  sorted_occurrences = occurrences.sort_by { |element, count| [-count, element] }
  contributors = Hash[sorted_occurrences]
- churn_count = file_team_map.values.map { |value| value.last }.sum
+ churn_count = file_team_map.values.map(&:last).sum
  hotspot_changes_percentage = ((churn_count.to_f / total_changes) * 100).round(2)
  # Filter files based on extension, existence and size
  filtered_files = filter_files(file_team_map:)
@@ -308,14 +306,14 @@ class GithubRepoStatistics
  # ENV['BQ_CREDENTIALS'] = `cat /Users/serghei.moret/.config/gcloud/application_default_credentials.json`
 
  if ENV['BQ_CREDENTIALS']
- require "google/cloud/bigquery"
- require "json"
+ require 'google/cloud/bigquery'
+ require 'json'
  creds = JSON.parse(ENV['BQ_CREDENTIALS'])
  bigquery = Google::Cloud::Bigquery.new(
- project_id: "hellofresh-android",
+ project_id: 'hellofresh-android',
  credentials: creds
  )
- dataset = bigquery.dataset "github_data"
+ dataset = bigquery.dataset 'github_data'
 
  files_with_multiple_contributor = file_team_map.count
  big_files_with_multiple_contributors = filtered_top_touched_files.count
@@ -328,24 +326,24 @@ class GithubRepoStatistics
  end
 
  query = <<~SQL
- INSERT INTO modularization (date, platform, single_contributor_percentage, files_changed_by_many_teams, file_count, cross_teams_count, single_ownership_teams_count, hotspot_changes_percentage, churn_count, total_changes, files_with_multiple_contributor, big_files_with_multiple_contributors, total_files_changed, hotspot_lines, big_files_count)
- VALUES ('#{@begin_time}', '#{platform}', #{files_with_single_contributor_percentage}, #{files_changed_by_many_teams}, #{file_count}, #{cross_teams_count}, #{single_ownership_teams_count}, #{hotspot_changes_percentage}, #{churn_count}, #{total_changes}, #{files_with_multiple_contributor}, #{big_files_with_multiple_contributors}, #{total_files_changed}, #{hotspot_lines}, #{big_files_count});
+ INSERT INTO modularization (date, platform, single_contributor_percentage, files_changed_by_many_teams, file_count, cross_teams_count, single_ownership_teams_count, hotspot_changes_percentage, churn_count, total_changes, files_with_multiple_contributor, big_files_with_multiple_contributors, total_files_changed, hotspot_lines, big_files_count)
+ VALUES ('#{@begin_time}', '#{platform}', #{files_with_single_contributor_percentage}, #{files_changed_by_many_teams}, #{file_count}, #{cross_teams_count}, #{single_ownership_teams_count}, #{hotspot_changes_percentage}, #{churn_count}, #{total_changes}, #{files_with_multiple_contributor}, #{big_files_with_multiple_contributors}, #{total_files_changed}, #{hotspot_lines}, #{big_files_count});
  SQL
 
  dataset.query(query)
 
- # delete_query = <<~SQL
- # DELETE FROM modularization
- # WHERE CONCAT(DATE(date), ' ', TIME(date)) NOT IN (
- # SELECT CONCAT(DATE(date), ' ', TIME(date))
- # FROM modularization AS m1
- # WHERE TIME(date) = (
- # SELECT MAX(TIME(date))
- # FROM modularization AS m2
- # WHERE DATE(m1.date) = DATE(m2.date)
- # )
- # );
- # SQL
+ # delete_query = <<~SQL
+ # DELETE FROM modularization
+ # WHERE CONCAT(DATE(date), ' ', TIME(date)) NOT IN (
+ # SELECT CONCAT(DATE(date), ' ', TIME(date))
+ # FROM modularization AS m1
+ # WHERE TIME(date) = (
+ # SELECT MAX(TIME(date))
+ # FROM modularization AS m2
+ # WHERE DATE(m1.date) = DATE(m2.date)
+ # )
+ # );
+ # SQL
 
  # dataset.query(delete_query)
  end
@@ -373,7 +371,7 @@ class GithubRepoStatistics
  if HOTSPOT
  hotspot_output = []
 
- filter_files(file_team_map: file_team_map, size: 0).each do |line|
+ filter_files(file_team_map:, size: 0).each do |line|
  file = line.first
  contributors = line.last.first
  lines_of_code = count_lines_of_code(file)
@@ -413,4 +411,16 @@ class GithubRepoStatistics
  @begin_time -= duration_in_days
  contribution_message
  end
+
+ def hotspot_check(files:, branch:)
+ duration_in_days = @duration_in_days.to_i
+ start_date = @begin_time.to_time.to_i - duration_in_days * 86_400
+ end_date = @begin_time.to_time.to_i
+
+ _, _, _, files_changed_by_many_teams, = analyze_changed_files(
+ uniq_code_files_with_changes: files, start_date:, end_date:, branch:
+ )
+
+ files_changed_by_many_teams
+ end
  end
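
A hypothetical call to the new public method (directory path, file list, and branch name are placeholders; per the hunk above, it returns the files_changed_by_many_teams value from analyze_changed_files):

    stats = GithubRepoStatistics.new(duration_in_days: 30, directory_path: 'Sources/',
                                     begin_time: DateTime.now, steps: 0, debug: false)
    stats.hotspot_check(files: ['Sources/Home/HomeViewModel.swift'], branch: 'my-feature')
    # => 2  (count of changed files that qualify as hotspots on that branch)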
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require 'octokit'
  require 'json'
  require 'google/cloud/bigquery'
@@ -34,7 +36,7 @@ class ReleaseMergeReport
  client.auto_paginate = true
 
  tags = client.tags(@repo)
- branch_info = Hash.new { |hash, key| hash[key] = { count: 0, teams: [], tribes: []} }
+ branch_info = Hash.new { |hash, key| hash[key] = { count: 0, teams: [], tribes: [] } }
 
  tags.each do |tag|
  next if !tag.name.match?(/^(v23|v24)\./) && !tag.name.match?(/^(23|24)\./)
@@ -47,7 +49,7 @@ class ReleaseMergeReport
 
  # Count merged pull requests associated with the branch
  pull_requests = client.pull_requests(@repo, state: 'closed', sort: 'updated', direction: 'desc', base: branch_name)
- .select { |pr| pr.merged_at }
+ .select(&:merged_at)
 
  pull_requests.each do |pr|
  branch_info[branch_name][:count] += 1
@@ -65,8 +67,8 @@ class ReleaseMergeReport
  end
 
  def group_branch_counts(branch_info)
- patch_counts = Hash.new { |hash, key| hash[key] = { count: 0, teams: [], tribes: []} }
- hotfix_counts = Hash.new { |hash, key| hash[key] = { count: 0, teams: [], tribes: []} }
+ patch_counts = Hash.new { |hash, key| hash[key] = { count: 0, teams: [], tribes: [] } }
+ hotfix_counts = Hash.new { |hash, key| hash[key] = { count: 0, teams: [], tribes: [] } }
 
  branch_info.each do |branch, info|
  major_minor_version, patch_version = branch.match(/^#{@branch_prefix}(\d+\.\d+)(?:\.(\d+))?/)&.captures
@@ -77,8 +79,7 @@ class ReleaseMergeReport
  patch_counts[major_minor_version][:teams] += info[:teams]
  patch_counts[major_minor_version][:tribes] += info[:tribes]
 
-
- elsif info[:count] > 0
+ elsif (info[:count]).positive?
  # Branch is a hotfix version
  hotfix_counts[major_minor_version][:count] += info[:count]
  hotfix_counts[major_minor_version][:teams] += info[:teams]
@@ -95,14 +96,14 @@ class ReleaseMergeReport
  end
 
  def export_to_bigquery(branch_counts)
- require "google/cloud/bigquery"
- require "json"
+ require 'google/cloud/bigquery'
+ require 'json'
  creds = JSON.parse(ENV['BQ_CREDENTIALS'])
  bigquery = Google::Cloud::Bigquery.new(
- project_id: "hellofresh-android",
+ project_id: 'hellofresh-android',
  credentials: creds
  )
- dataset = bigquery.dataset "github_data"
+ dataset = bigquery.dataset 'github_data'
 
  date = DateTime.now
 
@@ -113,18 +114,18 @@
 
  # Construct the SQL query
  query = <<~SQL
- MERGE INTO release_merges AS target
- USING (SELECT '#{branch}' AS release, '#{@repo}' AS platform) AS source
- ON target.release = source.release AND target.platform = source.platform
- WHEN MATCHED THEN
- UPDATE SET
- target.merge_count = #{count[:count]},
- target.timestamp = '#{date}',
- target.contributors = ARRAY[#{teams_json}],
- target.contributors_tribe = ARRAY[#{tribes_json}]
- WHEN NOT MATCHED THEN
- INSERT (release, merge_count, platform, timestamp, contributors, contributors_tribe)
- VALUES ('#{branch}', #{count[:count]}, '#{@repo}', '#{date}', ARRAY[#{teams_json}], ARRAY[#{tribes_json}]);
+ MERGE INTO release_merges AS target
+ USING (SELECT '#{branch}' AS release, '#{@repo}' AS platform) AS source
+ ON target.release = source.release AND target.platform = source.platform
+ WHEN MATCHED THEN
+ UPDATE SET
+ target.merge_count = #{count[:count]},
+ target.timestamp = '#{date}',
+ target.contributors = ARRAY[#{teams_json}],
+ target.contributors_tribe = ARRAY[#{tribes_json}]
+ WHEN NOT MATCHED THEN
+ INSERT (release, merge_count, platform, timestamp, contributors, contributors_tribe)
+ VALUES ('#{branch}', #{count[:count]}, '#{@repo}', '#{date}', ARRAY[#{teams_json}], ARRAY[#{tribes_json}]);
  SQL
 
  # Execute the query
@@ -141,17 +142,17 @@ class ReleaseMergeReport
 
  # Construct the SQL query
  query = <<~SQL
- MERGE INTO release_merges AS target
- USING (SELECT '#{branch}' AS release, '#{@repo}' AS platform) AS source
- ON target.release = source.release AND target.platform = source.platform
- WHEN MATCHED THEN
- UPDATE SET
- target.hotfix_count = #{count[:count]},
- target.contributors_hotfixes = ARRAY[#{teams_json}],
- target.contributors_hotfixes_tribe = ARRAY[#{tribes_json}]
- WHEN NOT MATCHED THEN
- INSERT (release, hotfix_count, platform, contributors_hotfixes, contributors_hotfixes_tribe )
- VALUES ('#{branch}', #{count[:count]}, '#{@repo}', ARRAY[#{teams_json}], ARRAY[#{tribes_json}]);
+ MERGE INTO release_merges AS target
+ USING (SELECT '#{branch}' AS release, '#{@repo}' AS platform) AS source
+ ON target.release = source.release AND target.platform = source.platform
+ WHEN MATCHED THEN
+ UPDATE SET
+ target.hotfix_count = #{count[:count]},
+ target.contributors_hotfixes = ARRAY[#{teams_json}],
+ target.contributors_hotfixes_tribe = ARRAY[#{tribes_json}]
+ WHEN NOT MATCHED THEN
+ INSERT (release, hotfix_count, platform, contributors_hotfixes, contributors_hotfixes_tribe )
+ VALUES ('#{branch}', #{count[:count]}, '#{@repo}', ARRAY[#{teams_json}], ARRAY[#{tribes_json}]);
  SQL
 
  # Execute the query
@@ -90,30 +90,30 @@ class ReviewReport
  # ENV['BQ_CREDENTIALS'] = `cat /Users/serghei.moret/.config/gcloud/application_default_credentials.json`
 
  if ENV['BQ_CREDENTIALS']
- require "google/cloud/bigquery"
- require "json"
+ require 'google/cloud/bigquery'
+ require 'json'
  creds = JSON.parse(ENV['BQ_CREDENTIALS'])
  bigquery = Google::Cloud::Bigquery.new(
- project_id: "hellofresh-android",
+ project_id: 'hellofresh-android',
  credentials: creds
  )
- dataset = bigquery.dataset "github_data"
+ dataset = bigquery.dataset 'github_data'
 
  query = <<~SQL
- MERGE INTO pr_reviews AS target
- USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
- ON target.calendar_week = source.calendar_week AND target.platform = source.platform
- WHEN MATCHED THEN
- UPDATE SET
- target.change_requested_reviews = #{change_requested_reviews},
- target.reviews_with_comments = #{reviews_with_comments},
- target.total_reviews = #{total_reviews},
- target.average_review_time_hours = #{average_time_hours.round(2)},
- target.total_prs = #{total_count},
- target.platform = '#{@repo}'
- WHEN NOT MATCHED THEN
- INSERT (calendar_week, total_prs, average_review_time_hours, total_reviews, reviews_with_comments, change_requested_reviews, platform)
- VALUES ('#{week}', #{total_count}, #{average_time_hours.round(2)}, #{total_reviews}, #{reviews_with_comments}, #{change_requested_reviews}, '#{@repo}');
+ MERGE INTO pr_reviews AS target
+ USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
+ ON target.calendar_week = source.calendar_week AND target.platform = source.platform
+ WHEN MATCHED THEN
+ UPDATE SET
+ target.change_requested_reviews = #{change_requested_reviews},
+ target.reviews_with_comments = #{reviews_with_comments},
+ target.total_reviews = #{total_reviews},
+ target.average_review_time_hours = #{average_time_hours.round(2)},
+ target.total_prs = #{total_count},
+ target.platform = '#{@repo}'
+ WHEN NOT MATCHED THEN
+ INSERT (calendar_week, total_prs, average_review_time_hours, total_reviews, reviews_with_comments, change_requested_reviews, platform)
+ VALUES ('#{week}', #{total_count}, #{average_time_hours.round(2)}, #{total_reviews}, #{reviews_with_comments}, #{change_requested_reviews}, '#{@repo}');
  SQL
 
  dataset.query(query)
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  class GithubRepoStatistics
- VERSION = '2.3.7'
+ VERSION = '2.3.8'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: github_repo_statistics
  version: !ruby/object:Gem::Version
- version: 2.3.7
+ version: 2.3.8
  platform: ruby
  authors:
  - Serghei Moret
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-06-04 00:00:00.000000000 Z
+ date: 2024-06-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: date
@@ -25,7 +25,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: octokit
+ name: faraday-retry
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -39,7 +39,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: pry
+ name: google-cloud-bigquery
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -53,7 +53,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: faraday-retry
+ name: octokit
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -67,7 +67,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: google-cloud-bigquery
+ name: pry
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="