github_repo_statistics 2.2.12 → 2.2.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 8800c4a2d628982565f80eb824eee40394cc37ca4cd3a9bf48c1a90ebe121c08
-   data.tar.gz: 161ca141a826f73be14b60124956d7324a464cf32a6958fa1724513299462d36
+   metadata.gz: dd2582ff1a032bad4e27ef0529ed83df931ffa781a9e9c2ca59b935ec8620deb
+   data.tar.gz: 931b612913c053acd5207092a0955c4532b301cfa62391f4314a046114e953c2
  SHA512:
-   metadata.gz: 6a3ed11bc49f145fe479a900d6ad527f78ac8cec1234eab7dfb934784674fbc285dbce2b12876f6092827f1a58b1b533712238cabccd0dd3d25ef6bc3a718aba
-   data.tar.gz: 47099629c0304465b6c0bf5ed587b7b2bab70eb23e7e0328b65179552eeaaf8acb80d4a105ad03535ea3fab5a8e92ea4a95c5e3f5dc8bd86811d187193c8d1f6
+   metadata.gz: f434060566ac717c0b8542a7825d240e85151fccd5a711cce0050a32764435247b147ec6d80fdb69ecc5fee43dab4a99eba1766557197faf8e4d5d933fe7aa10
+   data.tar.gz: 1ceeafecbbd2146816d8669e75c2d4fd645322b8c88259937520aeeb9ca11ccee3d9388e4eecf65d36777b95481b5c7ecf548d7499c33b169170245cdb274349
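The SHA256 and SHA512 entries above are digests of the two archives packed inside the .gem file (metadata.gz and data.tar.gz), not of the .gem itself. A minimal Ruby sketch for recomputing the SHA256 values locally, assuming the gem was first downloaded with `gem fetch github_repo_statistics -v 2.2.14` (the local file name below follows from that):

    require "digest"
    require "rubygems/package"

    # A .gem is a tar archive; metadata.gz and data.tar.gz are entries inside it,
    # and checksums.yaml records their digests.
    File.open("github_repo_statistics-2.2.14.gem", "rb") do |io|
      Gem::Package::TarReader.new(io).each do |entry|
        next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
        puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
      end
    end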
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
    remote: .
    specs:
-     github_repo_statistics (2.2.11)
+     github_repo_statistics (2.2.13)
        date
        faraday-retry
        google-cloud-bigquery
@@ -30,7 +30,6 @@ class ForceMergeReport
    weekly_summaries = Hash.new { |hash, key| hash[key] = { total: 0, failed: 0, workflows: Hash.new(0) } }

    weeks = @duration_in_days / 7
-
    require 'pry'
    binding.pry

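This hunk only removes a blank line; the `require 'pry'` / `binding.pry` pair above it is unchanged context, so it ships in the released gem. `binding.pry` is an interactive debugger breakpoint: any run that reaches it stops at a REPL instead of finishing the report. A minimal sketch of the behavior (assumes the pry gem is installed):

    require 'pry'

    weeks = 28 / 7
    binding.pry  # execution pauses here with `weeks` in scope; type `exit` to continue
    puts weeks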
@@ -113,8 +112,8 @@ class ForceMergeReport

    query = <<~SQL
      MERGE INTO force_merges AS target
-     USING (SELECT '#{week}' AS calendar_week) AS source
-     ON target.calendar_week = source.calendar_week
+     USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
+     ON target.calendar_week = source.calendar_week AND target.platform = source.platform
      WHEN MATCHED THEN
        UPDATE SET
          target.force_merges_count = #{failed_count},
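The change widens the MERGE key from calendar_week alone to (calendar_week, platform), so weekly rows for different repositories upsert independently instead of overwriting each other. A hedged sketch of the same upsert issued through the google-cloud-bigquery gem with query parameters instead of string interpolation; the dataset name and the full column list are not visible in this diff, so both are assumptions:

    require "google/cloud/bigquery"
    require "json"

    bigquery = Google::Cloud::Bigquery.new(
      project_id: "hellofresh-android",  # project id as it appears later in this diff
      credentials: JSON.parse(ENV.fetch("BQ_CREDENTIALS"))
    )
    dataset = bigquery.dataset "modularization"  # assumed; the diff does not show which dataset holds force_merges

    # Named parameters (@week, @repo, @failed_count) replace the '#{...}' interpolation.
    sql = <<~SQL
      MERGE INTO force_merges AS target
      USING (SELECT @week AS calendar_week, @repo AS platform) AS source
      ON target.calendar_week = source.calendar_week AND target.platform = source.platform
      WHEN MATCHED THEN
        UPDATE SET target.force_merges_count = @failed_count
      WHEN NOT MATCHED THEN
        INSERT (calendar_week, platform, force_merges_count)
        VALUES (@week, @repo, @failed_count)
    SQL

    # Sample values; in the report these come from the surrounding loop.
    dataset.query sql, params: { week: "2024-11", repo: "owner/repo", failed_count: 3 }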
@@ -124,7 +124,7 @@ class GithubRepoStatistics
      count += 1 if lines_count > size
    end

-   puts " *Current total number of code files longer than #{size} lines:* #{count}"
+   count
  end

  def count_hotspot_lines(files)
@@ -142,7 +142,7 @@ class GithubRepoStatistics
      count += lines_count
    end

-   puts " *Total lines of hotspot code:* #{count}"
+   count
  end

  def filter_existing_code_files(files)
@@ -234,28 +234,74 @@ class GithubRepoStatistics
  sorted_occurrences = occurrences.sort_by { |element, count| [-count, element] }
  contributors = Hash[sorted_occurrences]
  churn_count = file_team_map.values.map { |value| value[1] }.sum
- hotspot_changes_percentage = (churn_count.to_f / total_changes) * 100
+ hotspot_changes_percentage = ((churn_count.to_f / total_changes) * 100).round(2)
  # Filter files based on extension, existence and size
  filtered_files = filter_files(file_team_map:)
  filtered_top_touched_files = filtered_files.sort_by { |element, count| [-count.last, element] }

+ files_with_single_contributor_percentage = (100 - ((files_changed_by_many_teams.to_f / file_count) * 100)).round(2)
+ hotspot_lines = count_hotspot_lines(filtered_files.keys)
+ big_files_count = count_big_files(@directory_path)
+
+ if ENV['BQ_CREDENTIALS']
+   require "google/cloud/bigquery"
+   require "json"
+   creds = JSON.parse(ENV['BQ_CREDENTIALS'])
+   bigquery = Google::Cloud::Bigquery.new(
+     project_id: "hellofresh-android",
+     credentials: creds
+   )
+   dataset = bigquery.dataset "modularization"
+
+   files_with_multiple_contributor = file_team_map.count
+   big_files_with_multiple_contributors = filtered_top_touched_files.count
+   total_files_changed = uniq_code_files_with_changes.count
+
+   query = <<~SQL
+     MERGE INTO modularization AS target
+     USING (SELECT '#{@begin_time}' AS date, '#{@directory_path}' AS platform) AS source
+     ON target.date = source.date AND target.platform = source.platform
+     WHEN MATCHED THEN
+       UPDATE SET
+         target.platform = '#{@directory_path}',
+         target.single_contributor_percentage = #{files_with_single_contributor_percentage},
+         target.files_changed_by_many_teams = #{files_changed_by_many_teams},
+         target.file_count = #{file_count},
+         target.cross_teams_count = #{cross_teams_count},
+         target.single_ownership_teams_count = #{single_ownership_teams_count},
+         target.hotspot_changes_percentage = #{hotspot_changes_percentage},
+         target.churn_count = #{churn_count},
+         target.total_changes = #{total_changes},
+         target.files_with_multiple_contributor = #{files_with_multiple_contributor},
+         target.big_files_with_multiple_contributors = #{big_files_with_multiple_contributors},
+         target.total_files_changed = #{total_files_changed},
+         target.hotspot_lines = #{hotspot_lines},
+         target.big_files_count = #{big_files_count}
+     WHEN NOT MATCHED THEN
+       INSERT (date,platform,single_contributor_percentage,files_changed_by_many_teams,file_count,cross_teams_count,single_ownership_teams_count,hotspot_changes_percentage,churn_count,total_changes,files_with_multiple_contributor,big_files_with_multiple_contributors,total_files_changed,hotspot_lines,big_files_count)
+       VALUES (#{@begin_time}, #{@directory_path}, #{files_with_single_contributor_percentage}, #{files_changed_by_many_teams}, #{file_count}, ##{cross_teams_count}, #{single_ownership_teams_count}, #{hotspot_changes_percentage}, #{churn_count}, #{total_changes}, #{files_with_multiple_contributor}, #{big_files_with_multiple_contributors}, #{total_files_changed}, #{hotspot_lines}, #{big_files_count});
+   SQL
+
+   dataset.query(query)
+ end
+
  puts ''
  puts "*Timeframe:* #{(@begin_time - duration_in_days).strftime('%Y-%m-%d')} to #{@begin_time.strftime('%Y-%m-%d')}"
- puts " *Code files with a single contributor:* #{(100 - ((files_changed_by_many_teams.to_f / file_count) * 100)).round(2)}%"
+ puts " *Code files with a single contributor:* #{files_with_single_contributor_percentage}%"
  puts " *Existing files changed by many teams:* #{files_changed_by_many_teams}"
  puts " *Current existing #{CODE_EXTENSIONS} files:* #{file_count}"
  puts ' *Cross-Squad Dependency:*'
  puts " *Contributions by multiple squads to the same files:* #{cross_teams_count}"
  puts " *Contributions by single squads contributing to single files:* #{single_ownership_teams_count}"
- puts " *Hotspot Code Changes:* #{hotspot_changes_percentage.round(2)}%"
+ puts " *Hotspot Code Changes:* #{hotspot_changes_percentage}%"
  puts " *Churn count(commits to files by multiple teams):* #{churn_count}"
  puts " *Total amount of commits:* #{total_changes}"
- count_hotspot_lines(filtered_files.keys)
+ puts " *Total lines of hotspot code:* #{hotspot_lines}"
  puts " *#{CODE_EXTENSIONS} files with multiple contributors:* #{file_team_map.count}"
  puts " *#{CODE_EXTENSIONS} files exceeding #{BIG_FILE_SIZE} lines with multiple contributors:* #{filtered_top_touched_files.count}"
  puts " *Total amount of commits to #{CODE_EXTENSIONS} files:* #{total_changes}"
  puts " *Total #{CODE_EXTENSIONS} files changed:* #{uniq_code_files_with_changes.count}"
- count_big_files(@directory_path)
+ puts " *Current total number of code files longer than #{BIG_FILE_SIZE} lines:* #{big_files_count}"
  puts " *Current total of #{CODE_EXTENSIONS} files in the folder:* #{file_count}"
  puts " *Contributors:* #{contributors}"

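One detail in the inserted query: in the VALUES clause, `##{cross_teams_count}` interpolates behind a literal `#`, and `#{@begin_time}` and `#{@directory_path}` are unquoted, unlike in the matching UPDATE branch, so the generated statement will not parse whenever the WHEN NOT MATCHED path is taken. A small sketch of what that interpolation actually produces (stand-in values for the instance variables):

    require "date"

    begin_time = DateTime.now  # stand-in for @begin_time
    directory_path = "app/src" # stand-in for @directory_path
    cross_teams_count = 7

    puts "VALUES (#{begin_time}, #{directory_path}, ##{cross_teams_count})"
    # => VALUES (2024-03-15T10:00:00+00:00, app/src, #7)
    # The timestamp and path arrive unquoted and the count gains a stray '#';
    # quoting the strings and dropping the extra '#' would match the UPDATE branch.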
@@ -99,8 +99,8 @@ class ReviewReport

    query = <<~SQL
      MERGE INTO pr_reviews AS target
-     USING (SELECT '#{week}' AS calendar_week) AS source
-     ON target.calendar_week = source.calendar_week
+     USING (SELECT '#{week}' AS calendar_week, '#{@repo}' AS platform) AS source
+     ON target.calendar_week = source.calendar_week AND target.platform = source.platform
      WHEN MATCHED THEN
        UPDATE SET
          target.change_requested_reviews = #{change_requested_reviews},
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  class GithubRepoStatistics
-   VERSION = '2.2.12'
+   VERSION = '2.2.14'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: github_repo_statistics
  version: !ruby/object:Gem::Version
-   version: 2.2.12
+   version: 2.2.14
  platform: ruby
  authors:
  - Serghei Moret
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-03-11 00:00:00.000000000 Z
+ date: 2024-03-15 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: date