codepulse 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/codepulse/base_client.rb +1 -67
- data/lib/codepulse/cli.rb +7 -7
- data/lib/codepulse/formatter.rb +31 -11
- data/lib/codepulse/gh_cli_client.rb +108 -4
- data/lib/codepulse/metrics_calculator.rb +10 -9
- data/lib/codepulse.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a8a474f7a151ae2edf2f67c74aa3f8347cef950039b6f87777455206afccdc94
+  data.tar.gz: 65af6a80141c769edbc30571a818a5af805d6a8e3366a93a8e04c9d1ab5d7d58
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c6664055a79fbc73d73cbd2293cb16ef28f3a8417baad8b97f3c8f97582e82eb0d8a5724ac69d7280d112e90f72a47c24af2d4ed133b3d4e1f45fd9df26e44ff
+  data.tar.gz: 20b9addb4cbd4bd6d8874edc4509d355e1c2a688ee25d20947b6bb590dd5bd8df8d689725f1c6b25f399bf6e917c92b8cec81f484570f9bee4a110b7ceb836c3
data/lib/codepulse/base_client.rb
CHANGED
@@ -1,72 +1,12 @@
 # frozen_string_literal: true

 require "json"
-require "uri"

 module Codepulse
-  # Shared GitHub API client
+  # Shared GitHub API client utilities.
   module BaseClient
-    REPO_FORMAT = %r{\A[^/]+/[^/]+\z}
-
-    # Fetches pull requests with pagination, then fetches full details for each.
-    def pull_requests(repository, state:, limit:)
-      ensure_repository_format(repository)
-      per_page = [limit, 100].min
-      page = 1
-      collected = []
-
-      while collected.length < limit
-        response = api_get(
-          "/repos/#{repository}/pulls",
-          state: state,
-          per_page: per_page,
-          page: page
-        )
-        break if response.empty?
-
-        collected.concat(response)
-        break if response.length < per_page
-
-        page += 1
-      end
-
-      limited = collected.first(limit)
-      fetch_pull_request_details(repository, limited)
-    end
-
-    def pull_request_reviews(repository, number)
-      ensure_repository_format(repository)
-      api_get("/repos/#{repository}/pulls/#{number}/reviews", per_page: 100)
-    end
-
-    def pull_request_comments(repository, number)
-      ensure_repository_format(repository)
-      api_get("/repos/#{repository}/pulls/#{number}/comments", per_page: 100)
-    end
-
-    def issue_comments(repository, number)
-      ensure_repository_format(repository)
-      api_get("/repos/#{repository}/issues/#{number}/comments", per_page: 100)
-    end
-
     private

-    def api_get(_path, _query_params = {})
-      raise NotImplementedError, "Subclasses must implement api_get"
-    end
-
-    def ensure_repository_format(repository)
-      return if repository.to_s.match?(REPO_FORMAT)
-
-      raise ConfigurationError, "Repository must be in the format owner/name"
-    end
-
-    def fetch_pull_request_details(repository, pull_requests)
-      pull_requests.map do |pull_request|
-        api_get("/repos/#{repository}/pulls/#{pull_request["number"]}")
-      end
-    end
-
     def parse_json(body)
       return {} if body.to_s.strip.empty?

@@ -74,11 +14,5 @@ module Codepulse
     rescue JSON::ParserError => error
       raise ApiError, "Failed to parse response: #{error.message}"
    end
-
-    def encode_query(query_params)
-      return "" if query_params.empty?
-
-      "?#{URI.encode_www_form(query_params)}"
-    end
   end
 end
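After this change, BaseClient is reduced to the shared JSON-parsing helper. Below is a minimal sketch of an includer, assuming the gem is loaded; FakeClient and its fetch method are illustrative and not part of the gem.

require "codepulse"

# Hypothetical includer, used only to show the surviving parse_json helper.
class FakeClient
  include Codepulse::BaseClient

  def fetch(raw_body)
    parse_json(raw_body) # {} for blank bodies; raises ApiError on invalid JSON
  end
end

p FakeClient.new.fetch('{"number": 42}') # => {"number"=>42}
p FakeClient.new.fetch("")               # => {}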
data/lib/codepulse/cli.rb
CHANGED
@@ -122,8 +122,9 @@ module Codepulse

     def fetch_pull_requests(client, repo)
       limit = effective_limit
-
-
+      business_days = @options.fetch(:business_days_back)
+      status "Fetching pull requests from #{repo} for the last #{business_days} business days..."
+      client.pull_requests_with_activity(repo, state: @options.fetch(:state), limit: limit)
     end

     def effective_limit
@@ -143,13 +144,12 @@ module Codepulse
       pull_requests
     end

-    def calculate_metrics(
+    def calculate_metrics(_client, _repo, pull_requests)
       status "Calculating metrics for #{pull_requests.length} pull requests..."
-      calculator = MetricsCalculator.new
+      calculator = MetricsCalculator.new

-      pull_requests.
-
-        calculator.metrics_for_pull_request(repo, pull_request)
+      pull_requests.map do |pull_request|
+        calculator.metrics_for_pull_request(pull_request)
       end
     end

data/lib/codepulse/formatter.rb
CHANGED
@@ -206,11 +206,11 @@ module Codepulse
       sorted = values.sort
       average_seconds = (values.sum / values.length.to_f).round

-
-
-
-
-
+      print_stat_line("Average", label, format_duration_compact(average_seconds))
+      print_stat_line("Median", label, format_duration_compact(median_value(sorted)))
+      print_stat_line("p95", label, format_duration_compact(percentile_value(sorted, 95))) if values.length >= MIN_FOR_P95
+      print_stat_line("Fastest", label, format_duration_compact(sorted.first))
+      print_stat_line("Slowest", label, format_duration_compact(sorted.last))
     end

     def print_number_stats(label, values)
@@ -219,11 +219,16 @@ module Codepulse
       sorted = values.sort
       average_value = (values.sum / values.length.to_f).round(1)

-
-
-
-
-
+      print_stat_line("Average", label, format_number_compact(average_value))
+      print_stat_line("Median", label, format_number_compact(median_value(sorted)))
+      print_stat_line("p95", label, format_number_compact(percentile_value(sorted, 95))) if values.length >= MIN_FOR_P95
+      print_stat_line("Min", label, format_number_compact(sorted.first))
+      print_stat_line("Max", label, format_number_compact(sorted.last))
+    end
+
+    def print_stat_line(prefix, label, value)
+      stat_label = "#{prefix} #{label.downcase}:"
+      puts " #{stat_label.ljust(28)} #{value}"
     end

     def truncate(value, length)
@@ -274,7 +279,7 @@ module Codepulse
     end

     # Returns the value at the given percentile from a sorted array.
-    # Uses nearest-rank method
+    # Uses nearest-rank method for percentiles like p95.
     def percentile_value(sorted_values, percentile)
       count = sorted_values.length
       rank = (percentile / 100.0 * count).ceil
@@ -282,6 +287,21 @@ module Codepulse
       sorted_values[index]
     end

+    # Returns the median of a sorted array.
+    # For odd counts, returns the middle value.
+    # For even counts, returns the average of the two middle values.
+    def median_value(sorted_values)
+      count = sorted_values.length
+      return nil if count.zero?
+
+      if count.odd?
+        sorted_values[count / 2]
+      else
+        mid = count / 2
+        (sorted_values[mid - 1] + sorted_values[mid]) / 2.0
+      end
+    end
+
     def format_number_compact(value)
       return "0" if value.nil?

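For reference, a standalone sketch of the statistics added above (median plus nearest-rank p95), run on a made-up list of duration values that is not taken from the gem.

sorted = [60, 120, 300, 900, 3600].sort

median =
  if sorted.length.odd?
    sorted[sorted.length / 2]               # odd count: middle value
  else
    mid = sorted.length / 2
    (sorted[mid - 1] + sorted[mid]) / 2.0   # even count: average of the two middle values
  end

rank = (95 / 100.0 * sorted.length).ceil    # nearest-rank method
p95  = sorted[rank - 1]

puts "median=#{median} p95=#{p95}"          # => median=300 p95=3600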
data/lib/codepulse/gh_cli_client.rb
CHANGED
@@ -1,27 +1,131 @@
 # frozen_string_literal: true

 require "open3"
+require "json"

 module Codepulse
   class GhCliClient
     include BaseClient

     DEFAULT_COMMAND = "gh"
+    GRAPHQL_PAGE_SIZE = 50
+    ACTIVITY_PAGE_SIZE = 50

     def initialize(command: DEFAULT_COMMAND)
       @command = command
       verify_cli_available
     end

+    # Fetches PRs with reviews and comments in a single GraphQL query.
+    # Returns array of PR hashes with embedded :reviews, :review_comments, :issue_comments.
+    def pull_requests_with_activity(repository, state:, limit:)
+      owner, name = repository.split("/", 2)
+      raise ConfigurationError, "Repository must be in the format owner/name" unless owner && name
+
+      fetch_all_pull_requests(owner, name, graphql_states(state), limit)
+    end
+
     private

-    def
-
-
+    def fetch_all_pull_requests(owner, name, states, limit)
+      pull_requests = []
+      cursor = nil
+
+      loop do
+        batch_size = [GRAPHQL_PAGE_SIZE, limit - pull_requests.length].min
+        response = graphql_query(build_query(owner, name, states, batch_size, cursor))
+        nodes, page_info = extract_pr_data(response)
+        break if nodes.empty?
+
+        pull_requests.concat(nodes.map { |node| transform_graphql_pr(node) })
+        break if pull_requests.length >= limit || !page_info["hasNextPage"]
+
+        cursor = page_info["endCursor"]
+      end
+
+      pull_requests.first(limit)
+    end
+
+    def extract_pr_data(response)
+      pr_data = response.dig("data", "repository", "pullRequests") || {}
+      nodes = pr_data["nodes"] || []
+      page_info = pr_data["pageInfo"] || {}
+      [nodes, page_info]
+    end
+
+    def graphql_states(state)
+      case state
+      when "open" then %w[OPEN]
+      when "closed" then %w[CLOSED MERGED]
+      else %w[OPEN CLOSED MERGED]
+      end
+    end
+
+    def build_query(owner, name, states, batch_size, cursor)
+      after_clause = cursor ? ", after: \"#{cursor}\"" : ""
+      states_clause = states.join(", ")
+
+      <<~GRAPHQL
+        {
+          repository(owner: "#{owner}", name: "#{name}") {
+            pullRequests(first: #{batch_size}, states: [#{states_clause}], orderBy: {field: CREATED_AT, direction: DESC}#{after_clause}) {
+              pageInfo { hasNextPage endCursor }
+              nodes {
+                number title state createdAt mergedAt additions deletions changedFiles
+                author { login }
+                reviews(first: #{ACTIVITY_PAGE_SIZE}) { nodes { submittedAt author { login } } }
+                reviewThreads(first: #{ACTIVITY_PAGE_SIZE}) { nodes { comments(first: #{ACTIVITY_PAGE_SIZE}) { nodes { createdAt author { login } } } } }
+                comments(first: #{ACTIVITY_PAGE_SIZE}) { nodes { createdAt author { login } } }
+              }
+            }
+          }
+        }
+      GRAPHQL
+    end
+
+    def transform_graphql_pr(node)
+      {
+        "number" => node["number"],
+        "title" => node["title"],
+        "state" => node["state"]&.downcase,
+        "created_at" => node["createdAt"],
+        "merged_at" => node["mergedAt"],
+        "additions" => node["additions"],
+        "deletions" => node["deletions"],
+        "changed_files" => node["changedFiles"],
+        "user" => { "login" => node.dig("author", "login") },
+        "reviews" => transform_reviews(node),
+        "review_comments" => transform_review_comments(node),
+        "issue_comments" => transform_issue_comments(node)
+      }
+    end
+
+    def transform_reviews(node)
+      (node.dig("reviews", "nodes") || []).map do |review|
+        { "submitted_at" => review["submittedAt"], "user" => { "login" => review.dig("author", "login") } }
+      end
+    end
+
+    def transform_review_comments(node)
+      (node.dig("reviewThreads", "nodes") || []).flat_map do |thread|
+        (thread.dig("comments", "nodes") || []).map do |comment|
+          { "created_at" => comment["createdAt"], "user" => { "login" => comment.dig("author", "login") } }
+        end
+      end
+    end
+
+    def transform_issue_comments(node)
+      (node.dig("comments", "nodes") || []).map do |comment|
+        { "created_at" => comment["createdAt"], "user" => { "login" => comment.dig("author", "login") } }
+      end
+    end
+
+    def graphql_query(query)
+      stdout, stderr, status = Open3.capture3(@command, "api", "graphql", "-f", "query=#{query}")

       unless status.success?
         message = stderr.to_s.strip.empty? ? stdout.to_s.strip : stderr.to_s.strip
-        raise ApiError, "
+        raise ApiError, "GraphQL query failed: #{message}"
       end

       parse_json(stdout)
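A hedged usage sketch of the new single-query fetch path, assuming the gem is installed and the gh CLI is authenticated; "octocat/hello-world" is a placeholder repository name.

require "codepulse"

client = Codepulse::GhCliClient.new
pulls  = client.pull_requests_with_activity("octocat/hello-world", state: "closed", limit: 10)

pulls.each do |pr|
  # Each returned hash embeds activity fetched in the same GraphQL query.
  puts "##{pr["number"]} #{pr["title"]}: #{pr["reviews"].length} reviews, #{pr["issue_comments"].length} comments"
end

This replaces the per-PR REST calls removed from BaseClient with one paginated GraphQL query per batch.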
data/lib/codepulse/metrics_calculator.rb
CHANGED
@@ -69,15 +69,16 @@ module Codepulse
       "allstar[bot]"
     ].freeze

-    def initialize
-
+    def initialize
+      # No client needed - data is pre-fetched via GraphQL
     end

     # Returns a hash of metrics for a single PR.
-
+    # Expects pull_request to include :reviews, :review_comments, :issue_comments from GraphQL.
+    def metrics_for_pull_request(pull_request)
       created_at = parse_time(pull_request["created_at"])
       merged_at = parse_time(pull_request["merged_at"])
-      pickup_event = find_pickup_event(
+      pickup_event = find_pickup_event(pull_request, created_at)
       pickup_seconds = pickup_event ? business_seconds_between(created_at, pickup_event.fetch(:timestamp)) : nil
       merge_seconds = merged_at && created_at ? business_seconds_between(created_at, merged_at) : nil

@@ -101,12 +102,12 @@ module Codepulse
     private

     # Finds the first non-author, non-bot response (review, comment, or issue comment).
-
-
+    # Uses pre-fetched data from GraphQL query.
+    def find_pickup_event(pull_request, created_at)
       author_login = pull_request.dig("user", "login")

       review_event = earliest_event(
-
+        pull_request["reviews"] || [],
         author_login: author_login,
         time_key: "submitted_at",
         actor_path: %w[user login],
@@ -114,7 +115,7 @@ module Codepulse
       )

       review_comment_event = earliest_event(
-
+        pull_request["review_comments"] || [],
         author_login: author_login,
         time_key: "created_at",
         actor_path: %w[user login],
@@ -122,7 +123,7 @@ module Codepulse
       )

       issue_comment_event = earliest_event(
-
+        pull_request["issue_comments"] || [],
         author_login: author_login,
         time_key: "created_at",
         actor_path: %w[user login],
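A sketch of the new calling convention, assuming the gem is loaded; the hash below is hand-built to mimic the GraphQL-transformed shape and every value in it is invented for illustration.

require "codepulse"

pull_request = {
  "number" => 7,
  "title" => "Example change",
  "state" => "merged",
  "created_at" => "2025-12-01T09:00:00Z",
  "merged_at" => "2025-12-02T15:30:00Z",
  "additions" => 120,
  "deletions" => 30,
  "changed_files" => 4,
  "user" => { "login" => "author" },
  "reviews" => [{ "submitted_at" => "2025-12-01T11:00:00Z", "user" => { "login" => "reviewer" } }],
  "review_comments" => [],
  "issue_comments" => []
}

# The calculator no longer takes a client; all activity data rides in on the PR hash.
calculator = Codepulse::MetricsCalculator.new
p calculator.metrics_for_pull_request(pull_request)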
data/lib/codepulse.rb
CHANGED
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: codepulse
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.1.4
 platform: ruby
 authors:
 - Patrick Navarro
 bindir: bin
 cert_chain: []
-date: 2025-12-
+date: 2025-12-30 00:00:00.000000000 Z
 dependencies: []
 description: Terminal tool to analyze GitHub pull request pickup times, merge times,
   and sizes using the gh CLI.