codepulse 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +111 -0
- data/bin/codepulse +8 -0
- data/lib/codepulse/base_client.rb +84 -0
- data/lib/codepulse/cli.rb +187 -0
- data/lib/codepulse/errors.rb +6 -0
- data/lib/codepulse/formatter.rb +286 -0
- data/lib/codepulse/gh_cli_client.rb +41 -0
- data/lib/codepulse/metrics_calculator.rb +123 -0
- data/lib/codepulse/time_helpers.rb +144 -0
- data/lib/codepulse.rb +13 -0
- metadata +54 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA256:
|
|
3
|
+
metadata.gz: 632b130d91677a3c461b9352908c9f0523fc402fe534ab41c7df9859eb3f4ec8
|
|
4
|
+
data.tar.gz: b053c80b65509c185e73d1f0f2ad9e70fbae40d9dc877bf030fddf37f7cd8fc6
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: 1f925aca5a64dbc2b3c6aa3811a9c986c0c874482edcc80c008e0732beadfaed0ab0e72f3f51a54006f8810557f6fff63c8dc09931c28895a2a7f7c40d7a069a
|
|
7
|
+
data.tar.gz: 7839b516b6428ee42733d2474f310a90981c782bf8eed674a01ee4abdb8e56810c133dde059db595851f84712af7094ab5bc5cc13019c9f78a337f3e91c254eb
|
data/README.md
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
# Codepulse
|
|
2
|
+
|
|
3
|
+
Terminal tool to analyze GitHub pull request pickup times, merge times, and sizes using the `gh` CLI.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
### Prerequisites
|
|
8
|
+
|
|
9
|
+
1. Ruby 3.0+
|
|
10
|
+
2. Install GitHub CLI: https://cli.github.com
|
|
11
|
+
3. Authenticate:
|
|
12
|
+
```sh
|
|
13
|
+
gh auth login
|
|
14
|
+
```
|
|
15
|
+
|
|
16
|
+
### Install the gem
|
|
17
|
+
|
|
18
|
+
```sh
|
|
19
|
+
gem build codepulse.gemspec
|
|
20
|
+
gem install codepulse-0.1.0.gem
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Usage
|
|
24
|
+
|
|
25
|
+
```sh
|
|
26
|
+
# In a git repo (auto-detects owner/repo)
|
|
27
|
+
codepulse
|
|
28
|
+
|
|
29
|
+
# Or specify explicitly
|
|
30
|
+
codepulse owner/repo
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
### Options
|
|
34
|
+
|
|
35
|
+
| Option | Description | Default |
|
|
36
|
+
|--------|-------------|---------|
|
|
37
|
+
| `-s`, `--state STATE` | `open`, `closed`, or `all` | `all` |
|
|
38
|
+
| `-l`, `--limit COUNT` | Max PRs to fetch | auto (5 × business-days) |
|
|
39
|
+
| `--business-days DAYS` | PRs from last N business days | `7` |
|
|
40
|
+
| `--details` | Show individual PR table (sorted by slowest pickup) | off |
|
|
41
|
+
| `--gh-command PATH` | Custom `gh` executable path | `gh` |
|
|
42
|
+
|
|
43
|
+
### Examples
|
|
44
|
+
|
|
45
|
+
```sh
|
|
46
|
+
# Summary for current repo (last 7 business days)
|
|
47
|
+
codepulse
|
|
48
|
+
|
|
49
|
+
# Summary for specific repo
|
|
50
|
+
codepulse rails/rails
|
|
51
|
+
|
|
52
|
+
# With individual PR details
|
|
53
|
+
codepulse rails/rails --details
|
|
54
|
+
|
|
55
|
+
# Last 30 business days, limit 50 PRs
|
|
56
|
+
codepulse rails/rails --business-days 30 --limit 50
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
## Output
|
|
60
|
+
|
|
61
|
+
```
|
|
62
|
+
======================================================================================
|
|
63
|
+
PR PICKUP TIME REPORT | Last 14 business days (Dec 4 - Dec 24)
|
|
64
|
+
rails/rails
|
|
65
|
+
======================================================================================
|
|
66
|
+
|
|
67
|
+
--------------------------------------------------------------------------------------
|
|
68
|
+
SUMMARY (18 PRs with pickup, 5 pending)
|
|
69
|
+
--------------------------------------------------------------------------------------
|
|
70
|
+
|
|
71
|
+
Average pickup time: 4h 23m
|
|
72
|
+
Median pickup time: 2h 15m
|
|
73
|
+
p95 pickup time: 1d 8h
|
|
74
|
+
Fastest pickup time: 8m
|
|
75
|
+
Slowest pickup time: 2d 5h
|
|
76
|
+
|
|
77
|
+
Average time to merge: 1d 2h
|
|
78
|
+
Median time to merge: 18h 30m
|
|
79
|
+
p95 time to merge: 3d 4h
|
|
80
|
+
Fastest time to merge: 45m
|
|
81
|
+
Slowest time to merge: 5d 12h
|
|
82
|
+
|
|
83
|
+
...
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
## What is calculated
|
|
87
|
+
|
|
88
|
+
- **Pickup time**: Time from PR creation to first non-author response (business days only, Mon–Fri)
|
|
89
|
+
- **Time to merge**: Time from PR creation to merge
|
|
90
|
+
- **PR size**: Net lines (additions − deletions) and files changed
|
|
91
|
+
- **Stats**: Average, median, p95, fastest, slowest
|
|
92
|
+
|
|
93
|
+
## Filters
|
|
94
|
+
|
|
95
|
+
- **Default 7 business days**: Only analyzes recent PRs
|
|
96
|
+
- **Closed unmerged PRs excluded**: Abandoned PRs are filtered out
|
|
97
|
+
- **Bots ignored**: Copilot, GitHub Actions, and other bot reviewers don't count as pickup
|
|
98
|
+
|
|
99
|
+
## Development
|
|
100
|
+
|
|
101
|
+
```sh
|
|
102
|
+
# Run tests
|
|
103
|
+
rake test
|
|
104
|
+
|
|
105
|
+
# Lint (requires: gem install rubocop)
|
|
106
|
+
rubocop
|
|
107
|
+
|
|
108
|
+
# Rebuild and install
|
|
109
|
+
gem build codepulse.gemspec
|
|
110
|
+
gem install codepulse-0.1.0.gem
|
|
111
|
+
```
|
data/lib/codepulse/base_client.rb
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "json"
|
|
4
|
+
require "uri"
|
|
5
|
+
|
|
6
|
+
module Codepulse
  # Shared GitHub API client logic for fetching PRs, reviews, and comments.
  #
  # Including classes must provide the private `api_get(path, query_params)`
  # transport hook; everything else here is transport-agnostic.
  module BaseClient
    # Repositories must look like "owner/name" — exactly one slash.
    REPO_FORMAT = %r{\A[^/]+/[^/]+\z}

    # Fetches pull requests with pagination, then fetches full details for each.
    #
    # The list endpoint returns abbreviated payloads, so a follow-up request is
    # issued per PR (N+1 by design) to obtain additions/deletions/changed_files.
    #
    # @param repository [String] "owner/name"
    # @param state [String] "open", "closed", or "all"
    # @param limit [Integer] maximum number of PRs to return
    # @return [Array<Hash>] full PR payloads
    def pull_requests(repository, state:, limit:)
      ensure_repository_format(repository)

      page_size = [limit, 100].min
      page_number = 1
      stubs = []

      until stubs.length >= limit
        page = api_get(
          "/repos/#{repository}/pulls",
          state: state,
          per_page: page_size,
          page: page_number
        )
        break if page.empty?

        stubs += page
        # A short page means GitHub has no further results.
        break if page.length < page_size

        page_number += 1
      end

      fetch_pull_request_details(repository, stubs.first(limit))
    end

    # Reviews submitted on a single pull request (first 100).
    def pull_request_reviews(repository, pull_number)
      ensure_repository_format(repository)
      api_get("/repos/#{repository}/pulls/#{pull_number}/reviews", per_page: 100)
    end

    # Inline review comments on a single pull request (first 100).
    def pull_request_comments(repository, pull_number)
      ensure_repository_format(repository)
      api_get("/repos/#{repository}/pulls/#{pull_number}/comments", per_page: 100)
    end

    # Top-level conversation comments on a single pull request (first 100).
    def issue_comments(repository, pull_number)
      ensure_repository_format(repository)
      api_get("/repos/#{repository}/issues/#{pull_number}/comments", per_page: 100)
    end

    private

    # Transport hook; implemented by the including client.
    def api_get(_path, _query_params = {})
      raise NotImplementedError, "Subclasses must implement api_get"
    end

    # @raise [ConfigurationError] when the repository string is malformed
    def ensure_repository_format(repository)
      raise ConfigurationError, "Repository must be in the format owner/name" unless repository.to_s.match?(REPO_FORMAT)
    end

    # Issues one detail request per PR stub and returns the full payloads.
    def fetch_pull_request_details(repository, pull_requests)
      pull_requests.map { |stub| api_get("/repos/#{repository}/pulls/#{stub["number"]}") }
    end

    # Parses a JSON body; blank bodies become an empty hash.
    #
    # @raise [ApiError] when the body is present but not valid JSON
    def parse_json(body)
      text = body.to_s
      return {} if text.strip.empty?

      begin
        JSON.parse(text)
      rescue JSON::ParserError => error
        raise ApiError, "Failed to parse response: #{error.message}"
      end
    end

    # Builds a "?key=value" query suffix, or "" for no params.
    def encode_query(query_params)
      query_params.empty? ? "" : "?#{URI.encode_www_form(query_params)}"
    end
  end
end
|
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "optparse"
|
|
4
|
+
require "open3"
|
|
5
|
+
require "json"
|
|
6
|
+
|
|
7
|
+
module Codepulse
  # Command-line entry point: parses options, fetches PRs via the gh CLI,
  # filters them, computes metrics, and prints a formatted report.
  class CLI
    include TimeHelpers

    DEFAULT_STATE = "all"
    DEFAULT_BUSINESS_DAYS = 7
    # Heuristic used to auto-size --limit when the user does not pass one.
    PRS_PER_BUSINESS_DAY = 5
    MAX_AUTO_LIMIT = 200

    # Convenience wrapper: `CLI.start` builds an instance and runs it.
    def self.start(argument_list = ARGV)
      new(argument_list).run
    end

    def initialize(argument_list)
      @argument_list = argument_list
      @options = {
        state: DEFAULT_STATE,
        limit: nil, # nil means "auto" — see #effective_limit
        gh_command: GhCliClient::DEFAULT_COMMAND,
        business_days_back: DEFAULT_BUSINESS_DAYS,
        details: false
      }
    end

    # Runs the full pipeline. Exits with status 1 on option, configuration,
    # or GitHub API errors (all messages go to stderr).
    def run
      parse_options
      validate_required_options

      repo = @options.fetch(:repo)
      client = GhCliClient.new(command: @options.fetch(:gh_command))

      pull_requests = fetch_pull_requests(client, repo)
      pull_requests = apply_filters(pull_requests)
      metrics = calculate_metrics(client, repo, pull_requests)

      # Erase the transient progress line before printing the report.
      clear_status
      Formatter.new.output(
        metrics,
        repo: repo,
        detailed: @options.fetch(:details),
        business_days: @options.fetch(:business_days_back)
      )
    rescue OptionParser::ParseError => error
      $stderr.puts "Error: #{error.message}"
      $stderr.puts
      $stderr.puts option_parser
      exit 1
    rescue ConfigurationError => error
      $stderr.puts "Configuration error: #{error.message}"
      exit 1
    rescue ApiError => error
      $stderr.puts "GitHub API error: #{error.message}"
      exit 1
    end

    private

    # Parses flags (mutating @argument_list), then treats the first leftover
    # argument as owner/repo; falls back to gh's view of the current directory.
    def parse_options
      option_parser.parse!(@argument_list)
      @options[:repo] = @argument_list.shift if @argument_list.any?
      @options[:repo] ||= detect_repo_from_git
    end

    # Memoized so the same parser instance can be printed from the rescue
    # clause in #run after a parse error.
    def option_parser
      @option_parser ||= OptionParser.new do |parser|
        parser.banner = "Usage: codepulse [options] [owner/repo]"

        parser.on("-s", "--state STATE", "Pull request state: open, closed, all (default: #{DEFAULT_STATE})") do |state|
          @options[:state] = state
        end

        parser.on("-l", "--limit COUNT", Integer, "Max PRs to fetch (default: auto based on business-days)") do |count|
          @options[:limit] = count
        end

        parser.on("--gh-command PATH", "Path to gh executable (default: #{GhCliClient::DEFAULT_COMMAND})") do |path|
          @options[:gh_command] = path
        end

        parser.on("--business-days DAYS", Integer, "PRs from last N business days (default: #{DEFAULT_BUSINESS_DAYS})") do |days|
          @options[:business_days_back] = days
        end

        parser.on("--details", "Show per-PR detail table instead of summary") do
          @options[:details] = true
        end

        parser.on("-h", "--help", "Show help") do
          puts parser
          exit
        end
      end
    end

    # Raises OptionParser errors (caught in #run) for missing/invalid options.
    def validate_required_options
      raise OptionParser::MissingArgument, "owner/repo is required" unless @options[:repo]

      validate_state
      validate_positive_integer(:limit, "limit")
      validate_positive_integer(:business_days_back, "business-days")
    end

    def validate_state
      return if %w[open closed all].include?(@options[:state])

      raise OptionParser::InvalidArgument, "state must be open, closed, or all"
    end

    # nil is allowed (means "unset"); otherwise the value must be a positive
    # Integer. Catches e.g. `--limit 0` which OptionParser's Integer coercion
    # would otherwise accept.
    def validate_positive_integer(key, name)
      value = @options[key]
      return if value.nil?
      return if value.is_a?(Integer) && value.positive?

      raise OptionParser::InvalidArgument, "#{name} must be a positive integer"
    end

    def fetch_pull_requests(client, repo)
      limit = effective_limit
      status "Fetching pull requests from #{repo}..."
      client.pull_requests(repo, state: @options.fetch(:state), limit: limit)
    end

    # Explicit --limit wins; otherwise estimate PRS_PER_BUSINESS_DAY PRs per
    # business day, capped at MAX_AUTO_LIMIT.
    def effective_limit
      return @options[:limit] if @options[:limit]

      business_days = @options.fetch(:business_days_back)
      calculated = business_days * PRS_PER_BUSINESS_DAY
      [calculated, MAX_AUTO_LIMIT].min
    end

    # Drops abandoned (closed, never merged) PRs, then keeps only PRs created
    # within the business-day window.
    # NOTE(review): business_days_cutoff comes from TimeHelpers (not shown in
    # this chunk); presumably it returns nil when no window applies.
    def apply_filters(pull_requests)
      status "Filtering #{pull_requests.length} pull requests..."
      pull_requests = exclude_closed_unmerged(pull_requests)

      cutoff_time = business_days_cutoff(@options[:business_days_back])
      pull_requests = filter_by_business_days(pull_requests, cutoff_time) if cutoff_time
      pull_requests
    end

    # Computes metrics PR by PR, updating the stderr progress line as it goes.
    def calculate_metrics(client, repo, pull_requests)
      status "Calculating metrics for #{pull_requests.length} pull requests..."
      calculator = MetricsCalculator.new(client: client)

      pull_requests.each_with_index.map do |pull_request, index|
        status " Analyzing PR ##{pull_request["number"]} (#{index + 1}/#{pull_requests.length})..."
        calculator.metrics_for_pull_request(repo, pull_request)
      end
    end

    # Closed-but-unmerged PRs are abandoned work and would skew the stats.
    def exclude_closed_unmerged(pull_requests)
      pull_requests.reject do |pull_request|
        pull_request["state"] == "closed" && pull_request["merged_at"].nil?
      end
    end

    def filter_by_business_days(pull_requests, cutoff_time)
      pull_requests.select do |pull_request|
        created_at = parse_time(pull_request["created_at"])
        created_at && created_at >= cutoff_time
      end
    end

    # Asks `gh repo view` for the current directory's repository; returns nil
    # on any failure (not in a repo, gh missing, unparseable output) so the
    # missing-repo error surfaces later in validate_required_options.
    def detect_repo_from_git
      stdout, _stderr, status = Open3.capture3(@options[:gh_command], "repo", "view", "--json", "nameWithOwner")
      return nil unless status.success?

      data = JSON.parse(stdout)
      data["nameWithOwner"]
    rescue JSON::ParserError, Errno::ENOENT
      nil
    end

    # Writes a transient progress message to stderr ("\r" + ANSI clear-line),
    # overwriting the previous one in place.
    def status(message)
      $stderr.print "\r\e[K#{message}"
    end

    def clear_status
      $stderr.print "\r\e[K"
    end
  end
end
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Codepulse
  # Formats and outputs PR metrics as a terminal report.
  class Formatter
    REPORT_WIDTH = 86
    # NOTE(review): TITLE_LIMIT appears unused — truncation widths are
    # hard-coded at the call sites (40 and 50).
    TITLE_LIMIT = 50
    MIN_FOR_P95 = 50 # Minimum data points to show p95

    # Main entry point: outputs metrics as a formatted report.
    # Splits metrics into PRs with a pickup event (summarized/tabulated) and
    # PRs without one (listed separately as "excluded").
    def output(metrics, repo:, detailed: true, business_days: nil)
      if metrics.empty?
        puts "No pull requests found for #{repo}."
        return
      end

      with_pickup = metrics.select { |metric| metric[:pickup_time_seconds] }
      without_pickup = metrics.reject { |metric| metric[:pickup_time_seconds] }

      output_report(
        with_pickup,
        excluded: without_pickup,
        repo: repo,
        business_days: business_days,
        detailed: detailed
      )
    end

    private

    def output_report(metrics, excluded:, repo:, business_days:, detailed:)
      print_report_header(repo, business_days)
      puts
      print_definitions
      puts
      print_summary(metrics, excluded: excluded)
      print_details(metrics, excluded: excluded) if detailed
    end

    # Glossary printed under the header so readers know what each stat means.
    def print_definitions
      puts " Pickup time: Time from PR creation to first reviewer response (business days, excl. US holidays)"
      puts " Time to merge: Time from PR creation to merge (business days, excl. US holidays)"
      puts " PR size: Net lines changed (additions - deletions)"
      puts " Files changed: Number of files modified in the PR"
    end

    # Prints aggregate stats; the title also counts excluded PRs broken down
    # into still-open vs. merged-without-pickup.
    def print_summary(metrics, excluded:)
      open_count = excluded.count { |m| m[:merged_at].nil? }
      merged_count = excluded.count - open_count

      excluded_text = build_excluded_text(open_count, merged_count)
      print_section_title("SUMMARY (#{metrics.count} PRs with pickup#{excluded_text})")
      puts

      print_duration_stats("Pickup time", metrics.map { |m| m[:pickup_time_seconds] })
      puts

      # compact: merge time is nil for PRs that are picked up but unmerged.
      print_duration_stats("Time to merge", metrics.map { |m| m[:merge_time_seconds] }.compact)
      puts

      print_number_stats("PR size (net lines)", metrics.map { |m| m[:additions].to_i - m[:deletions].to_i })
      puts

      print_number_stats("Files changed", metrics.map { |m| m[:changed_files].to_i })
    end

    # Per-PR tables: picked-up PRs sorted slowest-first, then excluded PRs.
    def print_details(metrics, excluded:)
      if metrics.any?
        sorted = metrics.sort_by { |m| -(m[:pickup_time_seconds] || 0) }
        puts
        print_section_title("INDIVIDUAL PRs (slowest pickup first)")
        puts
        output_individual_prs(sorted)
      end

      return unless excluded.any?

      puts
      print_section_title("EXCLUDED PRs (no pickup yet)")
      puts
      output_excluded_prs(excluded)
    end

    def print_report_header(repo, business_days)
      time_period = build_time_period(business_days)
      puts "=" * REPORT_WIDTH
      puts " PR PICKUP TIME REPORT | #{time_period}"
      puts " #{repo}"
      puts "=" * REPORT_WIDTH
    end

    # e.g. "Last 7 business days (Dec 4 - Dec 24)", or "all time" when no
    # window was given.
    def build_time_period(business_days)
      return "all time" unless business_days

      end_date = Time.now
      start_date = calculate_start_date(business_days)
      "Last #{business_days} business days (#{format_date(start_date)} - #{format_date(end_date)})"
    end

    # Walks backwards one calendar day at a time, counting only weekdays.
    # NOTE(review): uses fixed 86_400-second days, so a DST transition can
    # shift the displayed start date by an hour — cosmetic only.
    def calculate_start_date(business_days)
      current = Time.now
      remaining = business_days

      while remaining.positive?
        current -= 86_400
        remaining -= 1 if weekday?(current)
      end

      current
    end

    # Monday–Friday. Duplicates TimeHelpers#weekday? since this class does not
    # include that module.
    def weekday?(time_value)
      time_value.wday.between?(1, 5)
    end

    def format_date(time_value)
      time_value.strftime("%b %-d")
    end

    # Builds the ", N awaiting pickup, M merged without pickup" suffix for the
    # summary title; empty string when both counts are zero.
    def build_excluded_text(open_count, merged_count)
      parts = []
      parts << "#{open_count} awaiting pickup" if open_count.positive?
      parts << "#{merged_count} merged without pickup" if merged_count.positive?

      return "" if parts.empty?

      ", #{parts.join(", ")}"
    end

    def print_section_title(title)
      puts "-" * REPORT_WIDTH
      puts " #{title}"
      puts "-" * REPORT_WIDTH
    end

    # Fixed-width table of picked-up PRs: number, pickup, merge, size, author,
    # truncated title.
    def output_individual_prs(metrics)
      pr_width = 8
      pickup_width = 12
      merge_width = 12
      lines_width = 10
      author_width = 16

      header = [
        "PR".ljust(pr_width),
        "PICKUP".ljust(pickup_width),
        "MERGE".ljust(merge_width),
        "LINES".ljust(lines_width),
        "AUTHOR".ljust(author_width),
        "TITLE"
      ].join(" ")
      puts " #{header}"

      metrics.each do |metric|
        net_lines = metric[:additions].to_i - metric[:deletions].to_i
        # Em dash marks PRs that are picked up but not merged yet.
        merge_time = metric[:merge_time_seconds] ? format_duration_compact(metric[:merge_time_seconds]) : "—"

        row = [
          "##{metric.fetch(:number)}".ljust(pr_width),
          format_duration_compact(metric[:pickup_time_seconds]).ljust(pickup_width),
          merge_time.ljust(merge_width),
          size_string(net_lines).ljust(lines_width),
          metric.fetch(:author, "unknown").to_s.ljust(author_width),
          truncate(metric.fetch(:title).to_s, 40)
        ].join(" ")
        puts " #{row}"
      end
    end

    # Fixed-width table of PRs that have no pickup event yet.
    def output_excluded_prs(metrics)
      pr_width = 10
      age_width = 14
      author_width = 20

      header = [
        "PR".ljust(pr_width),
        "AGE".ljust(age_width),
        "AUTHOR".ljust(author_width),
        "TITLE"
      ].join(" ")
      puts " #{header}"

      metrics.each do |metric|
        age = metric[:created_at] ? time_ago(metric[:created_at]) : "unknown"
        row = [
          "##{metric.fetch(:number)}".ljust(pr_width),
          age.ljust(age_width),
          metric.fetch(:author, "unknown").to_s.ljust(author_width),
          truncate(metric.fetch(:title).to_s, 50)
        ].join(" ")
        puts " #{row}"
      end
    end

    # Average / median / p95 / fastest / slowest for a list of durations in
    # seconds. p95 is suppressed below MIN_FOR_P95 samples (too noisy).
    def print_duration_stats(label, values)
      return puts(" #{label}: none") if values.empty?

      sorted = values.sort
      average_seconds = (values.sum / values.length.to_f).round

      puts " Average #{label.downcase}: #{format_duration_compact(average_seconds)}"
      puts " Median #{label.downcase}: #{format_duration_compact(percentile_value(sorted, 50))}"
      puts " p95 #{label.downcase}: #{format_duration_compact(percentile_value(sorted, 95))}" if values.length >= MIN_FOR_P95
      puts " Fastest #{label.downcase}: #{format_duration_compact(sorted.first)}"
      puts " Slowest #{label.downcase}: #{format_duration_compact(sorted.last)}"
    end

    # Same shape as print_duration_stats, but for plain numbers (sizes, file
    # counts) with Min/Max labels.
    def print_number_stats(label, values)
      return puts(" #{label}: none") if values.empty?

      sorted = values.sort
      average_value = (values.sum / values.length.to_f).round(1)

      puts " Average #{label.downcase}: #{format_number_compact(average_value)}"
      puts " Median #{label.downcase}: #{format_number_compact(percentile_value(sorted, 50))}"
      puts " p95 #{label.downcase}: #{format_number_compact(percentile_value(sorted, 95))}" if values.length >= MIN_FOR_P95
      puts " Min #{label.downcase}: #{format_number_compact(sorted.first)}"
      puts " Max #{label.downcase}: #{format_number_compact(sorted.last)}"
    end

    # Truncates to `length` characters total, ellipsis included.
    def truncate(value, length)
      return value if value.length <= length

      "#{value[0, length - 1]}…"
    end

    # "+N" / "-N" / "0" for net line counts.
    def size_string(net_lines)
      return "0" if net_lines.zero?

      net_lines.positive? ? "+#{net_lines}" : net_lines.to_s
    end

    # Renders seconds as the two most significant units: "2d 5h", "4h 23m",
    # "8m". Non-positive input renders as "0m".
    def format_duration_compact(seconds)
      seconds = seconds.to_i
      return "0m" if seconds <= 0

      total_minutes = (seconds / 60.0).round
      minutes = total_minutes % 60
      total_hours = total_minutes / 60
      hours = total_hours % 24
      days = total_hours / 24

      if days.positive?
        hours_part = hours.positive? ? " #{hours}h" : ""
        "#{days}d#{hours_part}"
      elsif total_hours.positive?
        minutes_part = minutes.positive? ? " #{minutes}m" : ""
        "#{total_hours}h#{minutes_part}"
      else
        "#{minutes}m"
      end
    end

    # Human-readable age: seconds → minutes → hours (up to 48h) → days.
    def time_ago(time_value)
      seconds = Time.now - time_value
      return "#{seconds.to_i}s ago" if seconds < 60

      minutes = seconds / 60
      return "#{minutes.round}m ago" if minutes < 60

      hours = minutes / 60
      return "#{hours.round}h ago" if hours < 48

      days = hours / 24
      "#{days.round}d ago"
    end

    # Returns the value at the given percentile from a sorted array.
    # Uses nearest-rank method: p50 = median, p95 = 95th percentile.
    def percentile_value(sorted_values, percentile)
      count = sorted_values.length
      rank = (percentile / 100.0 * count).ceil
      index = [rank - 1, count - 1].min
      sorted_values[index]
    end

    # Integers print without a decimal point; non-whole floats keep one place.
    def format_number_compact(value)
      return "0" if value.nil?

      if value.is_a?(Float) && value % 1 != 0
        value.round(1).to_s
      else
        value.to_i.to_s
      end
    end
  end
end
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "open3"
|
|
4
|
+
|
|
5
|
+
module Codepulse
  # BaseClient implementation that shells out to the `gh` CLI, so it reuses
  # gh's stored authentication instead of handling tokens itself.
  class GhCliClient
    include BaseClient

    DEFAULT_COMMAND = "gh"

    # @param command [String] path to the gh executable
    # @raise [ConfigurationError] when gh is missing or unauthenticated
    def initialize(command: DEFAULT_COMMAND)
      @command = command
      verify_cli_available
    end

    private

    # BaseClient transport hook: runs `gh api PATH?QUERY` and parses the JSON
    # body it prints to stdout.
    #
    # @raise [ApiError] when gh exits non-zero
    # @raise [ConfigurationError] when the executable cannot be found
    def api_get(path, query_params = {})
      request_path = "#{path}#{encode_query(query_params)}"
      stdout, stderr, status = Open3.capture3(@command, "api", request_path)

      return parse_json(stdout) if status.success?

      # gh sometimes reports errors on stdout; prefer stderr when present.
      detail = stderr.to_s.strip
      detail = stdout.to_s.strip if detail.empty?
      raise ApiError, "gh api #{request_path} failed: #{detail}"
    rescue Errno::ENOENT
      raise ConfigurationError, "gh CLI not found. Install it from https://cli.github.com and run `gh auth login`."
    end

    # Fails fast at construction time if gh is absent or not logged in, so
    # errors surface before any API work begins.
    def verify_cli_available
      _stdout, _stderr, status = Open3.capture3(@command, "auth", "status")
      raise ConfigurationError, "gh CLI not authenticated. Run `gh auth login` first." unless status.success?
    rescue Errno::ENOENT
      raise ConfigurationError, "gh CLI not found. Install it from https://cli.github.com and run `gh auth login`."
    end
  end
end
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Codepulse
  # Calculates pickup time, merge time, and size metrics for pull requests.
  class MetricsCalculator
    include TimeHelpers

    # Bot accounts to ignore when calculating pickup time.
    # Compared against normalize_actor output (downcased, stripped), so every
    # entry here must be lowercase.
    IGNORED_ACTORS = [
      "copilot-pull-request-reviewer",
      "copilot-pull-request-reviewer[bot]",
      "copilot",
      "copilot[bot]",
      "copilot-bot",
      "github-copilot",
      "github-copilot[bot]",
      "github-actions",
      "github-actions[bot]"
    ].freeze

    # @param client [#pull_request_reviews, #pull_request_comments, #issue_comments]
    def initialize(client:)
      @client = client
    end

    # Returns a hash of metrics for a single PR.
    #
    # Durations come from TimeHelpers#business_seconds_between (defined outside
    # this chunk — presumably business-time only; verify there). They are nil
    # when no qualifying pickup happened / the PR is unmerged.
    def metrics_for_pull_request(repository, pull_request)
      created_at = parse_time(pull_request["created_at"])
      merged_at = parse_time(pull_request["merged_at"])
      pickup_event = find_pickup_event(repository, pull_request, created_at)
      pickup_seconds = pickup_event ? business_seconds_between(created_at, pickup_event.fetch(:timestamp)) : nil
      merge_seconds = merged_at && created_at ? business_seconds_between(created_at, merged_at) : nil

      {
        number: pull_request["number"],
        title: pull_request["title"],
        author: pull_request.dig("user", "login"),
        created_at: created_at,
        merged_at: merged_at,
        additions: pull_request["additions"].to_i,
        deletions: pull_request["deletions"].to_i,
        changed_files: pull_request["changed_files"].to_i,
        pickup_time_seconds: pickup_seconds,
        merge_time_seconds: merge_seconds,
        pickup_actor: pickup_event&.fetch(:actor, nil),
        pickup_at: pickup_event&.fetch(:timestamp, nil),
        pickup_source: pickup_event&.fetch(:source, nil)
      }
    end

    private

    # Finds the first non-author, non-bot response (review, comment, or issue comment).
    # Makes three API calls per PR and keeps the earliest qualifying event
    # across all sources. Returns nil when created_at is nil (the select below
    # filters everything out in that case).
    def find_pickup_event(repository, pull_request, created_at)
      pull_number = pull_request["number"]
      author_login = pull_request.dig("user", "login")

      review_event = earliest_event(
        @client.pull_request_reviews(repository, pull_number),
        author_login: author_login,
        time_key: "submitted_at",
        actor_path: %w[user login],
        source: "review"
      )

      review_comment_event = earliest_event(
        @client.pull_request_comments(repository, pull_number),
        author_login: author_login,
        time_key: "created_at",
        actor_path: %w[user login],
        source: "review_comment"
      )

      issue_comment_event = earliest_event(
        @client.issue_comments(repository, pull_number),
        author_login: author_login,
        time_key: "created_at",
        actor_path: %w[user login],
        source: "issue_comment"
      )

      # :timestamp is always non-nil by construction (build_event drops events
      # without one); the select is effectively a guard on created_at.
      [review_event, review_comment_event, issue_comment_event]
        .compact
        .select { |event| event.fetch(:timestamp) && created_at }
        .min_by { |event| event.fetch(:timestamp) }
    end

    # Maps raw API events to normalized event hashes and returns the earliest,
    # or nil if none qualify.
    def earliest_event(events, author_login:, time_key:, actor_path:, source:)
      events
        .map { |event| build_event(event, author_login: author_login, time_key: time_key, actor_path: actor_path, source: source) }
        .compact
        .min_by { |event| event.fetch(:timestamp) }
    end

    # Normalizes one raw event into {actor:, timestamp:, source:}.
    # Returns nil for events that don't count as pickup: missing actor,
    # self-responses by the PR author, known bots, or missing/invalid timestamps.
    def build_event(event, author_login:, time_key:, actor_path:, source:)
      actor_login = dig_path(event, actor_path)
      return nil if actor_login.nil?

      normalized_actor = normalize_actor(actor_login)
      normalized_author = normalize_actor(author_login)

      return nil if normalized_actor.nil?
      return nil if normalized_actor == normalized_author
      return nil if IGNORED_ACTORS.include?(normalized_actor)

      timestamp = parse_time(event[time_key])
      return nil unless timestamp

      {
        actor: actor_login, # original casing preserved for display
        timestamp: timestamp,
        source: source
      }
    end

    # Hash#dig that tolerates non-hash intermediates (returns nil instead of
    # raising TypeError).
    def dig_path(hash, path)
      path.reduce(hash) { |value, key| value.is_a?(Hash) ? value[key] : nil }
    end

    # Case/whitespace-insensitive login comparison key.
    def normalize_actor(value)
      value.to_s.downcase.strip
    end
  end
end
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "time"

module Codepulse
  # Time calculation helpers for business days and US federal holidays.
  #
  # Day-boundary math reuses the utc_offset of the time it is given, so
  # callers that pass UTC timestamps (e.g. parsed GitHub API times) get
  # UTC-based business days.
  module TimeHelpers
    SECONDS_PER_DAY = 86_400

    # Parses a time string; returns nil when the value cannot be parsed.
    def parse_time(value)
      Time.parse(value.to_s)
    rescue ArgumentError
      nil
    end

    # True when the time falls on a weekday that is not a US federal holiday.
    def business_day?(time_value)
      weekday?(time_value) && !us_holiday?(time_value)
    end

    # True for Monday (wday 1) through Friday (wday 5).
    def weekday?(time_value)
      time_value.wday.between?(1, 5)
    end

    # True when the date is a US federal holiday.
    def us_holiday?(time_value)
      us_holidays(time_value.year).include?(date_key(time_value))
    end

    # Calendar-date identity used for holiday lookups.
    def date_key(time_value)
      [time_value.year, time_value.month, time_value.day]
    end

    # Memoized per-year holiday table.
    def us_holidays(year)
      @us_holidays_cache ||= {}
      @us_holidays_cache[year] ||= build_us_holidays(year)
    end

    # Builds the [year, month, day] list of US federal holidays for a year.
    # NOTE(review): "observed" dates (a holiday shifted to Mon/Fri when it
    # lands on a weekend) are intentionally not modeled here.
    def build_us_holidays(year)
      [
        [year, 1, 1],                # New Year's Day
        nth_weekday(year, 1, 1, 3),  # MLK Day (3rd Monday in January)
        nth_weekday(year, 2, 1, 3),  # Presidents Day (3rd Monday in February)
        last_weekday(year, 5, 1),    # Memorial Day (last Monday in May)
        [year, 6, 19],               # Juneteenth
        [year, 7, 4],                # Independence Day
        nth_weekday(year, 9, 1, 1),  # Labor Day (1st Monday in September)
        nth_weekday(year, 10, 1, 2), # Columbus Day (2nd Monday in October)
        [year, 11, 11],              # Veterans Day
        nth_weekday(year, 11, 4, 4), # Thanksgiving (4th Thursday in November)
        [year, 12, 25]               # Christmas Day
      ]
    end

    # Finds the nth occurrence of a weekday in a month (e.g. 3rd Monday).
    # target_wday: 0=Sun, 1=Mon, ..., 6=Sat. Returns [year, month, day].
    def nth_weekday(year, month, target_wday, occurrence)
      first_day = Time.new(year, month, 1)
      days_until = (target_wday - first_day.wday + 7) % 7
      day = 1 + days_until + (7 * (occurrence - 1))
      [year, month, day]
    end

    # Finds the last occurrence of a weekday in a month
    # (e.g. last Monday of May). Returns [year, month, day].
    def last_weekday(year, month, target_wday)
      next_month = month == 12 ? Time.new(year + 1, 1, 1) : Time.new(year, month + 1, 1)
      last_day = next_month - SECONDS_PER_DAY
      days_back = (last_day.wday - target_wday + 7) % 7
      [year, month, last_day.day - days_back]
    end

    # Midnight at the start of the given time's calendar day (same offset).
    def start_of_day(time_value)
      Time.new(time_value.year, time_value.month, time_value.day, 0, 0, 0, time_value.utc_offset)
    end

    # 23:59:59 of the given time's calendar day (same offset). Kept for
    # callers elsewhere in the gem; no longer used by
    # business_seconds_between (see fix note there).
    def end_of_day(time_value)
      Time.new(time_value.year, time_value.month, time_value.day, 23, 59, 59, time_value.utc_offset)
    end

    # Seconds between two times counting only time that falls on business
    # days; weekends and US holidays contribute nothing.
    #
    # Fix: each daily segment now ends at the NEXT day's midnight
    # (exclusive) rather than at 23:59:59, so a full business day counts
    # all 86,400 seconds instead of silently dropping one second per
    # day boundary.
    #
    # Returns nil when either bound is nil, and 0 when end_time <= start_time.
    def business_seconds_between(start_time, end_time)
      return nil unless start_time && end_time
      return 0 if end_time <= start_time

      total = 0
      current_start = start_time

      while current_start < end_time
        next_day_start = start_of_day(current_start + SECONDS_PER_DAY)
        segment_end = [next_day_start, end_time].min

        total += (segment_end - current_start) if business_day?(current_start)

        current_start = next_day_start
      end

      total.to_i
    end

    # Returns the midnight cutoff N business days before now,
    # or nil when business_days is nil. Uses wall-clock Time.now.
    def business_days_cutoff(business_days)
      return nil unless business_days

      current_time = Time.now
      remaining_days = business_days

      while remaining_days.positive?
        current_time -= SECONDS_PER_DAY
        remaining_days -= 1 if business_day?(current_time)
      end

      start_of_day(current_time)
    end
  end
end
|
data/lib/codepulse.rb
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Gem entry point: loads every component. Errors and shared helpers are
# required first, the CLI last — presumably because later files depend on
# earlier ones; verify before reordering.
require_relative "codepulse/errors"
require_relative "codepulse/time_helpers"
require_relative "codepulse/base_client"
require_relative "codepulse/gh_cli_client"
require_relative "codepulse/metrics_calculator"
require_relative "codepulse/formatter"
require_relative "codepulse/cli"

# Top-level namespace for the codepulse gem.
module Codepulse
  # Gem version; keep in sync with the gemspec/metadata (0.1.0).
  VERSION = "0.1.0"
end
|
metadata
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: codepulse
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.1.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Patrick Navarro
|
|
8
|
+
bindir: bin
|
|
9
|
+
cert_chain: []
|
|
10
|
+
date: 2025-12-24 00:00:00.000000000 Z
|
|
11
|
+
dependencies: []
|
|
12
|
+
description: Terminal tool to analyze GitHub pull request pickup times, merge times,
|
|
13
|
+
and sizes using the gh CLI.
|
|
14
|
+
email:
|
|
15
|
+
- patrick@workbright.com
|
|
16
|
+
executables:
|
|
17
|
+
- codepulse
|
|
18
|
+
extensions: []
|
|
19
|
+
extra_rdoc_files: []
|
|
20
|
+
files:
|
|
21
|
+
- README.md
|
|
22
|
+
- bin/codepulse
|
|
23
|
+
- lib/codepulse.rb
|
|
24
|
+
- lib/codepulse/base_client.rb
|
|
25
|
+
- lib/codepulse/cli.rb
|
|
26
|
+
- lib/codepulse/errors.rb
|
|
27
|
+
- lib/codepulse/formatter.rb
|
|
28
|
+
- lib/codepulse/gh_cli_client.rb
|
|
29
|
+
- lib/codepulse/metrics_calculator.rb
|
|
30
|
+
- lib/codepulse/time_helpers.rb
|
|
31
|
+
homepage: https://github.com/WorkBright/codepulse
|
|
32
|
+
licenses: []
|
|
33
|
+
metadata:
|
|
34
|
+
rubygems_mfa_required: 'true'
|
|
35
|
+
homepage_uri: https://github.com/WorkBright/codepulse
|
|
36
|
+
source_code_uri: https://github.com/WorkBright/codepulse
|
|
37
|
+
rdoc_options: []
|
|
38
|
+
require_paths:
|
|
39
|
+
- lib
|
|
40
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
41
|
+
requirements:
|
|
42
|
+
- - ">="
|
|
43
|
+
- !ruby/object:Gem::Version
|
|
44
|
+
version: '3.0'
|
|
45
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
46
|
+
requirements:
|
|
47
|
+
- - ">="
|
|
48
|
+
- !ruby/object:Gem::Version
|
|
49
|
+
version: '0'
|
|
50
|
+
requirements: []
|
|
51
|
+
rubygems_version: 3.6.2
|
|
52
|
+
specification_version: 4
|
|
53
|
+
summary: GitHub PR metrics TUI app
|
|
54
|
+
test_files: []
|