how_is 8.0.0 → 9.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/.gitignore +15 -15
- data/.rspec +4 -0
- data/.rspec-ignore-tags +2 -0
- data/.travis.yml +8 -4
- data/Gemfile +1 -0
- data/README.md +76 -76
- data/Rakefile +24 -0
- data/data/issues.plg +22 -22
- data/exe/how_is +30 -75
- data/how_is.gemspec +38 -37
- data/lib/how_is.rb +58 -56
- data/lib/how_is/analyzer.rb +170 -170
- data/lib/how_is/chart.rb +83 -83
- data/lib/how_is/cli.rb +90 -92
- data/lib/how_is/cli/parser.rb +76 -0
- data/lib/how_is/fetcher.rb +45 -45
- data/lib/how_is/pulse.rb +29 -29
- data/lib/how_is/report.rb +92 -92
- data/lib/how_is/report/html.rb +100 -100
- data/lib/how_is/report/json.rb +17 -17
- data/lib/how_is/report/pdf.rb +78 -78
- data/lib/how_is/version.rb +3 -3
- data/roadmap.markdown +49 -49
- metadata +21 -6
data/lib/how_is.rb
CHANGED
@@ -1,56 +1,58 @@
|
|
1
|
-
require 'how_is/version'
|
2
|
-
require 'contracts'
|
3
|
-
require 'cacert'
|
4
|
-
|
5
|
-
Cacert.set_in_env
|
6
|
-
|
7
|
-
C = Contracts
|
8
|
-
|
9
|
-
module HowIs
|
10
|
-
include Contracts::Core
|
11
|
-
|
12
|
-
require 'how_is/fetcher'
|
13
|
-
require 'how_is/analyzer'
|
14
|
-
require 'how_is/report'
|
15
|
-
|
16
|
-
|
17
|
-
|
18
|
-
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
1
|
+
require 'how_is/version'
|
2
|
+
require 'contracts'
|
3
|
+
require 'cacert'
|
4
|
+
|
5
|
+
Cacert.set_in_env
|
6
|
+
|
7
|
+
C = Contracts
|
8
|
+
|
9
|
+
module HowIs
|
10
|
+
include Contracts::Core
|
11
|
+
|
12
|
+
require 'how_is/fetcher'
|
13
|
+
require 'how_is/analyzer'
|
14
|
+
require 'how_is/report'
|
15
|
+
|
16
|
+
DEFAULT_FORMAT = :html
|
17
|
+
|
18
|
+
def self.generate_report_file(report:, **kw_args)
|
19
|
+
analysis = self.generate_analysis(**kw_args)
|
20
|
+
|
21
|
+
Report.export!(analysis, report)
|
22
|
+
end
|
23
|
+
|
24
|
+
def self.generate_report(format:, **kw_args)
|
25
|
+
analysis = self.generate_analysis(**kw_args)
|
26
|
+
|
27
|
+
Report.export(analysis, format)
|
28
|
+
end
|
29
|
+
|
30
|
+
def self.supported_formats
|
31
|
+
report_constants = HowIs.constants.grep(/.Report/) - [:BaseReport]
|
32
|
+
report_constants.map {|x| x.to_s.split('Report').first.downcase }
|
33
|
+
end
|
34
|
+
|
35
|
+
def self.can_export_to?(file)
|
36
|
+
supported_formats.include?(file.split('.').last)
|
37
|
+
end
|
38
|
+
|
39
|
+
Contract C::KeywordArgs[repository: String,
|
40
|
+
from: C::Optional[C::Or[String, nil]],
|
41
|
+
fetcher: C::Optional[Class],
|
42
|
+
analyzer: C::Optional[Class],
|
43
|
+
github: C::Optional[C::Any]] => C::Any
|
44
|
+
def self.generate_analysis(repository:,
|
45
|
+
from: nil,
|
46
|
+
fetcher: Fetcher.new,
|
47
|
+
analyzer: Analyzer.new,
|
48
|
+
github: nil)
|
49
|
+
if from
|
50
|
+
analysis = analyzer.from_file(from)
|
51
|
+
else
|
52
|
+
raw_data = fetcher.call(repository, github)
|
53
|
+
analysis = analyzer.call(raw_data)
|
54
|
+
end
|
55
|
+
|
56
|
+
analysis
|
57
|
+
end
|
58
|
+
end
|
data/lib/how_is/analyzer.rb
CHANGED
@@ -1,170 +1,170 @@
|
|
1
|
-
require 'contracts'
|
2
|
-
require 'ostruct'
|
3
|
-
require 'date'
|
4
|
-
require 'json'
|
5
|
-
|
6
|
-
module HowIs
|
7
|
-
##
|
8
|
-
# Represents a completed analysis of the repository being analyzed.
|
9
|
-
class Analysis < OpenStruct
|
10
|
-
end
|
11
|
-
|
12
|
-
class Analyzer
|
13
|
-
include Contracts::Core
|
14
|
-
|
15
|
-
class UnsupportedImportFormat < StandardError
|
16
|
-
def initialize(format)
|
17
|
-
super("Unsupported import format: #{format}")
|
18
|
-
end
|
19
|
-
end
|
20
|
-
|
21
|
-
Contract Fetcher::Results, C::KeywordArgs[analysis_class: C::Optional[Class]] => Analysis
|
22
|
-
def call(data, analysis_class: Analysis)
|
23
|
-
issues = data.issues
|
24
|
-
pulls = data.pulls
|
25
|
-
|
26
|
-
analysis_class.new(
|
27
|
-
issues_url: "https://github.com/#{data.repository}/issues",
|
28
|
-
pulls_url: "https://github.com/#{data.repository}/pulls",
|
29
|
-
|
30
|
-
repository: data.repository,
|
31
|
-
|
32
|
-
number_of_issues: issues.length,
|
33
|
-
number_of_pulls: pulls.length,
|
34
|
-
|
35
|
-
issues_with_label: with_label_links(num_with_label(issues), data.repository),
|
36
|
-
issues_with_no_label: {'link' => nil, 'total' => num_with_no_label(issues)},
|
37
|
-
|
38
|
-
average_issue_age: average_age_for(issues),
|
39
|
-
average_pull_age: average_age_for(pulls),
|
40
|
-
|
41
|
-
oldest_issue: issue_or_pull_to_hash(oldest_for(issues)),
|
42
|
-
oldest_pull: issue_or_pull_to_hash(oldest_for(pulls)),
|
43
|
-
)
|
44
|
-
end
|
45
|
-
|
46
|
-
def from_file(file)
|
47
|
-
extension = file.split('.').last
|
48
|
-
raise UnsupportedImportFormat, extension unless extension == 'json'
|
49
|
-
|
50
|
-
hash = JSON.parse(open(file).read)
|
51
|
-
hash = hash.map do |k, v|
|
52
|
-
v = DateTime.parse(v) if k.end_with?('_date')
|
53
|
-
|
54
|
-
[k, v]
|
55
|
-
end.to_h
|
56
|
-
|
57
|
-
%w[oldest_issue oldest_pull].each do |key|
|
58
|
-
hash[key]['date'] = DateTime.parse(hash[key]['date'])
|
59
|
-
end
|
60
|
-
|
61
|
-
Analysis.new(hash)
|
62
|
-
end
|
63
|
-
|
64
|
-
# Given an Array of issues or pulls, return a Hash specifying how many
|
65
|
-
# issues or pulls use each label.
|
66
|
-
def num_with_label(issues_or_pulls)
|
67
|
-
# Returned hash maps labels to frequency.
|
68
|
-
# E.g., given 10 issues/pulls with label "label1" and 5 with label "label2",
|
69
|
-
# {
|
70
|
-
# "label1" => 10,
|
71
|
-
# "label2" => 5
|
72
|
-
# }
|
73
|
-
|
74
|
-
hash = Hash.new(0)
|
75
|
-
issues_or_pulls.each do |iop|
|
76
|
-
next unless iop['labels']
|
77
|
-
|
78
|
-
iop['labels'].each do |label|
|
79
|
-
hash[label['name']] += 1
|
80
|
-
end
|
81
|
-
end
|
82
|
-
hash
|
83
|
-
end
|
84
|
-
|
85
|
-
def num_with_no_label(issues)
|
86
|
-
issues.select { |x| x['labels'].empty? }.length
|
87
|
-
end
|
88
|
-
|
89
|
-
def average_date_for(issues_or_pulls)
|
90
|
-
timestamps = issues_or_pulls.map { |iop| Date.parse(iop['created_at']).strftime('%s').to_i }
|
91
|
-
average_timestamp = timestamps.reduce(:+) / issues_or_pulls.length
|
92
|
-
|
93
|
-
DateTime.strptime(average_timestamp.to_s, '%s')
|
94
|
-
end
|
95
|
-
|
96
|
-
# Given an Array of issues or pulls, return the average age of them.
|
97
|
-
def average_age_for(issues_or_pulls)
|
98
|
-
ages = issues_or_pulls.map {|iop| time_ago_in_seconds(iop['created_at'])}
|
99
|
-
raw_average = ages.reduce(:+) / ages.length
|
100
|
-
|
101
|
-
seconds_in_a_year = 31_556_926
|
102
|
-
seconds_in_a_month = 2_629_743
|
103
|
-
seconds_in_a_week = 604_800
|
104
|
-
seconds_in_a_day = 86_400
|
105
|
-
|
106
|
-
years = raw_average / seconds_in_a_year
|
107
|
-
years_remainder = raw_average % seconds_in_a_year
|
108
|
-
|
109
|
-
months = years_remainder / seconds_in_a_month
|
110
|
-
months_remainder = years_remainder % seconds_in_a_month
|
111
|
-
|
112
|
-
weeks = months_remainder / seconds_in_a_week
|
113
|
-
weeks_remainder = months_remainder % seconds_in_a_week
|
114
|
-
|
115
|
-
days = weeks_remainder / seconds_in_a_day
|
116
|
-
|
117
|
-
values = [
|
118
|
-
[years, "year"],
|
119
|
-
[months, "month"],
|
120
|
-
[weeks, "week"],
|
121
|
-
[days, "day"],
|
122
|
-
].reject {|(v, k)| v == 0}.map{ |(v,k)|
|
123
|
-
k = k + 's' if v != 1
|
124
|
-
[v, k]
|
125
|
-
}
|
126
|
-
|
127
|
-
most_significant = values[0, 2].map {|x| x.join(" ")}
|
128
|
-
|
129
|
-
if most_significant.length < 2
|
130
|
-
value = most_significant.first
|
131
|
-
else
|
132
|
-
value = most_significant.join(" and ")
|
133
|
-
end
|
134
|
-
|
135
|
-
"approximately #{value}"
|
136
|
-
end
|
137
|
-
|
138
|
-
# Given an Array of issues or pulls, return the creation date of the oldest.
|
139
|
-
def oldest_for(issues_or_pulls)
|
140
|
-
issues_or_pulls.sort_by {|x| DateTime.parse(x['created_at']) }.first
|
141
|
-
end
|
142
|
-
|
143
|
-
def date_for(issue_or_pull)
|
144
|
-
DateTime.parse(issue_or_pull['created_at'])
|
145
|
-
end
|
146
|
-
|
147
|
-
private
|
148
|
-
def with_label_links(labels, repository)
|
149
|
-
labels.map do |label, num_issues|
|
150
|
-
label_link = "https://github.com/#{repository}/issues?q=" + CGI.escape("is:open is:issue label:\"#{label}\"")
|
151
|
-
|
152
|
-
[label, {'link' => label_link, 'total' => num_issues}]
|
153
|
-
end.to_h
|
154
|
-
end
|
155
|
-
|
156
|
-
def time_ago_in_seconds(x)
|
157
|
-
DateTime.now.strftime("%s").to_i - DateTime.parse(x).strftime("%s").to_i
|
158
|
-
end
|
159
|
-
|
160
|
-
def issue_or_pull_to_hash(iop)
|
161
|
-
ret = {}
|
162
|
-
|
163
|
-
ret['html_url'] = iop['html_url']
|
164
|
-
ret['number'] = iop['number']
|
165
|
-
ret['date'] = date_for(iop)
|
166
|
-
|
167
|
-
ret
|
168
|
-
end
|
169
|
-
end
|
170
|
-
end
|
1
|
+
require 'contracts'
|
2
|
+
require 'ostruct'
|
3
|
+
require 'date'
|
4
|
+
require 'json'
|
5
|
+
|
6
|
+
module HowIs
|
7
|
+
##
|
8
|
+
# Represents a completed analysis of the repository being analyzed.
|
9
|
+
class Analysis < OpenStruct
|
10
|
+
end
|
11
|
+
|
12
|
+
class Analyzer
|
13
|
+
include Contracts::Core
|
14
|
+
|
15
|
+
class UnsupportedImportFormat < StandardError
|
16
|
+
def initialize(format)
|
17
|
+
super("Unsupported import format: #{format}")
|
18
|
+
end
|
19
|
+
end
|
20
|
+
|
21
|
+
Contract Fetcher::Results, C::KeywordArgs[analysis_class: C::Optional[Class]] => Analysis
|
22
|
+
def call(data, analysis_class: Analysis)
|
23
|
+
issues = data.issues
|
24
|
+
pulls = data.pulls
|
25
|
+
|
26
|
+
analysis_class.new(
|
27
|
+
issues_url: "https://github.com/#{data.repository}/issues",
|
28
|
+
pulls_url: "https://github.com/#{data.repository}/pulls",
|
29
|
+
|
30
|
+
repository: data.repository,
|
31
|
+
|
32
|
+
number_of_issues: issues.length,
|
33
|
+
number_of_pulls: pulls.length,
|
34
|
+
|
35
|
+
issues_with_label: with_label_links(num_with_label(issues), data.repository),
|
36
|
+
issues_with_no_label: {'link' => nil, 'total' => num_with_no_label(issues)},
|
37
|
+
|
38
|
+
average_issue_age: average_age_for(issues),
|
39
|
+
average_pull_age: average_age_for(pulls),
|
40
|
+
|
41
|
+
oldest_issue: issue_or_pull_to_hash(oldest_for(issues)),
|
42
|
+
oldest_pull: issue_or_pull_to_hash(oldest_for(pulls)),
|
43
|
+
)
|
44
|
+
end
|
45
|
+
|
46
|
+
def from_file(file)
|
47
|
+
extension = file.split('.').last
|
48
|
+
raise UnsupportedImportFormat, extension unless extension == 'json'
|
49
|
+
|
50
|
+
hash = JSON.parse(open(file).read)
|
51
|
+
hash = hash.map do |k, v|
|
52
|
+
v = DateTime.parse(v) if k.end_with?('_date')
|
53
|
+
|
54
|
+
[k, v]
|
55
|
+
end.to_h
|
56
|
+
|
57
|
+
%w[oldest_issue oldest_pull].each do |key|
|
58
|
+
hash[key]['date'] = DateTime.parse(hash[key]['date'])
|
59
|
+
end
|
60
|
+
|
61
|
+
Analysis.new(hash)
|
62
|
+
end
|
63
|
+
|
64
|
+
# Given an Array of issues or pulls, return a Hash specifying how many
|
65
|
+
# issues or pulls use each label.
|
66
|
+
def num_with_label(issues_or_pulls)
|
67
|
+
# Returned hash maps labels to frequency.
|
68
|
+
# E.g., given 10 issues/pulls with label "label1" and 5 with label "label2",
|
69
|
+
# {
|
70
|
+
# "label1" => 10,
|
71
|
+
# "label2" => 5
|
72
|
+
# }
|
73
|
+
|
74
|
+
hash = Hash.new(0)
|
75
|
+
issues_or_pulls.each do |iop|
|
76
|
+
next unless iop['labels']
|
77
|
+
|
78
|
+
iop['labels'].each do |label|
|
79
|
+
hash[label['name']] += 1
|
80
|
+
end
|
81
|
+
end
|
82
|
+
hash
|
83
|
+
end
|
84
|
+
|
85
|
+
def num_with_no_label(issues)
|
86
|
+
issues.select { |x| x['labels'].empty? }.length
|
87
|
+
end
|
88
|
+
|
89
|
+
def average_date_for(issues_or_pulls)
|
90
|
+
timestamps = issues_or_pulls.map { |iop| Date.parse(iop['created_at']).strftime('%s').to_i }
|
91
|
+
average_timestamp = timestamps.reduce(:+) / issues_or_pulls.length
|
92
|
+
|
93
|
+
DateTime.strptime(average_timestamp.to_s, '%s')
|
94
|
+
end
|
95
|
+
|
96
|
+
# Given an Array of issues or pulls, return the average age of them.
|
97
|
+
def average_age_for(issues_or_pulls)
|
98
|
+
ages = issues_or_pulls.map {|iop| time_ago_in_seconds(iop['created_at'])}
|
99
|
+
raw_average = ages.reduce(:+) / ages.length
|
100
|
+
|
101
|
+
seconds_in_a_year = 31_556_926
|
102
|
+
seconds_in_a_month = 2_629_743
|
103
|
+
seconds_in_a_week = 604_800
|
104
|
+
seconds_in_a_day = 86_400
|
105
|
+
|
106
|
+
years = raw_average / seconds_in_a_year
|
107
|
+
years_remainder = raw_average % seconds_in_a_year
|
108
|
+
|
109
|
+
months = years_remainder / seconds_in_a_month
|
110
|
+
months_remainder = years_remainder % seconds_in_a_month
|
111
|
+
|
112
|
+
weeks = months_remainder / seconds_in_a_week
|
113
|
+
weeks_remainder = months_remainder % seconds_in_a_week
|
114
|
+
|
115
|
+
days = weeks_remainder / seconds_in_a_day
|
116
|
+
|
117
|
+
values = [
|
118
|
+
[years, "year"],
|
119
|
+
[months, "month"],
|
120
|
+
[weeks, "week"],
|
121
|
+
[days, "day"],
|
122
|
+
].reject {|(v, k)| v == 0}.map{ |(v,k)|
|
123
|
+
k = k + 's' if v != 1
|
124
|
+
[v, k]
|
125
|
+
}
|
126
|
+
|
127
|
+
most_significant = values[0, 2].map {|x| x.join(" ")}
|
128
|
+
|
129
|
+
if most_significant.length < 2
|
130
|
+
value = most_significant.first
|
131
|
+
else
|
132
|
+
value = most_significant.join(" and ")
|
133
|
+
end
|
134
|
+
|
135
|
+
"approximately #{value}"
|
136
|
+
end
|
137
|
+
|
138
|
+
# Given an Array of issues or pulls, return the creation date of the oldest.
|
139
|
+
def oldest_for(issues_or_pulls)
|
140
|
+
issues_or_pulls.sort_by {|x| DateTime.parse(x['created_at']) }.first
|
141
|
+
end
|
142
|
+
|
143
|
+
def date_for(issue_or_pull)
|
144
|
+
DateTime.parse(issue_or_pull['created_at'])
|
145
|
+
end
|
146
|
+
|
147
|
+
private
|
148
|
+
def with_label_links(labels, repository)
|
149
|
+
labels.map do |label, num_issues|
|
150
|
+
label_link = "https://github.com/#{repository}/issues?q=" + CGI.escape("is:open is:issue label:\"#{label}\"")
|
151
|
+
|
152
|
+
[label, {'link' => label_link, 'total' => num_issues}]
|
153
|
+
end.to_h
|
154
|
+
end
|
155
|
+
|
156
|
+
def time_ago_in_seconds(x)
|
157
|
+
DateTime.now.strftime("%s").to_i - DateTime.parse(x).strftime("%s").to_i
|
158
|
+
end
|
159
|
+
|
160
|
+
def issue_or_pull_to_hash(iop)
|
161
|
+
ret = {}
|
162
|
+
|
163
|
+
ret['html_url'] = iop['html_url']
|
164
|
+
ret['number'] = iop['number']
|
165
|
+
ret['date'] = date_for(iop)
|
166
|
+
|
167
|
+
ret
|
168
|
+
end
|
169
|
+
end
|
170
|
+
end
|