metric_fu 2.1.3.4 → 2.1.3.5
- data/{HISTORY → HISTORY.md} +18 -2
- data/README.md +11 -7
- data/TODO +1 -0
- data/lib/data_structures/careful_array.rb +8 -6
- data/lib/data_structures/code_issue.rb +82 -78
- data/lib/data_structures/grouping.rb +2 -2
- data/lib/data_structures/table.rb +77 -75
- data/lib/errors/analysis_error.rb +4 -1
- data/lib/metrics/churn/{churn_analyzer.rb → churn_hotspot.rb} +4 -4
- data/lib/metrics/flay/{flay_analyzer.rb → flay_hotspot.rb} +5 -5
- data/lib/metrics/flog/{flog_analyzer.rb → flog_hotspot.rb} +4 -4
- data/lib/metrics/hotspot_analyzer.rb +328 -0
- data/lib/metrics/hotspots/hotspots.rb +1 -1
- data/lib/metrics/rcov/{rcov_analyzer.rb → rcov_hotspot.rb} +4 -4
- data/lib/metrics/reek/{reek_analyzer.rb → reek_hotspot.rb} +7 -7
- data/lib/metrics/roodi/{roodi_analyzer.rb → roodi_hotspot.rb} +5 -5
- data/lib/metrics/saikuro/{saikuro_analyzer.rb → saikuro_hotspot.rb} +4 -4
- data/lib/metrics/stats/{stats_analyzer.rb → stats_hotspot.rb} +1 -1
- data/lib/scoring_strategies.rb +24 -22
- data/lib/version.rb +1 -1
- data/metric_fu.gemspec +1 -1
- data/spec/base/{metric_analyzer_spec.rb → hotspot_analyzer_spec.rb} +92 -92
- data/spec/generators/hotspots_spec.rb +2 -2
- metadata +356 -330
- data/lib/metrics/metric_analyzer.rb +0 -328
@@ -1,328 +0,0 @@
-%w(record grouping).each do |path|
-  MetricFu.data_structures_require { path }
-end
-
-class MetricAnalyzer
-
-  COMMON_COLUMNS = %w{metric}
-  GRANULARITIES = %w{file_path class_name method_name}
-
-  attr_accessor :table
-
-  def initialize(yaml)
-    if(yaml.is_a?(String))
-      @yaml = YAML.load(yaml)
-    else
-      @yaml = yaml
-    end
-    @file_ranking = MetricFu::Ranking.new
-    @class_ranking = MetricFu::Ranking.new
-    @method_ranking = MetricFu::Ranking.new
-    rankings = [@file_ranking, @class_ranking, @method_ranking]
-
-    tool_analyzers = [ReekAnalyzer.new, RoodiAnalyzer.new,
-      FlogAnalyzer.new, ChurnAnalyzer.new, SaikuroAnalyzer.new,
-      FlayAnalyzer.new, StatsAnalyzer.new, RcovAnalyzer.new]
-    # TODO There is likely a clash that will happen between
-    # column names eventually. We should probably auto-prefix
-    # them (e.g. "roodi_problem")
-    columns = COMMON_COLUMNS + GRANULARITIES + tool_analyzers.map{|analyzer| analyzer.columns}.flatten
-
-    @table = make_table(columns)
-
-    # These tables are an optimization. They contain subsets of the master table.
-    # TODO - these should be pushed into the Table class now
-    @tool_tables = make_table_hash(columns)
-    @file_tables = make_table_hash(columns)
-    @class_tables = make_table_hash(columns)
-    @method_tables = make_table_hash(columns)
-
-    tool_analyzers.each do |analyzer|
-      analyzer.generate_records(@yaml[analyzer.name], @table)
-    end
-
-    build_lookups!(table)
-    process_rows!(table)
-
-    tool_analyzers.each do |analyzer|
-      GRANULARITIES.each do |granularity|
-        metric_ranking = calculate_metric_scores(granularity, analyzer)
-        add_to_master_ranking(ranking(granularity), metric_ranking, analyzer)
-      end
-    end
-
-    rankings.each do |ranking|
-      ranking.delete(nil)
-    end
-  end
-
-  def location(item, value)
-    sub_table = get_sub_table(item, value)
-    if(sub_table.length==0)
-      raise AnalysisError, "The #{item.to_s} '#{value.to_s}' does not have any rows in the analysis table"
-    else
-      first_row = sub_table[0]
-      case item
-      when :class
-        MetricFu::Location.get(first_row.file_path, first_row.class_name, nil)
-      when :method
-        MetricFu::Location.get(first_row.file_path, first_row.class_name, first_row.method_name)
-      when :file
-        MetricFu::Location.get(first_row.file_path, nil, nil)
-      else
-        raise ArgumentError, "Item must be :class, :method, or :file"
-      end
-    end
-  end
-
-  #todo redo as item,value, options = {}
-  # Note that the other option for 'details' is :detailed (this isn't
-  # at all clear from this method itself
-  def problems_with(item, value, details = :summary, exclude_details = [])
-    sub_table = get_sub_table(item, value)
-    #grouping = Ruport::Data::Grouping.new(sub_table, :by => 'metric')
-    grouping = get_grouping(sub_table, :by => 'metric')
-    problems = {}
-    grouping.each do |metric, table|
-      if details == :summary || exclude_details.include?(metric)
-        problems[metric] = present_group(metric,table)
-      else
-        problems[metric] = present_group_details(metric,table)
-      end
-    end
-    problems
-  end
-
-  def worst_methods(size = nil)
-    @method_ranking.top(size)
-  end
-
-  def worst_classes(size = nil)
-    @class_ranking.top(size)
-  end
-
-  def worst_files(size = nil)
-    @file_ranking.top(size)
-  end
-
-  private
-
-  def get_grouping(table, opts)
-    #Ruport::Data::Grouping.new(table, opts)
-    MetricFu::Grouping.new(table, opts)
-    #@grouping_cache ||= {}
-    #@grouping_cache.fetch(grouping_key(table,opts)) do
-    #  @grouping_cache[grouping_key(table,opts)] = Ruport::Data::Grouping.new(table, opts)
-    #end
-  end
-
-  def grouping_key(table, opts)
-    "table #{table.object_id} opts #{opts.inspect}"
-  end
-
-  def build_lookups!(table)
-    @class_and_method_to_file ||= {}
-    # Build a mapping from [class,method] => filename
-    # (and make sure the mapping is unique)
-    table.each do |row|
-      # We know that Saikuro provides the wrong data
-      next if row['metric'] == :saikuro
-      key = [row['class_name'], row['method_name']]
-      file_path = row['file_path']
-      @class_and_method_to_file[key] ||= file_path
-    end
-  end
-
-  def process_rows!(table)
-    # Correct incorrect rows in the table
-    table.each do |row|
-      row_metric = row['metric'] #perf optimization
-      if row_metric == :saikuro
-        fix_row_file_path!(row)
-      end
-      @tool_tables[row_metric] << row
-      @file_tables[row["file_path"]] << row
-      @class_tables[row["class_name"]] << row
-      @method_tables[row["method_name"]] << row
-    end
-  end
-
-  def fix_row_file_path!(row)
-    # We know that Saikuro rows are broken
-    # next unless row['metric'] == :saikuro
-    key = [row['class_name'], row['method_name']]
-    current_file_path = row['file_path'].to_s
-    correct_file_path = @class_and_method_to_file[key]
-    if(correct_file_path!=nil && correct_file_path.include?(current_file_path))
-      row['file_path'] = correct_file_path
-    else
-      # There wasn't an exact match, so we can do a substring match
-      matching_file_path = file_paths.detect {|file_path|
-        file_path!=nil && file_path.include?(current_file_path)
-      }
-      if(matching_file_path)
-        row['file_path'] = matching_file_path
-      end
-    end
-  end
-
-  def file_paths
-    @file_paths ||= @table.column('file_path').uniq
-  end
-
-  def ranking(column_name)
-    case column_name
-    when "file_path"
-      @file_ranking
-    when "class_name"
-      @class_ranking
-    when "method_name"
-      @method_ranking
-    else
-      raise ArgumentError, "Invalid column name #{column_name}"
-    end
-  end
-
-  def calculate_metric_scores(granularity, analyzer)
-    metric_ranking = MetricFu::Ranking.new
-    metric_violations = @tool_tables[analyzer.name]
-    metric_violations.each do |row|
-      location = row[granularity]
-      metric_ranking[location] ||= []
-      metric_ranking[location] << analyzer.map(row)
-    end
-
-    metric_ranking.each do |item, scores|
-      metric_ranking[item] = analyzer.reduce(scores)
-    end
-
-    metric_ranking
-  end
-
-  def add_to_master_ranking(master_ranking, metric_ranking, analyzer)
-    metric_ranking.each do |item, _|
-      master_ranking[item] ||= 0
-      master_ranking[item] += analyzer.score(metric_ranking, item) # scaling? Do we just add in the raw score?
-    end
-  end
-
-  def most_common_column(column_name, size)
-    #grouping = Ruport::Data::Grouping.new(@table,
-    #                                      :by => column_name,
-    #                                      :order => lambda { |g| -g.size})
-    get_grouping(@table, :by => column_name, :order => lambda {|g| -g.size})
-    values = []
-    grouping.each do |value, _|
-      values << value if value!=nil
-      if(values.size==size)
-        break
-      end
-    end
-    return nil if values.empty?
-    if(values.size == 1)
-      return values.first
-    else
-      return values
-    end
-  end
-
-  # TODO: As we get fancier, the presenter should
-  # be its own class, not just a method with a long
-  # case statement
-  def present_group(metric, group)
-    occurences = group.size
-    case(metric)
-    when :reek
-      "found #{occurences} code smells"
-    when :roodi
-      "found #{occurences} design problems"
-    when :churn
-      "detected high level of churn (changed #{group[0].times_changed} times)"
-    when :flog
-      complexity = get_mean(group.column("score"))
-      "#{"average " if occurences > 1}complexity is %.1f" % complexity
-    when :saikuro
-      complexity = get_mean(group.column("complexity"))
-      "#{"average " if occurences > 1}complexity is %.1f" % complexity
-    when :flay
-      "found #{occurences} code duplications"
-    when :rcov
-      average_code_uncoverage = get_mean(group.column("percentage_uncovered"))
-      "#{"average " if occurences > 1}uncovered code is %.1f%" % average_code_uncoverage
-    else
-      raise AnalysisError, "Unknown metric #{metric}"
-    end
-  end
-
-  def present_group_details(metric, group)
-    occurences = group.size
-    case(metric)
-    when :reek
-      message = "found #{occurences} code smells<br/>"
-      group.each do |item|
-        type = item.data["reek__type_name"]
-        reek_message = item.data["reek__message"]
-        message << "* #{type}: #{reek_message}<br/>"
-      end
-      message
-    when :roodi
-      message = "found #{occurences} design problems<br/>"
-      group.each do |item|
-        problem = item.data["problems"]
-        message << "* #{problem}<br/>"
-      end
-      message
-    when :churn
-      "detected high level of churn (changed #{group[0].times_changed} times)"
-    when :flog
-      complexity = get_mean(group.column("score"))
-      "#{"average " if occurences > 1}complexity is %.1f" % complexity
-    when :saikuro
-      complexity = get_mean(group.column("complexity"))
-      "#{"average " if occurences > 1}complexity is %.1f" % complexity
-    when :flay
-      message = "found #{occurences} code duplications<br/>"
-      group.each do |item|
-        problem = item.data["flay_reason"]
-        problem = problem.gsub(/^[0-9]*\)/,'')
-        problem = problem.gsub(/files\:/,' <br> files:')
-        message << "* #{problem}<br/>"
-      end
-      message
-    else
-      raise AnalysisError, "Unknown metric #{metric}"
-    end
-  end
-
-  def make_table_hash(columns)
-    Hash.new { |hash, key|
-      hash[key] = make_table(columns)
-    }
-  end
-
-  def make_table(columns)
-    Table.new(:column_names => columns)
-  end
-
-  def get_sub_table(item, value)
-    tables = {
-      :class => @class_tables,
-      :method => @method_tables,
-      :file => @file_tables,
-      :tool => @tool_tables
-    }.fetch(item) do
-      raise ArgumentError, "Item must be :class, :method, or :file"
-    end
-    tables[value]
-  end
-
-  def get_mean(collection)
-    collection_length = collection.length
-    sum = 0
-    sum = collection.inject( nil ) { |sum,x| sum ? sum+x : x }
-    (sum.to_f / collection_length.to_f)
-  end
-
-end
-
-
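For context, here is a minimal usage sketch of the analyzer removed above, reconstructed from its public methods (`initialize`, `worst_files`/`worst_classes`/`worst_methods`, `problems_with`). The `HotspotAnalyzer` constant and the report path are assumptions: the file list suggests `metric_analyzer.rb` became `hotspot_analyzer.rb` in 2.1.3.5, but the new class definition is not shown in this diff.

```ruby
require 'yaml'
require 'metric_fu'

# Hypothetical path to a metric_fu YAML report; initialize accepts either a
# YAML string or an already-loaded hash (see the removed #initialize above).
report = YAML.load_file('tmp/metric_fu/report.yml')

# Before 2.1.3.5 this was MetricAnalyzer.new(report); the HotspotAnalyzer name
# is inferred from the rename and may differ in the released code.
analyzer = HotspotAnalyzer.new(report)

puts analyzer.worst_files(5).inspect     # top-ranked files
puts analyzer.worst_methods(5).inspect   # top-ranked methods

# Per-metric problem summaries for one file; pass :detailed instead of
# :summary to get the expanded messages (see #problems_with above).
puts analyzer.problems_with(:file, 'lib/metric_fu.rb', :summary).inspect
```

The sketch only exercises the API documented by the removed class; scoring behaviour itself comes from the per-tool hotspot classes and `scoring_strategies.rb` listed in the summary.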