edouard-metric_fu 1.0.3.2 → 1.0.3.3

Sign up to get free protection for your applications and to get access to all the features.
Files changed (40) hide show
  1. data/lib/base/base_template.rb +134 -0
  2. data/lib/base/configuration.rb +187 -0
  3. data/lib/base/generator.rb +147 -0
  4. data/lib/base/md5_tracker.rb +52 -0
  5. data/lib/base/report.rb +100 -0
  6. data/lib/generators/churn.rb +86 -0
  7. data/lib/generators/flay.rb +29 -0
  8. data/lib/generators/flog.rb +127 -0
  9. data/lib/generators/rcov.rb +77 -0
  10. data/lib/generators/reek.rb +32 -0
  11. data/lib/generators/roodi.rb +24 -0
  12. data/lib/generators/saikuro.rb +211 -0
  13. data/lib/generators/stats.rb +43 -0
  14. data/lib/metric_fu.rb +21 -0
  15. data/lib/templates/awesome/awesome_template.rb +30 -0
  16. data/lib/templates/awesome/churn.html.erb +19 -0
  17. data/lib/templates/awesome/default.css +62 -0
  18. data/lib/templates/awesome/flay.html.erb +22 -0
  19. data/lib/templates/awesome/flog.html.erb +42 -0
  20. data/lib/templates/awesome/index.html.erb +28 -0
  21. data/lib/templates/awesome/rcov.html.erb +32 -0
  22. data/lib/templates/awesome/reek.html.erb +30 -0
  23. data/lib/templates/awesome/roodi.html.erb +17 -0
  24. data/lib/templates/awesome/saikuro.html.erb +71 -0
  25. data/lib/templates/awesome/stats.html.erb +41 -0
  26. data/lib/templates/standard/churn.html.erb +30 -0
  27. data/lib/templates/standard/default.css +64 -0
  28. data/lib/templates/standard/flay.html.erb +33 -0
  29. data/lib/templates/standard/flog.html.erb +52 -0
  30. data/lib/templates/standard/index.html.erb +38 -0
  31. data/lib/templates/standard/rcov.html.erb +42 -0
  32. data/lib/templates/standard/reek.html.erb +41 -0
  33. data/lib/templates/standard/roodi.html.erb +28 -0
  34. data/lib/templates/standard/saikuro.html.erb +83 -0
  35. data/lib/templates/standard/standard_template.rb +26 -0
  36. data/lib/templates/standard/stats.html.erb +54 -0
  37. data/tasks/metric_fu.rake +15 -0
  38. data/tasks/railroad.rake +39 -0
  39. data/vendor/saikuro/saikuro.rb +1214 -0
  40. metadata +40 -1
@@ -0,0 +1,1214 @@
1
+ # $Id: saikuro 33 2006-12-07 16:09:55Z zev $
2
+ # Version 0.2
3
+ # == Usage
4
+ #
5
+ # ruby saikuro.rb [ -h ] [-o output_directory] [-f type] [ -c, -t ]
6
+ # [ -y, -w, -e, -k, -s, -d - number ] ( -p file | -i directory )
7
+ #
8
+ # == Help
9
+ #
10
+ # -o, --output_directory (directory) : A directory to output the results in.
11
+ # The current directory is used if this option is not passed.
12
+ #
13
+ # -h, --help : This help message.
14
+ #
15
+ # -f, --formater (html | text) : The format to output the results in.
16
+ # The default is html
17
+ #
18
+ # -c, --cyclo : Compute the cyclomatic complexity of the input.
19
+ #
20
+ # -t, --token : Count the number of tokens per line of the input.
21
+ #
22
+ # -y, --filter_cyclo (number) : Filter the output to only include methods
23
+ # whose cyclomatic complexity are greater than the passed number.
24
+ #
25
+ # -w, --warn_cyclo (number) : Highlight with a warning methods whose
26
+ # cyclomatic complexity are greater than or equal to the passed number.
27
+ #
28
+ #
29
+ # -e, --error_cyclo (number) : Highlight with an error methods whose
30
+ # cyclomatic complexity are greater than or equal to the passed number.
31
+ #
32
+ #
33
+ # -k, --filter_token (number) : Filter the output to only include lines
34
+ # whose token count are greater than the passed number.
35
+ #
36
+ #
37
+ # -s, --warn_token (number) : Highlight with a warning lines whose
38
+ # token count are greater than or equal to the passed number.
39
+ #
40
+ #
41
+ # -d, --error_token (number) : Highlight with an error lines whose
42
+ # token count are greater than or equal to the passed number.
43
+ #
44
+ #
45
+ # -p, --parse_file (file) : A file to use as input.
46
+ #
47
+ # -i, --input_directory (directory) : All ruby files found recursively
48
+ # inside the directory are passed as input.
49
+
50
+ # == License
51
+ # Saikuro uses the BSD license.
52
+ #
53
+ # Copyright (c) 2005, Ubiquitous Business Technology (http://ubit.com)
54
+ # All rights reserved.
55
+ #
56
+ # Redistribution and use in source and binary forms, with or without
57
+ # modification, are permitted provided that the following conditions are
58
+ # met:
59
+ #
60
+ #
61
+ # * Redistributions of source code must retain the above copyright
62
+ # notice, this list of conditions and the following disclaimer.
63
+ #
64
+ # * Redistributions in binary form must reproduce the above
65
+ # copyright notice, this list of conditions and the following
66
+ # disclaimer in the documentation and/or other materials provided
67
+ # with the distribution.
68
+ #
69
+ # * Neither the name of Ubiquitous Business Technology nor the names
70
+ # of its contributors may be used to endorse or promote products
71
+ # derived from this software without specific prior written
72
+ # permission.
73
+ #
74
+ #
75
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
76
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
77
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
78
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
79
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
80
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
81
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
82
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
83
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
84
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
85
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
86
+ #
87
+ # == Author
88
+ # Zev Blut (zb@ubit.com)
89
+
90
+ require 'irb/ruby-lex'
91
+ require 'yaml'
92
+
93
+ # States to watch for
94
+ # once in def get the token after space, because it may also
95
+ # be something like + or << for operator overloading.
96
+
97
+ # Counts the number of tokens in each line.
98
+ class TokenCounter
99
+ include RubyToken
100
+
101
+ attr_reader :current_file
102
+
103
+ def initialize
104
+ @files = Hash.new
105
+ @tokens_per_line = Hash.new(0)
106
+ @current_file = ""
107
+ end
108
+
109
+ # Mark file to associate with the token count.
110
+ def set_current_file(file)
111
+ @current_file = file
112
+ @tokens_per_line = Hash.new(0)
113
+ @files[@current_file] = @tokens_per_line
114
+ end
115
+
116
+ # Iterate through all tracked files, passing the
117
+ # provided formater the token counts.
118
+ def list_tokens_per_line(formater)
119
+ formater.start_count(@files.size)
120
+ @files.each do |fname, tok_per_line|
121
+ formater.start_file(fname)
122
+ tok_per_line.sort.each do |line,num|
123
+ formater.line_token_count(line,num)
124
+ end
125
+ formater.end_file
126
+ end
127
+ end
128
+
129
+ # Count the token for the passed line.
130
+ def count_token(line_no,token)
131
+ case token
132
+ when TkSPACE, TkNL, TkRD_COMMENT
133
+ # Do not count these as tokens
134
+ when TkCOMMENT
135
+ # Ignore this only for comments in a statement?
136
+ # Ignore TkCOLON,TkCOLON2 and operators? like "." etc..
137
+ when TkRBRACK, TkRPAREN, TkRBRACE
138
+ # Ignore the closing of an array/index/hash/paren
139
+ # The opening is counted, but no more.
140
+ # Thus [], () {} is counted as 1 token not 2.
141
+ else
142
+ # may want to filter out comments...
143
+ @tokens_per_line[line_no] += 1
144
+ end
145
+ end
146
+
147
+ end
148
+
149
+ # Main class and structure used to compute the
150
+ # cyclomatic complexity of Ruby programs.
151
+ class ParseState
152
+ include RubyToken
153
+ attr_accessor :name, :children, :complexity, :parent, :lines
154
+
155
+ @@top_state = nil
156
+ def ParseState.make_top_state()
157
+ @@top_state = ParseState.new(nil)
158
+ @@top_state.name = "__top__"
159
+ @@top_state
160
+ end
161
+
162
+ @@token_counter = TokenCounter.new
163
+ def ParseState.set_token_counter(counter)
164
+ @@token_counter = counter
165
+ end
166
+ def ParseState.get_token_counter
167
+ @@token_counter
168
+ end
169
+
170
+ def initialize(lexer,parent=nil)
171
+ @name = ""
172
+ @children = Array.new
173
+ @complexity = 0
174
+ @parent = parent
175
+ @lexer = lexer
176
+ @run = true
177
+ # To catch one line def statements, We always have one line.
178
+ @lines = 0
179
+ @last_token_line_and_char = Array.new
180
+ end
181
+
182
+ def top_state?
183
+ self == @@top_state
184
+ end
185
+
186
+ def lexer=(lexer)
187
+ @run = true
188
+ @lexer = lexer
189
+ end
190
+
191
+ def make_state(type,parent = nil)
192
+ cstate = type.new(@lexer,self)
193
+ parent.children<< cstate
194
+ cstate
195
+ end
196
+
197
+ def calc_complexity
198
+ complexity = @complexity
199
+ children.each do |child|
200
+ complexity += child.calc_complexity
201
+ end
202
+ complexity
203
+ end
204
+
205
+ def calc_lines
206
+ lines = @lines
207
+ children.each do |child|
208
+ lines += child.calc_lines
209
+ end
210
+ lines
211
+ end
212
+
213
+ def compute_state(formater)
214
+ if top_state?
215
+ compute_state_for_global(formater)
216
+ end
217
+
218
+ @children.each do |s|
219
+ s.compute_state(formater)
220
+ end
221
+ end
222
+
223
+ def compute_state_for_global(formater)
224
+ global_def, @children = @children.partition do |s|
225
+ !s.kind_of?(ParseClass)
226
+ end
227
+ return if global_def.empty?
228
+ gx = global_def.inject(0) { |c,s| s.calc_complexity }
229
+ gl = global_def.inject(0) { |c,s| s.calc_lines }
230
+ formater.start_class_compute_state("Global", "", gx, gl)
231
+ global_def.each do |s|
232
+ s.compute_state(formater)
233
+ end
234
+ formater.end_class_compute_state("")
235
+ end
236
+
237
+ # Count the tokens parsed if true else ignore them.
238
+ def count_tokens?
239
+ true
240
+ end
241
+
242
+ def parse
243
+ while @run do
244
+ tok = @lexer.token
245
+ @run = false if tok.nil?
246
+ if lexer_loop?(tok)
247
+ STDERR.puts "Lexer loop at line : #{@lexer.line_no} char #{@lexer.char_no}."
248
+ @run = false
249
+ end
250
+ @last_token_line_and_char<< [@lexer.line_no.to_i, @lexer.char_no.to_i, tok]
251
+ if $VERBOSE
252
+ puts "DEBUG: #{@lexer.line_no} #{tok.class}:#{tok.name if tok.respond_to?(:name)}"
253
+ end
254
+ @@token_counter.count_token(@lexer.line_no, tok) if count_tokens?
255
+ parse_token(tok)
256
+ end
257
+ end
258
+
259
+ # Ruby-Lexer can go into a loop if the file does not end with a newline.
260
+ def lexer_loop?(token)
261
+ return false if @last_token_line_and_char.empty?
262
+ loop_flag = false
263
+ last = @last_token_line_and_char.last
264
+ line = last[0]
265
+ char = last[1]
266
+ ltok = last[2]
267
+
268
+ if ( (line == @lexer.line_no.to_i) &&
269
+ (char == @lexer.char_no.to_i) &&
270
+ (ltok.class == token.class) )
271
+ # We are potentially in a loop
272
+ if @last_token_line_and_char.size >= 3
273
+ loop_flag = true
274
+ end
275
+ else
276
+ # Not in a loop so clear stack
277
+ @last_token_line_and_char = Array.new
278
+ end
279
+
280
+ loop_flag
281
+ end
282
+
283
+ def do_begin_token(token)
284
+ make_state(EndableParseState, self)
285
+ end
286
+
287
+ def do_class_token(token)
288
+ make_state(ParseClass,self)
289
+ end
290
+
291
+ def do_module_token(token)
292
+ make_state(ParseModule,self)
293
+ end
294
+
295
+ def do_def_token(token)
296
+ make_state(ParseDef,self)
297
+ end
298
+
299
+ def do_constant_token(token)
300
+ nil
301
+ end
302
+
303
+ def do_identifier_token(token)
304
+ if (token.name == "__END__" && token.char_no.to_i == 0)
305
+ # The Ruby code has stopped and the rest is data so cease parsing.
306
+ @run = false
307
+ end
308
+ nil
309
+ end
310
+
311
+ def do_right_brace_token(token)
312
+ nil
313
+ end
314
+
315
+ def do_end_token(token)
316
+ end_debug
317
+ nil
318
+ end
319
+
320
+ def do_block_token(token)
321
+ make_state(ParseBlock,self)
322
+ end
323
+
324
+ def do_conditional_token(token)
325
+ make_state(ParseCond,self)
326
+ end
327
+
328
+ def do_conditional_do_control_token(token)
329
+ make_state(ParseDoCond,self)
330
+ end
331
+
332
+ def do_case_token(token)
333
+ make_state(EndableParseState, self)
334
+ end
335
+
336
+ def do_one_line_conditional_token(token)
337
+ # This is an if with no end
338
+ @complexity += 1
339
+ #STDOUT.puts "got IF_MOD: #{self.to_yaml}" if $VERBOSE
340
+ #if state.type != "class" && state.type != "def" && state.type != "cond"
341
+ #STDOUT.puts "Changing IF_MOD Parent" if $VERBOSE
342
+ #state = state.parent
343
+ #@run = false
344
+ nil
345
+ end
346
+
347
+ def do_else_token(token)
348
+ STDOUT.puts "Ignored/Unknown Token:#{token.class}" if $VERBOSE
349
+ nil
350
+ end
351
+
352
+ def do_comment_token(token)
353
+ make_state(ParseComment, self)
354
+ end
355
+
356
+ def do_symbol_token(token)
357
+ make_state(ParseSymbol, self)
358
+ end
359
+
360
+ def parse_token(token)
361
+ state = nil
362
+ case token
363
+ when TkCLASS
364
+ state = do_class_token(token)
365
+ when TkMODULE
366
+ state = do_module_token(token)
367
+ when TkDEF
368
+ state = do_def_token(token)
369
+ when TkCONSTANT
370
+ # Nothing to do with a constant at top level?
371
+ state = do_constant_token(token)
372
+ when TkIDENTIFIER,TkFID
373
+ # Nothing to do at top level?
374
+ state = do_identifier_token(token)
375
+ when TkRBRACE
376
+ # Nothing to do at top level
377
+ state = do_right_brace_token(token)
378
+ when TkEND
379
+ state = do_end_token(token)
380
+ # At top level this might be an error...
381
+ when TkDO,TkfLBRACE
382
+ state = do_block_token(token)
383
+ when TkIF,TkUNLESS
384
+ state = do_conditional_token(token)
385
+ when TkWHILE,TkUNTIL,TkFOR
386
+ state = do_conditional_do_control_token(token)
387
+ when TkELSIF #,TkELSE
388
+ @complexity += 1
389
+ when TkELSE
390
+ # Else does not increase complexity
391
+ when TkCASE
392
+ state = do_case_token(token)
393
+ when TkWHEN
394
+ @complexity += 1
395
+ when TkBEGIN
396
+ state = do_begin_token(token)
397
+ when TkRESCUE
398
+ # Maybe this should add complexity and not begin
399
+ @complexity += 1
400
+ when TkIF_MOD, TkUNLESS_MOD, TkUNTIL_MOD, TkWHILE_MOD, TkQUESTION
401
+ state = do_one_line_conditional_token(token)
402
+ when TkNL
403
+ #
404
+ @lines += 1
405
+ when TkRETURN
406
+ # Early returns do not increase complexity as the condition that
407
+ # calls the return is the one that increases it.
408
+ when TkCOMMENT
409
+ state = do_comment_token(token)
410
+ when TkSYMBEG
411
+ state = do_symbol_token(token)
412
+ when TkError
413
+ STDOUT.puts "Lexer received an error for line #{@lexer.line_no} char #{@lexer.char_no}"
414
+ else
415
+ state = do_else_token(token)
416
+ end
417
+ state.parse if state
418
+ end
419
+
420
+ def end_debug
421
+ STDOUT.puts "got an end: #{@name} in #{self.class.name}" if $VERBOSE
422
+ if @parent.nil?
423
+ STDOUT.puts "DEBUG: Line #{@lexer.line_no}"
424
+ STDOUT.puts "DEBUG: #{@name}; #{self.class}"
425
+ # to_yaml can cause an infinite loop?
426
+ #STDOUT.puts "TOP: #{@@top_state.to_yaml}"
427
+ #STDOUT.puts "TOP: #{@@top_state.inspect}"
428
+
429
+ # This may not be an error?
430
+ #exit 1
431
+ end
432
+ end
433
+
434
+ end
435
+
436
+ # Read and consume tokens in comments until a new line.
437
+ class ParseComment < ParseState
438
+
439
+ # While in a comment state do not count the tokens.
440
+ def count_tokens?
441
+ false
442
+ end
443
+
444
+ def parse_token(token)
445
+ if token.is_a?(TkNL)
446
+ @lines += 1
447
+ @run = false
448
+ end
449
+ end
450
+ end
451
+
452
+ class ParseSymbol < ParseState
453
+ def initialize(lexer, parent = nil)
454
+ super
455
+ STDOUT.puts "STARTING SYMBOL" if $VERBOSE
456
+ end
457
+
458
+ def parse_token(token)
459
+ STDOUT.puts "Symbol's token is #{token.class}" if $VERBOSE
460
+ # Consume the next token and stop
461
+ @run = false
462
+ nil
463
+ end
464
+ end
465
+
466
+ class EndableParseState < ParseState
467
+ def initialize(lexer,parent=nil)
468
+ super(lexer,parent)
469
+ STDOUT.puts "Starting #{self.class}" if $VERBOSE
470
+ end
471
+
472
+ def do_end_token(token)
473
+ end_debug
474
+ @run = false
475
+ nil
476
+ end
477
+ end
478
+
479
+ class ParseClass < EndableParseState
480
+ def initialize(lexer,parent=nil)
481
+ super(lexer,parent)
482
+ @type_name = "Class"
483
+ end
484
+
485
+ def do_constant_token(token)
486
+ @name = token.name if @name.empty?
487
+ nil
488
+ end
489
+
490
+ def compute_state(formater)
491
+ # Separate the Module and Class Children out
492
+ cnm_children, @children = @children.partition do |child|
493
+ child.kind_of?(ParseClass)
494
+ end
495
+
496
+ formater.start_class_compute_state(@type_name,@name,self.calc_complexity,self.calc_lines)
497
+ super(formater)
498
+ formater.end_class_compute_state(@name)
499
+
500
+ cnm_children.each do |child|
501
+ child.name = @name + "::" + child.name
502
+ child.compute_state(formater)
503
+ end
504
+ end
505
+ end
506
+
507
+ class ParseModule < ParseClass
508
+ def initialize(lexer,parent=nil)
509
+ super(lexer,parent)
510
+ @type_name = "Module"
511
+ end
512
+ end
513
+
514
+ class ParseDef < EndableParseState
515
+
516
+ def initialize(lexer,parent=nil)
517
+ super(lexer,parent)
518
+ @complexity = 1
519
+ @looking_for_name = true
520
+ @first_space = true
521
+ end
522
+
523
+ # This way I don't need to list all possible overload
524
+ # tokens.
525
+ def create_def_name(token)
526
+ case token
527
+ when TkSPACE
528
+ # mark first space so we can stop at next space
529
+ if @first_space
530
+ @first_space = false
531
+ else
532
+ @looking_for_name = false
533
+ end
534
+ when TkNL,TkLPAREN,TkfLPAREN,TkSEMICOLON
535
+ # we can also stop at a new line or left parenthesis
536
+ @looking_for_name = false
537
+ when TkDOT
538
+ @name<< "."
539
+ when TkCOLON2
540
+ @name<< "::"
541
+ when TkASSIGN
542
+ @name<< "="
543
+ when TkfLBRACK
544
+ @name<< "["
545
+ when TkRBRACK
546
+ @name<< "]"
547
+ else
548
+ begin
549
+ @name<< token.name.to_s
550
+ rescue Exception => err
551
+ #what is this?
552
+ STDOUT.puts @@token_counter.current_file
553
+ STDOUT.puts @name
554
+ STDOUT.puts token.inspect
555
+ STDOUT.puts err.message
556
+ exit 1
557
+ end
558
+ end
559
+ end
560
+
561
+ def parse_token(token)
562
+ if @looking_for_name
563
+ create_def_name(token)
564
+ end
565
+ super(token)
566
+ end
567
+
568
+ def compute_state(formater)
569
+ formater.def_compute_state(@name, self.calc_complexity, self.calc_lines)
570
+ super(formater)
571
+ end
572
+ end
573
+
574
+ class ParseCond < EndableParseState
575
+ def initialize(lexer,parent=nil)
576
+ super(lexer,parent)
577
+ @complexity = 1
578
+ end
579
+ end
580
+
581
+ class ParseDoCond < ParseCond
582
+ def initialize(lexer,parent=nil)
583
+ super(lexer,parent)
584
+ @looking_for_new_line = true
585
+ end
586
+
587
+ # Need to consume the do that can appear at the
588
+ # end of these control structures.
589
+ def parse_token(token)
590
+ if @looking_for_new_line
591
+ if token.is_a?(TkDO)
592
+ nil
593
+ else
594
+ if token.is_a?(TkNL)
595
+ @looking_for_new_line = false
596
+ end
597
+ super(token)
598
+ end
599
+ else
600
+ super(token)
601
+ end
602
+ end
603
+
604
+ end
605
+
606
+ class ParseBlock < EndableParseState
607
+
608
+ def initialize(lexer,parent=nil)
609
+ super(lexer,parent)
610
+ @complexity = 1
611
+ @lbraces = Array.new
612
+ end
613
+
614
+ # Because the token for a block and hash right brace is the same,
615
+ # we need to track the hash left braces to determine when an end is
616
+ # encountered.
617
+ def parse_token(token)
618
+ if token.is_a?(TkLBRACE)
619
+ @lbraces.push(true)
620
+ elsif token.is_a?(TkRBRACE)
621
+ if @lbraces.empty?
622
+ do_right_brace_token(token)
623
+ #do_end_token(token)
624
+ else
625
+ @lbraces.pop
626
+ end
627
+ else
628
+ super(token)
629
+ end
630
+ end
631
+
632
+ def do_right_brace_token(token)
633
+ # we are done ? what about a hash in a block :-/
634
+ @run = false
635
+ nil
636
+ end
637
+
638
+ end
639
+
640
+ # ------------ END Analyzer logic ------------------------------------
641
+
642
+ class Filter
643
+ attr_accessor :limit, :error, :warn
644
+
645
+ def initialize(limit = -1, error = 11, warn = 8)
646
+ @limit = limit
647
+ @error = error
648
+ @warn = warn
649
+ end
650
+
651
+ def ignore?(count)
652
+ count < @limit
653
+ end
654
+
655
+ def warn?(count)
656
+ count >= @warn
657
+ end
658
+
659
+ def error?(count)
660
+ count >= @error
661
+ end
662
+
663
+ end
664
+
665
+
666
+ class BaseFormater
667
+ attr_accessor :warnings, :errors, :current
668
+
669
+ def initialize(out, filter = nil)
670
+ @out = out
671
+ @filter = filter
672
+ reset_data
673
+ end
674
+
675
+ def warn_error?(num, marker)
676
+ klass = ""
677
+
678
+ if @filter.error?(num)
679
+ klass = ' class="error"'
680
+ @errors<< [@current, marker, num]
681
+ elsif @filter.warn?(num)
682
+ klass = ' class="warning"'
683
+ @warnings<< [@current, marker, num]
684
+ end
685
+
686
+ klass
687
+ end
688
+
689
+ def reset_data
690
+ @warnings = Array.new
691
+ @errors = Array.new
692
+ @current = ""
693
+ end
694
+
695
+ end
696
+
697
+ class TokenCounterFormater < BaseFormater
698
+
699
+ def start(new_out=nil)
700
+ reset_data
701
+ @out = new_out if new_out
702
+ @out.puts "Token Count"
703
+ end
704
+
705
+ def start_count(number_of_files)
706
+ @out.puts "Counting tokens for #{number_of_files} files."
707
+ end
708
+
709
+ def start_file(file_name)
710
+ @current = file_name
711
+ @out.puts "File:#{file_name}"
712
+ end
713
+
714
+ def line_token_count(line_number,number_of_tokens)
715
+ return if @filter.ignore?(number_of_tokens)
716
+ warn_error?(number_of_tokens, line_number)
717
+ @out.puts "Line:#{line_number} ; Tokens : #{number_of_tokens}"
718
+ end
719
+
720
+ def end_file
721
+ @out.puts ""
722
+ end
723
+
724
+ def end_count
725
+ end
726
+
727
+ def end
728
+ end
729
+
730
+ end
731
+
732
+ module HTMLStyleSheet
733
+ def HTMLStyleSheet.style_sheet
734
+ out = StringIO.new
735
+
736
+ out.puts "<style>"
737
+ out.puts 'body {'
738
+ out.puts ' margin: 20px;'
739
+ out.puts ' padding: 0;'
740
+ out.puts ' font-size: 12px;'
741
+ out.puts ' font-family: bitstream vera sans, verdana, arial, sans serif;'
742
+ out.puts ' background-color: #efefef;'
743
+ out.puts '}'
744
+ out.puts ''
745
+ out.puts 'table { '
746
+ out.puts ' border-collapse: collapse;'
747
+ out.puts ' /*border-spacing: 0;*/'
748
+ out.puts ' border: 1px solid #666;'
749
+ out.puts ' background-color: #fff;'
750
+ out.puts ' margin-bottom: 20px;'
751
+ out.puts '}'
752
+ out.puts ''
753
+ out.puts 'table, th, th+th, td, td+td {'
754
+ out.puts ' border: 1px solid #ccc;'
755
+ out.puts '}'
756
+ out.puts ''
757
+ out.puts 'table th {'
758
+ out.puts ' font-size: 12px;'
759
+ out.puts ' color: #fc0;'
760
+ out.puts ' padding: 4px 0;'
761
+ out.puts ' background-color: #336;'
762
+ out.puts '}'
763
+ out.puts ''
764
+ out.puts 'th, td {'
765
+ out.puts ' padding: 4px 10px;'
766
+ out.puts '}'
767
+ out.puts ''
768
+ out.puts 'td { '
769
+ out.puts ' font-size: 13px;'
770
+ out.puts '}'
771
+ out.puts ''
772
+ out.puts '.class_name {'
773
+ out.puts ' font-size: 17px;'
774
+ out.puts ' margin: 20px 0 0;'
775
+ out.puts '}'
776
+ out.puts ''
777
+ out.puts '.class_complexity {'
778
+ out.puts 'margin: 0 auto;'
779
+ out.puts '}'
780
+ out.puts ''
781
+ out.puts '.class_complexity>.class_complexity {'
782
+ out.puts ' margin: 0;'
783
+ out.puts '}'
784
+ out.puts ''
785
+ out.puts '.class_total_complexity, .class_total_lines, .start_token_count, .file_count {'
786
+ out.puts ' font-size: 13px;'
787
+ out.puts ' font-weight: bold;'
788
+ out.puts '}'
789
+ out.puts ''
790
+ out.puts '.class_total_complexity, .class_total_lines {'
791
+ out.puts ' color: #c00;'
792
+ out.puts '}'
793
+ out.puts ''
794
+ out.puts '.start_token_count, .file_count {'
795
+ out.puts ' color: #333;'
796
+ out.puts '}'
797
+ out.puts ''
798
+ out.puts '.warning {'
799
+ out.puts ' background-color: yellow;'
800
+ out.puts '}'
801
+ out.puts ''
802
+ out.puts '.error {'
803
+ out.puts ' background-color: #f00;'
804
+ out.puts '}'
805
+ out.puts "</style>"
806
+
807
+ out.string
808
+ end
809
+
810
+ def style_sheet
811
+ HTMLStyleSheet.style_sheet
812
+ end
813
+ end
814
+
815
+
816
+ class HTMLTokenCounterFormater < TokenCounterFormater
817
+ include HTMLStyleSheet
818
+
819
+ def start(new_out=nil)
820
+ reset_data
821
+ @out = new_out if new_out
822
+ @out.puts "<html>"
823
+ @out.puts style_sheet
824
+ @out.puts "<body>"
825
+ end
826
+
827
+ def start_count(number_of_files)
828
+ @out.puts "<div class=\"start_token_count\">"
829
+ @out.puts "Number of files: #{number_of_files}"
830
+ @out.puts "</div>"
831
+ end
832
+
833
+ def start_file(file_name)
834
+ @current = file_name
835
+ @out.puts "<div class=\"file_count\">"
836
+ @out.puts "<p class=\"file_name\">"
837
+ @out.puts "File: #{file_name}"
838
+ @out.puts "</p>"
839
+ @out.puts "<table width=\"100%\" border=\"1\">"
840
+ @out.puts "<tr><th>Line</th><th>Tokens</th></tr>"
841
+ end
842
+
843
+ def line_token_count(line_number,number_of_tokens)
844
+ return if @filter.ignore?(number_of_tokens)
845
+ klass = warn_error?(number_of_tokens, line_number)
846
+ @out.puts "<tr><td>#{line_number}</td><td#{klass}>#{number_of_tokens}</td></tr>"
847
+ end
848
+
849
+ def end_file
850
+ @out.puts "</table>"
851
+ end
852
+
853
+ def end_count
854
+ end
855
+
856
+ def end
857
+ @out.puts "</body>"
858
+ @out.puts "</html>"
859
+ end
860
+ end
861
+
862
+ class ParseStateFormater < BaseFormater
863
+
864
+ def start(new_out=nil)
865
+ reset_data
866
+ @out = new_out if new_out
867
+ end
868
+
869
+ def end
870
+ end
871
+
872
+ def start_class_compute_state(type_name,name,complexity,lines)
873
+ @current = name
874
+ @out.puts "-- START #{name} --"
875
+ @out.puts "Type:#{type_name} Name:#{name} Complexity:#{complexity} Lines:#{lines}"
876
+ end
877
+
878
+ def end_class_compute_state(name)
879
+ @out.puts "-- END #{name} --"
880
+ end
881
+
882
+ def def_compute_state(name,complexity,lines)
883
+ return if @filter.ignore?(complexity)
884
+ warn_error?(complexity, name)
885
+ @out.puts "Type:Def Name:#{name} Complexity:#{complexity} Lines:#{lines}"
886
+ end
887
+
888
+ end
889
+
890
+
891
+
892
+ class StateHTMLComplexityFormater < ParseStateFormater
893
+ include HTMLStyleSheet
894
+
895
+ def start(new_out=nil)
896
+ reset_data
897
+ @out = new_out if new_out
898
+ @out.puts "<html><head><title>Cyclometric Complexity</title></head>"
899
+ @out.puts style_sheet
900
+ @out.puts "<body>"
901
+ end
902
+
903
+ def end
904
+ @out.puts "</body>"
905
+ @out.puts "</html>"
906
+ end
907
+
908
+ def start_class_compute_state(type_name,name,complexity,lines)
909
+ @current = name
910
+ @out.puts "<div class=\"class_complexity\">"
911
+ @out.puts "<h2 class=\"class_name\">#{type_name} : #{name}</h2>"
912
+ @out.puts "<div class=\"class_total_complexity\">Total Complexity: #{complexity}</div>"
913
+ @out.puts "<div class=\"class_total_lines\">Total Lines: #{lines}</div>"
914
+ @out.puts "<table width=\"100%\" border=\"1\">"
915
+ @out.puts "<tr><th>Method</th><th>Complexity</th><th># Lines</th></tr>"
916
+ end
917
+
918
+ def end_class_compute_state(name)
919
+ @out.puts "</table>"
920
+ @out.puts "</div>"
921
+ end
922
+
923
+ def def_compute_state(name, complexity, lines)
924
+ return if @filter.ignore?(complexity)
925
+ klass = warn_error?(complexity, name)
926
+ @out.puts "<tr><td>#{name}</td><td#{klass}>#{complexity}</td><td>#{lines}</td></tr>"
927
+ end
928
+
929
+ end
930
+
931
+
932
+ module ResultIndexGenerator
933
+ def summarize_errors_and_warnings(enw, header)
934
+ return "" if enw.empty?
935
+ f = StringIO.new
936
+ erval = Hash.new { |h,k| h[k] = Array.new }
937
+ wval = Hash.new { |h,k| h[k] = Array.new }
938
+
939
+ enw.each do |fname, warnings, errors|
940
+ errors.each do |c,m,v|
941
+ erval[v] << [fname, c, m]
942
+ end
943
+ warnings.each do |c,m,v|
944
+ wval[v] << [fname, c, m]
945
+ end
946
+ end
947
+
948
+ f.puts "<h2 class=\"class_name\">Errors and Warnings</h2>"
949
+ f.puts "<table width=\"100%\" border=\"1\">"
950
+ f.puts header
951
+
952
+ f.puts print_summary_table_rows(erval, "error")
953
+ f.puts print_summary_table_rows(wval, "warning")
954
+ f.puts "</table>"
955
+
956
+ f.string
957
+ end
958
+
959
+ def print_summary_table_rows(ewvals, klass_type)
960
+ f = StringIO.new
961
+ ewvals.sort { |a,b| b <=> a}.each do |v, vals|
962
+ vals.sort.each do |fname, c, m|
963
+ f.puts "<tr><td><a href=\"./#{fname}\">#{c}</a></td><td>#{m}</td>"
964
+ f.puts "<td class=\"#{klass_type}\">#{v}</td></tr>"
965
+ end
966
+ end
967
+ f.string
968
+ end
969
+
970
+ def list_analyzed_files(files)
971
+ f = StringIO.new
972
+ f.puts "<h2 class=\"class_name\">Analyzed Files</h2>"
973
+ f.puts "<ul>"
974
+ files.each do |fname, warnings, errors|
975
+ readname = fname.split("_")[0...-1].join("_")
976
+ f.puts "<li>"
977
+ f.puts "<p class=\"file_name\"><a href=\"./#{fname}\">#{readname}</a>"
978
+ f.puts "</li>"
979
+ end
980
+ f.puts "</ul>"
981
+ f.string
982
+ end
983
+
984
+ def write_index(files, filename, title, header)
985
+ return if files.empty?
986
+
987
+ File.open(filename,"w") do |f|
988
+ f.puts "<html><head><title>#{title}</title></head>"
989
+ f.puts "#{HTMLStyleSheet.style_sheet}\n<body>"
990
+ f.puts "<h1>#{title}</h1>"
991
+
992
+ enw = files.find_all { |fn,w,e| (!w.empty? || !e.empty?) }
993
+
994
+ f.puts summarize_errors_and_warnings(enw, header)
995
+
996
+ f.puts "<hr/>"
997
+ f.puts list_analyzed_files(files)
998
+ f.puts "</body></html>"
999
+ end
1000
+ end
1001
+
1002
+ def write_cyclo_index(files, output_dir)
1003
+ header = "<tr><th>Class</th><th>Method</th><th>Complexity</th></tr>"
1004
+ write_index(files,
1005
+ "#{output_dir}/index_cyclo.html",
1006
+ "Index for cyclomatic complexity",
1007
+ header)
1008
+ end
1009
+
1010
+ def write_token_index(files, output_dir)
1011
+ header = "<tr><th>File</th><th>Line #</th><th>Tokens</th></tr>"
1012
+ write_index(files,
1013
+ "#{output_dir}/index_token.html",
1014
+ "Index for tokens per line",
1015
+ header)
1016
+ end
1017
+
1018
+ end
1019
+
1020
+ module Saikuro
1021
+ def Saikuro.analyze(files, state_formater, token_count_formater, output_dir)
1022
+
1023
+ idx_states = Array.new
1024
+ idx_tokens = Array.new
1025
+
1026
+ # parse each file
1027
+ files.each do |file|
1028
+ begin
1029
+ STDOUT.puts "Parsing #{file}"
1030
+ # create top state
1031
+ top = ParseState.make_top_state
1032
+ STDOUT.puts "TOP State made" if $VERBOSE
1033
+ token_counter = TokenCounter.new
1034
+ ParseState.set_token_counter(token_counter)
1035
+ token_counter.set_current_file(file)
1036
+
1037
+ STDOUT.puts "Setting up Lexer" if $VERBOSE
1038
+ lexer = RubyLex.new
1039
+ # Turn off this, because it aborts when a syntax error is found...
1040
+ lexer.exception_on_syntax_error = false
1041
+ lexer.set_input(File.new(file,"r"))
1042
+ top.lexer = lexer
1043
+ STDOUT.puts "Parsing" if $VERBOSE
1044
+ top.parse
1045
+
1046
+
1047
+ fdir_path = seperate_file_from_path(file)
1048
+ FileUtils.makedirs("#{output_dir}/#{fdir_path}")
1049
+
1050
+ if state_formater
1051
+ # output results
1052
+ state_io = StringIO.new
1053
+ state_formater.start(state_io)
1054
+ top.compute_state(state_formater)
1055
+ state_formater.end
1056
+
1057
+ fname = "#{file}_cyclo.html"
1058
+ puts "writing cyclomatic #{file}" if $VERBOSE
1059
+ File.open("#{output_dir}/#{fname}","w") do |f|
1060
+ f.write state_io.string
1061
+ end
1062
+ idx_states<< [
1063
+ fname,
1064
+ state_formater.warnings.dup,
1065
+ state_formater.errors.dup,
1066
+ ]
1067
+ end
1068
+
1069
+ if token_count_formater
1070
+ token_io = StringIO.new
1071
+ token_count_formater.start(token_io)
1072
+ token_counter.list_tokens_per_line(token_count_formater)
1073
+ token_count_formater.end
1074
+
1075
+ fname = "#{file}_token.html"
1076
+ puts "writing token #{file}" if $VERBOSE
1077
+ File.open("#{output_dir}/#{fname}","w") do |f|
1078
+ f.write token_io.string
1079
+ end
1080
+ idx_tokens<< [
1081
+ fname,
1082
+ token_count_formater.warnings.dup,
1083
+ token_count_formater.errors.dup,
1084
+ ]
1085
+ end
1086
+
1087
+ rescue RubyLex::SyntaxError => synerr
1088
+ STDOUT.puts "Lexer error for file #{file} on line #{lexer.line_no}"
1089
+ STDOUT.puts "#{synerr.class.name} : #{synerr.message}"
1090
+ rescue StandardError => err
1091
+ STDOUT.puts "Error while parsing file : #{file}"
1092
+ STDOUT.puts err.class,err.message,err.backtrace.join("\n")
1093
+ rescue Exception => ex
1094
+ STDOUT.puts "Error while parsing file : #{file}"
1095
+ STDOUT.puts ex.class,ex.message,ex.backtrace.join("\n")
1096
+ end
1097
+ end
1098
+
1099
+ [idx_states, idx_tokens]
1100
+ end
1101
+ end
1102
+
1103
# Command-line entry point: everything below only runs when this file is
# executed directly (`ruby saikuro.rb ...`), not when it is required as a
# library by metric_fu.
if __FILE__ == $0
  require 'stringio'
  require 'getoptlong'
  require 'fileutils'
  require 'find'
  require 'rdoc/ri/ri_paths'
  require 'rdoc/usage'

  # Mix in the report-index helpers (write_cyclo_index / write_token_index,
  # called at the bottom of this script).
  include ResultIndexGenerator
1112
+
1113
# Returns the directory portion of +path+ -- everything before the last
# "/" -- or the empty string when there is no directory component.
# Note: unlike File.dirname, a bare filename yields "" rather than ".".
def seperate_file_from_path(path)
  segments = path.split("/")
  return "" if segments.size == 1
  segments[0...-1].join("/")
end
1122
+
1123
# Recursively collects every *.rb file under the given search paths.
# +input_path+ is one or more directories separated by "|"; whitespace
# around each entry is ignored.
def get_ruby_files(input_path)
  ruby_files = Array.new
  input_path.split("|").map { |p| p.strip }.each do |root|
    Find.find(root) do |entry|
      next if FileTest.directory?(entry)
      ruby_files << entry if entry =~ /\.rb$/
    end
  end
  ruby_files
end
1132
+
1133
# ---- Option parsing and driver for stand-alone execution ----

files = Array.new
output_dir = "./"
formater = "html"
# Report filters. NOTE(review): judging by the -y/-w/-e and -k/-s/-d
# handlers below, Filter's positional args appear to be
# (limit[, warn, error]) -- confirm against the Filter class defined
# earlier in this file.
state_filter = Filter.new(5)
token_filter = Filter.new(10, 25, 50)
comp_state = comp_token = false
begin
  opt = GetoptLong.new(
    ["-o","--output_directory", GetoptLong::REQUIRED_ARGUMENT],
    ["-h","--help", GetoptLong::NO_ARGUMENT],
    ["-f","--formater", GetoptLong::REQUIRED_ARGUMENT],
    ["-c","--cyclo", GetoptLong::NO_ARGUMENT],
    ["-t","--token", GetoptLong::NO_ARGUMENT],
    ["-y","--filter_cyclo", GetoptLong::REQUIRED_ARGUMENT],
    ["-k","--filter_token", GetoptLong::REQUIRED_ARGUMENT],
    ["-w","--warn_cyclo", GetoptLong::REQUIRED_ARGUMENT],
    ["-s","--warn_token", GetoptLong::REQUIRED_ARGUMENT],
    ["-e","--error_cyclo", GetoptLong::REQUIRED_ARGUMENT],
    ["-d","--error_token", GetoptLong::REQUIRED_ARGUMENT],
    ["-p","--parse_file", GetoptLong::REQUIRED_ARGUMENT],
    ["-i","--input_directory", GetoptLong::REQUIRED_ARGUMENT],
    ["-v","--verbose", GetoptLong::NO_ARGUMENT]
  )

  opt.each do |arg,val|
    case arg
    when "-o"   # directory the reports are written to
      output_dir = val
    when "-h"
      RDoc.usage('help')
    when "-f"   # "html" (default); anything else selects plain text
      formater = val
    when "-c"   # enable the cyclomatic-complexity report
      comp_state = true
    when "-t"   # enable the tokens-per-line report
      comp_token = true
    when "-k"
      token_filter.limit = val.to_i
    when "-s"
      token_filter.warn = val.to_i
    when "-d"
      token_filter.error = val.to_i
    when "-y"
      state_filter.limit = val.to_i
    when "-w"
      state_filter.warn = val.to_i
    when "-e"
      state_filter.error = val.to_i
    when "-p"   # analyze a single file (repeatable)
      files<< val
    when "-i"   # analyze every *.rb under a "|"-separated directory list
      files.concat(get_ruby_files(val))
    when "-v"
      STDOUT.puts "Verbose mode on"
      $VERBOSE = true
    end

  end
  # At least one of --cyclo / --token must be requested.
  RDoc.usage if !comp_state && !comp_token
rescue => err
  # Any option-parsing failure just prints usage (RDoc.usage exits).
  RDoc.usage
end

# Pick HTML or plain-text formatters; Saikuro.analyze redirects their
# output into per-file buffers regardless of the STDOUT passed here.
if formater =~ /html/i
  state_formater = StateHTMLComplexityFormater.new(STDOUT,state_filter)
  token_count_formater = HTMLTokenCounterFormater.new(STDOUT,token_filter)
else
  state_formater = ParseStateFormater.new(STDOUT,state_filter)
  token_count_formater = TokenCounterFormater.new(STDOUT,token_filter)
end

# A nil formatter tells Saikuro.analyze to skip that report entirely.
state_formater = nil if !comp_state
token_count_formater = nil if !comp_token

idx_states, idx_tokens = Saikuro.analyze(files,
                                         state_formater,
                                         token_count_formater,
                                         output_dir)

# Write the summary index pages that link to the per-file reports.
write_cyclo_index(idx_states, output_dir)
write_token_index(idx_tokens, output_dir)
end  # if __FILE__ == $0