saikuro 1.1.1.1

data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ *.gem
+ .bundle
+ Gemfile.lock
+ pkg/*
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source "http://rubygems.org"
+
+ # Specify your gem's dependencies in saikuro.gemspec
+ gemspec
data/Rakefile ADDED
@@ -0,0 +1 @@
+ require "bundler/gem_tasks"
data/bin/saikuro ADDED
@@ -0,0 +1,1216 @@
1
+ #!/usr/bin/env ruby
2
+ # $Id: saikuro 33 2006-12-07 16:09:55Z zev $
3
+ # Version 0.2
4
+ # == Usage
5
+ #
6
+ # saikuro [ -h ] [-o output_directory] [-f type] [ -c, -t ]
7
+ # [ -y, -w, -e, -k, -s, -d number ] ( -p file | -i directory )
8
+ #
9
+ # == Help
10
+ #
11
+ # -o, --output_directory (directory) : A directory to output the results in.
12
+ # The current directory is used if this option is not passed.
13
+ #
14
+ # -h, --help : This help message.
15
+ #
16
+ # -f, --formater (html | text) : The format to output the results in.
17
+ # The default is html
18
+ #
19
+ # -c, --cyclo : Compute the cyclomatic complexity of the input.
20
+ #
21
+ # -t, --token : Count the number of tokens per line of the input.
22
+ #
23
+ # -y, --filter_cyclo (number) : Filter the output to only include methods
24
+ # whose cyclomatic complexity is greater than the passed number.
25
+ #
26
+ # -w, --warn_cyclo (number) : Highlight with a warning methods whose
27
+ # cyclomatic complexity is greater than or equal to the passed number.
28
+ #
29
+ #
30
+ # -e, --error_cyclo (number) : Highlight with an error methods whose
31
+ # cyclomatic complexity is greater than or equal to the passed number.
32
+ #
33
+ #
34
+ # -k, --filter_token (number) : Filter the output to only include lines
35
+ # whose token count is greater than the passed number.
36
+ #
37
+ #
38
+ # -s, --warn_token (number) : Highlight with a warning lines whose
39
+ # token count is greater than or equal to the passed number.
40
+ #
41
+ #
42
+ # -d, --error_token (number) : Highlight with an error lines whose
43
+ # token count is greater than or equal to the passed number.
44
+ #
45
+ #
46
+ # -p, --parse_file (file) : A file to use as input.
47
+ #
48
+ # -i, --input_directory (directory) : All ruby files found recursively
49
+ # inside the directory are passed as input.
50
+
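+ # == Example
+ #
+ # Illustrative invocations (the paths and thresholds below are arbitrary
+ # examples, not recommended defaults):
+ #
+ #   # Cyclomatic complexity for all ruby files under lib/, written as
+ #   # HTML to the out/ directory, warning at 11 and erroring at 16:
+ #   saikuro -c -i lib -w 11 -e 16 -o out/
+ #
+ #   # Token counts per line for a single file, as plain text:
+ #   saikuro -t -f text -p lib/example.rb
+ #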
51
+ # == License
52
+ # Saikuro uses the BSD license.
53
+ #
54
+ # Copyright (c) 2005, Ubiquitous Business Technology (http://ubit.com)
55
+ # All rights reserved.
56
+ #
57
+ # Redistribution and use in source and binary forms, with or without
58
+ # modification, are permitted provided that the following conditions are
59
+ # met:
60
+ #
61
+ #
62
+ # * Redistributions of source code must retain the above copyright
63
+ # notice, this list of conditions and the following disclaimer.
64
+ #
65
+ # * Redistributions in binary form must reproduce the above
66
+ # copyright notice, this list of conditions and the following
67
+ # disclaimer in the documentation and/or other materials provided
68
+ # with the distribution.
69
+ #
70
+ # * Neither the name of Ubiquitous Business Technology nor the names
71
+ # of its contributors may be used to endorse or promote products
72
+ # derived from this software without specific prior written
73
+ # permission.
74
+ #
75
+ #
76
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
77
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
78
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
79
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
80
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
81
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
82
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
83
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
84
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
85
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
86
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
87
+ #
88
+ # == Author
89
+ # Zev Blut (zb@ubit.com)
90
+
91
+ require 'irb/ruby-lex'
92
+ require 'yaml'
93
+
94
+ # States to watch for
95
+ # once inside a def, grab the token after the space, because it may also
96
+ # be something like + or << for operator overloading.
97
+
98
+ # Counts the number of tokens in each line.
99
+ class TokenCounter
100
+ include RubyToken
101
+
102
+ attr_reader :current_file
103
+
104
+ def initialize
105
+ @files = Hash.new
106
+ @tokens_per_line = Hash.new(0)
107
+ @current_file = ""
108
+ end
109
+
110
+ # Mark file to associate with the token count.
111
+ def set_current_file(file)
112
+ @current_file = file
113
+ @tokens_per_line = Hash.new(0)
114
+ @files[@current_file] = @tokens_per_line
115
+ end
116
+
117
+ # Iterate through all tracked files, passing
118
+ # the provided formater the token counts.
119
+ def list_tokens_per_line(formater)
120
+ formater.start_count(@files.size)
121
+ @files.each do |fname, tok_per_line|
122
+ formater.start_file(fname)
123
+ tok_per_line.sort.each do |line,num|
124
+ formater.line_token_count(line,num)
125
+ end
126
+ formater.end_file
127
+ end
128
+ end
129
+
130
+ # Count the token for the passed line.
131
+ def count_token(line_no,token)
132
+ case token
133
+ when TkSPACE, TkNL, TkRD_COMMENT
134
+ # Do not count these as tokens
135
+ when TkCOMMENT
136
+ # Ignore this only for comments in a statement?
137
+ # Ignore TkCOLON,TkCOLON2 and operators? like "." etc..
138
+ when TkRBRACK, TkRPAREN, TkRBRACE
139
+ # Ignore the closing of an array/index/hash/paren
140
+ # The opening is counted, but no more.
141
+ # Thus [], () {} is counted as 1 token not 2.
142
+ else
143
+ # may want to filter out comments...
144
+ @tokens_per_line[line_no] += 1
145
+ end
146
+ end
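+ # A rough illustration of the rule above (token class names are the
+ # lexer's, counts are approximate): for the line "x = [1, 2]" the
+ # identifier, the assignment, the opening bracket, both literals and the
+ # comma are counted, while the spaces, the closing bracket and the
+ # newline are ignored, giving a token count of 6 for that line.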
147
+
148
+ end
149
+
150
+ # Main class and structure used to compute the
151
+ # cyclomatic complexity of Ruby programs.
152
+ class ParseState
153
+ include RubyToken
154
+ attr_accessor :name, :children, :complexity, :parent, :lines
155
+
156
+ @@top_state = nil
157
+ def ParseState.make_top_state()
158
+ @@top_state = ParseState.new(nil)
159
+ @@top_state.name = "__top__"
160
+ @@top_state
161
+ end
162
+
163
+ @@token_counter = TokenCounter.new
164
+ def ParseState.set_token_counter(counter)
165
+ @@token_counter = counter
166
+ end
167
+ def ParseState.get_token_counter
168
+ @@token_counter
169
+ end
170
+
171
+ def initialize(lexer,parent=nil)
172
+ @name = ""
173
+ @children = Array.new
174
+ @complexity = 0
175
+ @parent = parent
176
+ @lexer = lexer
177
+ @run = true
178
+ # To catch one-line def statements, we always have one line.
179
+ @lines = 0
180
+ @last_token_line_and_char = Array.new
181
+ end
182
+
183
+ def top_state?
184
+ self == @@top_state
185
+ end
186
+
187
+ def lexer=(lexer)
188
+ @run = true
189
+ @lexer = lexer
190
+ end
191
+
192
+ def make_state(type,parent = nil)
193
+ cstate = type.new(@lexer,self)
194
+ parent.children<< cstate
195
+ cstate
196
+ end
197
+
198
+ def calc_complexity
199
+ complexity = @complexity
200
+ children.each do |child|
201
+ complexity += child.calc_complexity
202
+ end
203
+ complexity
204
+ end
205
+
206
+ def calc_lines
207
+ lines = @lines
208
+ children.each do |child|
209
+ lines += child.calc_lines
210
+ end
211
+ lines
212
+ end
213
+
214
+ def compute_state(formater)
215
+ if top_state?
216
+ compute_state_for_global(formater)
217
+ end
218
+
219
+ @children.each do |s|
220
+ s.compute_state(formater)
221
+ end
222
+ end
223
+
224
+ def compute_state_for_global(formater)
225
+ global_def, @children = @children.partition do |s|
226
+ !s.kind_of?(ParseClass)
227
+ end
228
+ return if global_def.empty?
229
+ gx = global_def.inject(0) { |c,s| s.calc_complexity }
230
+ gl = global_def.inject(0) { |c,s| s.calc_lines }
231
+ formater.start_class_compute_state("Global", "", gx, gl)
232
+ global_def.each do |s|
233
+ s.compute_state(formater)
234
+ end
235
+ formater.end_class_compute_state("")
236
+ end
237
+
238
+ # Count the parsed tokens if true, else ignore them.
239
+ def count_tokens?
240
+ true
241
+ end
242
+
243
+ def parse
244
+ while @run do
245
+ tok = @lexer.token
246
+ @run = false if tok.nil?
247
+ if lexer_loop?(tok)
248
+ STDERR.puts "Lexer loop at line : #{@lexer.line_no} char #{@lexer.char_no}."
249
+ @run = false
250
+ end
251
+ @last_token_line_and_char<< [@lexer.line_no.to_i, @lexer.char_no.to_i, tok]
252
+ if $VERBOSE
253
+ puts "DEBUG: #{@lexer.line_no} #{tok.class}:#{tok.name if tok.respond_to?(:name)}"
254
+ end
255
+ @@token_counter.count_token(@lexer.line_no, tok) if count_tokens?
256
+ parse_token(tok)
257
+ end
258
+ end
259
+
260
+ # Ruby-Lexer can go into a loop if the file does not end with a newline.
261
+ def lexer_loop?(token)
262
+ return false if @last_token_line_and_char.empty?
263
+ loop_flag = false
264
+ last = @last_token_line_and_char.last
265
+ line = last[0]
266
+ char = last[1]
267
+ ltok = last[2]
268
+
269
+ if ( (line == @lexer.line_no.to_i) &&
270
+ (char == @lexer.char_no.to_i) &&
271
+ (ltok.class == token.class) )
272
+ # We are potentially in a loop
273
+ if @last_token_line_and_char.size >= 3
274
+ loop_flag = true
275
+ end
276
+ else
277
+ # Not in a loop so clear stack
278
+ @last_token_line_and_char = Array.new
279
+ end
280
+
281
+ loop_flag
282
+ end
283
+
284
+ def do_begin_token(token)
285
+ make_state(EndableParseState, self)
286
+ end
287
+
288
+ def do_class_token(token)
289
+ make_state(ParseClass,self)
290
+ end
291
+
292
+ def do_module_token(token)
293
+ make_state(ParseModule,self)
294
+ end
295
+
296
+ def do_def_token(token)
297
+ make_state(ParseDef,self)
298
+ end
299
+
300
+ def do_constant_token(token)
301
+ nil
302
+ end
303
+
304
+ def do_identifier_token(token)
305
+ if (token.name == "__END__" && token.char_no.to_i == 0)
306
+ # The Ruby code has stopped and the rest is data so cease parsing.
307
+ @run = false
308
+ end
309
+ nil
310
+ end
311
+
312
+ def do_right_brace_token(token)
313
+ nil
314
+ end
315
+
316
+ def do_end_token(token)
317
+ end_debug
318
+ nil
319
+ end
320
+
321
+ def do_block_token(token)
322
+ make_state(ParseBlock,self)
323
+ end
324
+
325
+ def do_conditional_token(token)
326
+ make_state(ParseCond,self)
327
+ end
328
+
329
+ def do_conditional_do_control_token(token)
330
+ make_state(ParseDoCond,self)
331
+ end
332
+
333
+ def do_case_token(token)
334
+ make_state(EndableParseState, self)
335
+ end
336
+
337
+ def do_one_line_conditional_token(token)
338
+ # This is an if with no end
339
+ @complexity += 1
340
+ #STDOUT.puts "got IF_MOD: #{self.to_yaml}" if $VERBOSE
341
+ #if state.type != "class" && state.type != "def" && state.type != "cond"
342
+ #STDOUT.puts "Changing IF_MOD Parent" if $VERBOSE
343
+ #state = state.parent
344
+ #@run = false
345
+ nil
346
+ end
347
+
348
+ def do_else_token(token)
349
+ STDOUT.puts "Ignored/Unknown Token:#{token.class}" if $VERBOSE
350
+ nil
351
+ end
352
+
353
+ def do_comment_token(token)
354
+ make_state(ParseComment, self)
355
+ end
356
+
357
+ def do_symbol_token(token)
358
+ make_state(ParseSymbol, self)
359
+ end
360
+
361
+ def parse_token(token)
362
+ state = nil
363
+ case token
364
+ when TkCLASS
365
+ state = do_class_token(token)
366
+ when TkMODULE
367
+ state = do_module_token(token)
368
+ when TkDEF
369
+ state = do_def_token(token)
370
+ when TkCONSTANT
371
+ # Nothing to do with a constant at top level?
372
+ state = do_constant_token(token)
373
+ when TkIDENTIFIER,TkFID
374
+ # Nothing to do at top level?
375
+ state = do_identifier_token(token)
376
+ when TkRBRACE
377
+ # Nothing to do at top level
378
+ state = do_right_brace_token(token)
379
+ when TkEND
380
+ state = do_end_token(token)
381
+ # At top level this might be an error...
382
+ when TkDO,TkfLBRACE
383
+ state = do_block_token(token)
384
+ when TkIF,TkUNLESS
385
+ state = do_conditional_token(token)
386
+ when TkWHILE,TkUNTIL,TkFOR
387
+ state = do_conditional_do_control_token(token)
388
+ when TkELSIF #,TkELSE
389
+ @complexity += 1
390
+ when TkELSE
391
+ # Else does not increase complexity
392
+ when TkCASE
393
+ state = do_case_token(token)
394
+ when TkWHEN
395
+ @complexity += 1
396
+ when TkBEGIN
397
+ state = do_begin_token(token)
398
+ when TkRESCUE
399
+ # Maybe this should add complexity and not begin
400
+ @complexity += 1
401
+ when TkIF_MOD, TkUNLESS_MOD, TkUNTIL_MOD, TkWHILE_MOD, TkQUESTION
402
+ state = do_one_line_conditional_token(token)
403
+ when TkNL
404
+ #
405
+ @lines += 1
406
+ when TkRETURN
407
+ # Early returns do not increase complexity as the condition that
408
+ # calls the return is the one that increases it.
409
+ when TkCOMMENT
410
+ state = do_comment_token(token)
411
+ when TkSYMBEG
412
+ state = do_symbol_token(token)
413
+ when TkError
414
+ STDOUT.puts "Lexer received an error for line #{@lexer.line_no} char #{@lexer.char_no}"
415
+ else
416
+ state = do_else_token(token)
417
+ end
418
+ state.parse if state
419
+ end
420
+
421
+ def end_debug
422
+ STDOUT.puts "got an end: #{@name} in #{self.class.name}" if $VERBOSE
423
+ if @parent.nil?
424
+ STDOUT.puts "DEBUG: Line #{@lexer.line_no}"
425
+ STDOUT.puts "DEBUG: #{@name}; #{self.class}"
426
+ # to_yaml can cause an infinite loop?
427
+ #STDOUT.puts "TOP: #{@@top_state.to_yaml}"
428
+ #STDOUT.puts "TOP: #{@@top_state.inspect}"
429
+
430
+ # This may not be an error?
431
+ #exit 1
432
+ end
433
+ end
434
+
435
+ end
436
+
437
+ # Read and consume tokens in comments until a new line.
438
+ class ParseComment < ParseState
439
+
440
+ # While in a comment state do not count the tokens.
441
+ def count_tokens?
442
+ false
443
+ end
444
+
445
+ def parse_token(token)
446
+ if token.is_a?(TkNL)
447
+ @lines += 1
448
+ @run = false
449
+ end
450
+ end
451
+ end
452
+
453
+ class ParseSymbol < ParseState
454
+ def initialize(lexer, parent = nil)
455
+ super
456
+ STDOUT.puts "STARTING SYMBOL" if $VERBOSE
457
+ end
458
+
459
+ def parse_token(token)
460
+ STDOUT.puts "Symbol's token is #{token.class}" if $VERBOSE
461
+ # Consume the next token and stop
462
+ @run = false
463
+ nil
464
+ end
465
+ end
466
+
467
+ class EndableParseState < ParseState
468
+ def initialize(lexer,parent=nil)
469
+ super(lexer,parent)
470
+ STDOUT.puts "Starting #{self.class}" if $VERBOSE
471
+ end
472
+
473
+ def do_end_token(token)
474
+ end_debug
475
+ @run = false
476
+ nil
477
+ end
478
+ end
479
+
480
+ class ParseClass < EndableParseState
481
+ def initialize(lexer,parent=nil)
482
+ super(lexer,parent)
483
+ @type_name = "Class"
484
+ end
485
+
486
+ def do_constant_token(token)
487
+ @name = token.name if @name.empty?
488
+ nil
489
+ end
490
+
491
+ def compute_state(formater)
492
+ # Separate the Module and Class children out
493
+ cnm_children, @children = @children.partition do |child|
494
+ child.kind_of?(ParseClass)
495
+ end
496
+
497
+ formater.start_class_compute_state(@type_name,@name,self.calc_complexity,self.calc_lines)
498
+ super(formater)
499
+ formater.end_class_compute_state(@name)
500
+
501
+ cnm_children.each do |child|
502
+ child.name = @name + "::" + child.name
503
+ child.compute_state(formater)
504
+ end
505
+ end
506
+ end
507
+
508
+ class ParseModule < ParseClass
509
+ def initialize(lexer,parent=nil)
510
+ super(lexer,parent)
511
+ @type_name = "Module"
512
+ end
513
+ end
514
+
515
+ class ParseDef < EndableParseState
516
+
517
+ def initialize(lexer,parent=nil)
518
+ super(lexer,parent)
519
+ @complexity = 1
520
+ @looking_for_name = true
521
+ @first_space = true
522
+ end
523
+
524
+ # This way I don't need to list all possible overload
525
+ # tokens.
526
+ def create_def_name(token)
527
+ case token
528
+ when TkSPACE
529
+ # mark first space so we can stop at next space
530
+ if @first_space
531
+ @first_space = false
532
+ else
533
+ @looking_for_name = false
534
+ end
535
+ when TkNL,TkLPAREN,TkfLPAREN,TkSEMICOLON
536
+ # we can also stop at a new line or left parenthesis
537
+ @looking_for_name = false
538
+ when TkDOT
539
+ @name<< "."
540
+ when TkCOLON2
541
+ @name<< "::"
542
+ when TkASSIGN
543
+ @name<< "="
544
+ when TkfLBRACK
545
+ @name<< "["
546
+ when TkRBRACK
547
+ @name<< "]"
548
+ else
549
+ begin
550
+ @name<< token.name.to_s
551
+ rescue Exception => err
552
+ #what is this?
553
+ STDOUT.puts @@token_counter.current_file
554
+ STDOUT.puts @name
555
+ STDOUT.puts token.inspect
556
+ STDOUT.puts err.message
557
+ exit 1
558
+ end
559
+ end
560
+ end
561
+
562
+ def parse_token(token)
563
+ if @looking_for_name
564
+ create_def_name(token)
565
+ end
566
+ super(token)
567
+ end
568
+
569
+ def compute_state(formater)
570
+ formater.def_compute_state(@name, self.calc_complexity, self.calc_lines)
571
+ super(formater)
572
+ end
573
+ end
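+ # For reference, the kinds of definition headers create_def_name has to
+ # reassemble a name from (an illustrative, non-exhaustive list):
+ #   def greet(name)      def self.run       def obj.size
+ #   def <<(item)         def []=(key, val)  def value=(v)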
574
+
575
+ class ParseCond < EndableParseState
576
+ def initialize(lexer,parent=nil)
577
+ super(lexer,parent)
578
+ @complexity = 1
579
+ end
580
+ end
581
+
582
+ class ParseDoCond < ParseCond
583
+ def initialize(lexer,parent=nil)
584
+ super(lexer,parent)
585
+ @looking_for_new_line = true
586
+ end
587
+
588
+ # Need to consume the do that can appear at the
589
+ # end of these control structures.
590
+ def parse_token(token)
591
+ if @looking_for_new_line
592
+ if token.is_a?(TkDO)
593
+ nil
594
+ else
595
+ if token.is_a?(TkNL)
596
+ @looking_for_new_line = false
597
+ end
598
+ super(token)
599
+ end
600
+ else
601
+ super(token)
602
+ end
603
+ end
604
+
605
+ end
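+ # Illustrative case for the optional "do" handled above:
+ #   while queue.any? do     # this "do" is consumed here and must not
+ #     process(queue.pop)    # open a separate ParseBlock state
+ #   end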
606
+
607
+ class ParseBlock < EndableParseState
608
+
609
+ def initialize(lexer,parent=nil)
610
+ super(lexer,parent)
611
+ @complexity = 1
612
+ @lbraces = Array.new
613
+ end
614
+
615
+ # Because the token for a block and hash right brace is the same,
616
+ # we need to track the hash left braces to determine when an end is
617
+ # encountered.
618
+ def parse_token(token)
619
+ if token.is_a?(TkLBRACE)
620
+ @lbraces.push(true)
621
+ elsif token.is_a?(TkRBRACE)
622
+ if @lbraces.empty?
623
+ do_right_brace_token(token)
624
+ #do_end_token(token)
625
+ else
626
+ @lbraces.pop
627
+ end
628
+ else
629
+ super(token)
630
+ end
631
+ end
632
+
633
+ def do_right_brace_token(token)
634
+ # we are done ? what about a hash in a block :-/
635
+ @run = false
636
+ nil
637
+ end
638
+
639
+ end
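+ # Illustrative case for the brace tracking above:
+ #   items.map { |x| { :id => x } }
+ # The inner hash's "{" is pushed onto @lbraces, so its matching "}" only
+ # pops the stack; the final "}" then ends this block state.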
640
+
641
+ # ------------ END Analyzer logic ------------------------------------
642
+
643
+ class Filter
644
+ attr_accessor :limit, :error, :warn
645
+
646
+ def initialize(limit = -1, error = 11, warn = 8)
647
+ @limit = limit
648
+ @error = error
649
+ @warn = warn
650
+ end
651
+
652
+ def ignore?(count)
653
+ count < @limit
654
+ end
655
+
656
+ def warn?(count)
657
+ count >= @warn
658
+ end
659
+
660
+ def error?(count)
661
+ count >= @error
662
+ end
663
+
664
+ end
665
+
666
+
667
+ class BaseFormater
668
+ attr_accessor :warnings, :errors, :current
669
+
670
+ def initialize(out, filter = nil)
671
+ @out = out
672
+ @filter = filter
673
+ reset_data
674
+ end
675
+
676
+ def warn_error?(num, marker)
677
+ klass = ""
678
+
679
+ if @filter.error?(num)
680
+ klass = ' class="error"'
681
+ @errors<< [@current, marker, num]
682
+ elsif @filter.warn?(num)
683
+ klass = ' class="warning"'
684
+ @warnings<< [@current, marker, num]
685
+ end
686
+
687
+ klass
688
+ end
689
+
690
+ def reset_data
691
+ @warnings = Array.new
692
+ @errors = Array.new
693
+ @current = ""
694
+ end
695
+
696
+ end
697
+
698
+ class TokenCounterFormater < BaseFormater
699
+
700
+ def start(new_out=nil)
701
+ reset_data
702
+ @out = new_out if new_out
703
+ @out.puts "Token Count"
704
+ end
705
+
706
+ def start_count(number_of_files)
707
+ @out.puts "Counting tokens for #{number_of_files} files."
708
+ end
709
+
710
+ def start_file(file_name)
711
+ @current = file_name
712
+ @out.puts "File:#{file_name}"
713
+ end
714
+
715
+ def line_token_count(line_number,number_of_tokens)
716
+ return if @filter.ignore?(number_of_tokens)
717
+ warn_error?(number_of_tokens, line_number)
718
+ @out.puts "Line:#{line_number} ; Tokens : #{number_of_tokens}"
719
+ end
720
+
721
+ def end_file
722
+ @out.puts ""
723
+ end
724
+
725
+ def end_count
726
+ end
727
+
728
+ def end
729
+ end
730
+
731
+ end
732
+
733
+ module HTMLStyleSheet
734
+ def HTMLStyleSheet.style_sheet
735
+ out = StringIO.new
736
+
737
+ out.puts "<style>"
738
+ out.puts 'body {'
739
+ out.puts ' margin: 20px;'
740
+ out.puts ' padding: 0;'
741
+ out.puts ' font-size: 12px;'
742
+ out.puts ' font-family: bitstream vera sans, verdana, arial, sans serif;'
743
+ out.puts ' background-color: #efefef;'
744
+ out.puts '}'
745
+ out.puts ''
746
+ out.puts 'table { '
747
+ out.puts ' border-collapse: collapse;'
748
+ out.puts ' /*border-spacing: 0;*/'
749
+ out.puts ' border: 1px solid #666;'
750
+ out.puts ' background-color: #fff;'
751
+ out.puts ' margin-bottom: 20px;'
752
+ out.puts '}'
753
+ out.puts ''
754
+ out.puts 'table, th, th+th, td, td+td {'
755
+ out.puts ' border: 1px solid #ccc;'
756
+ out.puts '}'
757
+ out.puts ''
758
+ out.puts 'table th {'
759
+ out.puts ' font-size: 12px;'
760
+ out.puts ' color: #fc0;'
761
+ out.puts ' padding: 4px 0;'
762
+ out.puts ' background-color: #336;'
763
+ out.puts '}'
764
+ out.puts ''
765
+ out.puts 'th, td {'
766
+ out.puts ' padding: 4px 10px;'
767
+ out.puts '}'
768
+ out.puts ''
769
+ out.puts 'td { '
770
+ out.puts ' font-size: 13px;'
771
+ out.puts '}'
772
+ out.puts ''
773
+ out.puts '.class_name {'
774
+ out.puts ' font-size: 17px;'
775
+ out.puts ' margin: 20px 0 0;'
776
+ out.puts '}'
777
+ out.puts ''
778
+ out.puts '.class_complexity {'
779
+ out.puts 'margin: 0 auto;'
780
+ out.puts '}'
781
+ out.puts ''
782
+ out.puts '.class_complexity>.class_complexity {'
783
+ out.puts ' margin: 0;'
784
+ out.puts '}'
785
+ out.puts ''
786
+ out.puts '.class_total_complexity, .class_total_lines, .start_token_count, .file_count {'
787
+ out.puts ' font-size: 13px;'
788
+ out.puts ' font-weight: bold;'
789
+ out.puts '}'
790
+ out.puts ''
791
+ out.puts '.class_total_complexity, .class_total_lines {'
792
+ out.puts ' color: #c00;'
793
+ out.puts '}'
794
+ out.puts ''
795
+ out.puts '.start_token_count, .file_count {'
796
+ out.puts ' color: #333;'
797
+ out.puts '}'
798
+ out.puts ''
799
+ out.puts '.warning {'
800
+ out.puts ' background-color: yellow;'
801
+ out.puts '}'
802
+ out.puts ''
803
+ out.puts '.error {'
804
+ out.puts ' background-color: #f00;'
805
+ out.puts '}'
806
+ out.puts "</style>"
807
+
808
+ out.string
809
+ end
810
+
811
+ def style_sheet
812
+ HTMLStyleSheet.style_sheet
813
+ end
814
+ end
815
+
816
+
817
+ class HTMLTokenCounterFormater < TokenCounterFormater
818
+ include HTMLStyleSheet
819
+
820
+ def start(new_out=nil)
821
+ reset_data
822
+ @out = new_out if new_out
823
+ @out.puts "<html>"
824
+ @out.puts style_sheet
825
+ @out.puts "<body>"
826
+ end
827
+
828
+ def start_count(number_of_files)
829
+ @out.puts "<div class=\"start_token_count\">"
830
+ @out.puts "Number of files: #{number_of_files}"
831
+ @out.puts "</div>"
832
+ end
833
+
834
+ def start_file(file_name)
835
+ @current = file_name
836
+ @out.puts "<div class=\"file_count\">"
837
+ @out.puts "<p class=\"file_name\">"
838
+ @out.puts "File: #{file_name}"
839
+ @out.puts "</p>"
840
+ @out.puts "<table width=\"100%\" border=\"1\">"
841
+ @out.puts "<tr><th>Line</th><th>Tokens</th></tr>"
842
+ end
843
+
844
+ def line_token_count(line_number,number_of_tokens)
845
+ return if @filter.ignore?(number_of_tokens)
846
+ klass = warn_error?(number_of_tokens, line_number)
847
+ @out.puts "<tr><td>#{line_number}</td><td#{klass}>#{number_of_tokens}</td></tr>"
848
+ end
849
+
850
+ def end_file
851
+ @out.puts "</table>"
852
+ end
853
+
854
+ def end_count
855
+ end
856
+
857
+ def end
858
+ @out.puts "</body>"
859
+ @out.puts "</html>"
860
+ end
861
+ end
862
+
863
+ class ParseStateFormater < BaseFormater
864
+
865
+ def start(new_out=nil)
866
+ reset_data
867
+ @out = new_out if new_out
868
+ end
869
+
870
+ def end
871
+ end
872
+
873
+ def start_class_compute_state(type_name,name,complexity,lines)
874
+ @current = name
875
+ @out.puts "-- START #{name} --"
876
+ @out.puts "Type:#{type_name} Name:#{name} Complexity:#{complexity} Lines:#{lines}"
877
+ end
878
+
879
+ def end_class_compute_state(name)
880
+ @out.puts "-- END #{name} --"
881
+ end
882
+
883
+ def def_compute_state(name,complexity,lines)
884
+ return if @filter.ignore?(complexity)
885
+ warn_error?(complexity, name)
886
+ @out.puts "Type:Def Name:#{name} Complexity:#{complexity} Lines:#{lines}"
887
+ end
888
+
889
+ end
890
+
891
+
892
+
893
+ class StateHTMLComplexityFormater < ParseStateFormater
894
+ include HTMLStyleSheet
895
+
896
+ def start(new_out=nil)
897
+ reset_data
898
+ @out = new_out if new_out
899
+ @out.puts "<html><head><title>Cyclometric Complexity</title></head>"
900
+ @out.puts style_sheet
901
+ @out.puts "<body>"
902
+ end
903
+
904
+ def end
905
+ @out.puts "</body>"
906
+ @out.puts "</html>"
907
+ end
908
+
909
+ def start_class_compute_state(type_name,name,complexity,lines)
910
+ @current = name
911
+ @out.puts "<div class=\"class_complexity\">"
912
+ @out.puts "<h2 class=\"class_name\">#{type_name} : #{name}</h2>"
913
+ @out.puts "<div class=\"class_total_complexity\">Total Complexity: #{complexity}</div>"
914
+ @out.puts "<div class=\"class_total_lines\">Total Lines: #{lines}</div>"
915
+ @out.puts "<table width=\"100%\" border=\"1\">"
916
+ @out.puts "<tr><th>Method</th><th>Complexity</th><th># Lines</th></tr>"
917
+ end
918
+
919
+ def end_class_compute_state(name)
920
+ @out.puts "</table>"
921
+ @out.puts "</div>"
922
+ end
923
+
924
+ def def_compute_state(name, complexity, lines)
925
+ return if @filter.ignore?(complexity)
926
+ klass = warn_error?(complexity, name)
927
+ @out.puts "<tr><td>#{name}</td><td#{klass}>#{complexity}</td><td>#{lines}</td></tr>"
928
+ end
929
+
930
+ end
931
+
932
+
933
+ module ResultIndexGenerator
934
+ def summarize_errors_and_warnings(enw, header)
935
+ return "" if enw.empty?
936
+ f = StringIO.new
937
+ erval = Hash.new { |h,k| h[k] = Array.new }
938
+ wval = Hash.new { |h,k| h[k] = Array.new }
939
+
940
+ enw.each do |fname, warnings, errors|
941
+ errors.each do |c,m,v|
942
+ erval[v] << [fname, c, m]
943
+ end
944
+ warnings.each do |c,m,v|
945
+ wval[v] << [fname, c, m]
946
+ end
947
+ end
948
+
949
+ f.puts "<h2 class=\"class_name\">Errors and Warnings</h2>"
950
+ f.puts "<table width=\"100%\" border=\"1\">"
951
+ f.puts header
952
+
953
+ f.puts print_summary_table_rows(erval, "error")
954
+ f.puts print_summary_table_rows(wval, "warning")
955
+ f.puts "</table>"
956
+
957
+ f.string
958
+ end
959
+
960
+ def print_summary_table_rows(ewvals, klass_type)
961
+ f = StringIO.new
962
+ ewvals.sort { |a,b| b <=> a}.each do |v, vals|
963
+ vals.sort.each do |fname, c, m|
964
+ f.puts "<tr><td><a href=\"./#{fname}\">#{c}</a></td><td>#{m}</td>"
965
+ f.puts "<td class=\"#{klass_type}\">#{v}</td></tr>"
966
+ end
967
+ end
968
+ f.string
969
+ end
970
+
971
+ def list_analyzed_files(files)
972
+ f = StringIO.new
973
+ f.puts "<h2 class=\"class_name\">Analyzed Files</h2>"
974
+ f.puts "<ul>"
975
+ files.each do |fname, warnings, errors|
976
+ readname = fname.split("_")[0...-1].join("_")
977
+ f.puts "<li>"
978
+ f.puts "<p class=\"file_name\"><a href=\"./#{fname}\">#{readname}</a>"
979
+ f.puts "</li>"
980
+ end
981
+ f.puts "</ul>"
982
+ f.string
983
+ end
984
+
985
+ def write_index(files, filename, title, header)
986
+ return if files.empty?
987
+
988
+ File.open(filename,"w") do |f|
989
+ f.puts "<html><head><title>#{title}</title></head>"
990
+ f.puts "#{HTMLStyleSheet.style_sheet}\n<body>"
991
+ f.puts "<h1>#{title}</h1>"
992
+
993
+ enw = files.find_all { |fn,w,e| (!w.empty? || !e.empty?) }
994
+
995
+ f.puts summarize_errors_and_warnings(enw, header)
996
+
997
+ f.puts "<hr/>"
998
+ f.puts list_analyzed_files(files)
999
+ f.puts "</body></html>"
1000
+ end
1001
+ end
1002
+
1003
+ def write_cyclo_index(files, output_dir)
1004
+ header = "<tr><th>Class</th><th>Method</th><th>Complexity</th></tr>"
1005
+ write_index(files,
1006
+ "#{output_dir}/index_cyclo.html",
1007
+ "Index for cyclomatic complexity",
1008
+ header)
1009
+ end
1010
+
1011
+ def write_token_index(files, output_dir)
1012
+ header = "<tr><th>File</th><th>Line #</th><th>Tokens</th></tr>"
1013
+ write_index(files,
1014
+ "#{output_dir}/index_token.html",
1015
+ "Index for tokens per line",
1016
+ header)
1017
+ end
1018
+
1019
+ end
1020
+
1021
+ module Saikuro
1022
+ def Saikuro.analyze(files, state_formater, token_count_formater, output_dir)
1023
+
1024
+ idx_states = Array.new
1025
+ idx_tokens = Array.new
1026
+
1027
+ # parse each file
1028
+ files.each do |file|
1029
+ begin
1030
+ STDOUT.puts "Parsing #{file}"
1031
+ # create top state
1032
+ top = ParseState.make_top_state
1033
+ STDOUT.puts "TOP State made" if $VERBOSE
1034
+ token_counter = TokenCounter.new
1035
+ ParseState.set_token_counter(token_counter)
1036
+ token_counter.set_current_file(file)
1037
+
1038
+ STDOUT.puts "Setting up Lexer" if $VERBOSE
1039
+ lexer = RubyLex.new
1040
+ # Turn this off, because it aborts when a syntax error is found...
1041
+ lexer.exception_on_syntax_error = false
1042
+ lexer.set_input(File.new(file,"r"))
1043
+ top.lexer = lexer
1044
+ STDOUT.puts "Parsing" if $VERBOSE
1045
+ top.parse
1046
+
1047
+
1048
+ fdir_path = seperate_file_from_path(file)
1049
+ FileUtils.makedirs("#{output_dir}/#{fdir_path}")
1050
+
1051
+ if state_formater
1052
+ # output results
1053
+ state_io = StringIO.new
1054
+ state_formater.start(state_io)
1055
+ top.compute_state(state_formater)
1056
+ state_formater.end
1057
+
1058
+ fname = "#{file}_cyclo.html"
1059
+ puts "writing cyclomatic #{file}" if $VERBOSE
1060
+ File.open("#{output_dir}/#{fname}","w") do |f|
1061
+ f.write state_io.string
1062
+ end
1063
+ idx_states<< [
1064
+ fname,
1065
+ state_formater.warnings.dup,
1066
+ state_formater.errors.dup,
1067
+ ]
1068
+ end
1069
+
1070
+ if token_count_formater
1071
+ token_io = StringIO.new
1072
+ token_count_formater.start(token_io)
1073
+ token_counter.list_tokens_per_line(token_count_formater)
1074
+ token_count_formater.end
1075
+
1076
+ fname = "#{file}_token.html"
1077
+ puts "writing token #{file}" if $VERBOSE
1078
+ File.open("#{output_dir}/#{fname}","w") do |f|
1079
+ f.write token_io.string
1080
+ end
1081
+ idx_tokens<< [
1082
+ fname,
1083
+ token_count_formater.warnings.dup,
1084
+ token_count_formater.errors.dup,
1085
+ ]
1086
+ end
1087
+
1088
+ rescue RubyLex::SyntaxError => synerr
1089
+ STDOUT.puts "Lexer error for file #{file} on line #{lexer.line_no}"
1090
+ STDOUT.puts "#{synerr.class.name} : #{synerr.message}"
1091
+ rescue StandardError => err
1092
+ STDOUT.puts "Error while parsing file : #{file}"
1093
+ STDOUT.puts err.class,err.message,err.backtrace.join("\n")
1094
+ rescue Exception => ex
1095
+ STDOUT.puts "Error while parsing file : #{file}"
1096
+ STDOUT.puts ex.class,ex.message,ex.backtrace.join("\n")
1097
+ end
1098
+ end
1099
+
1100
+ [idx_states, idx_tokens]
1101
+ end
1102
+ end
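+ # A minimal sketch of driving the analyzer programmatically, mirroring the
+ # defaults of the command line section below ("lib/example.rb" and "report"
+ # are hypothetical; note that analyze also relies on the
+ # seperate_file_from_path helper defined further down):
+ #
+ #   require 'stringio'
+ #   require 'fileutils'
+ #   state_formater = StateHTMLComplexityFormater.new(STDOUT, Filter.new(5))
+ #   token_formater = HTMLTokenCounterFormater.new(STDOUT, Filter.new(10, 25, 50))
+ #   idx_states, idx_tokens = Saikuro.analyze(["lib/example.rb"],
+ #                                            state_formater,
+ #                                            token_formater,
+ #                                            "report")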
1103
+
1104
+ if __FILE__ == $0
1105
+ require 'stringio'
1106
+ require 'getoptlong'
1107
+ require 'fileutils'
1108
+ require 'find'
1109
+ require 'rdoc/usage'
1110
+
1111
+ include ResultIndexGenerator
1112
+
1113
+ # Returns the directory portion of the path, without the file name
1114
+ def seperate_file_from_path(path)
1115
+ res = path.split("/")
1116
+ if res.size == 1
1117
+ ""
1118
+ else
1119
+ res[0..res.size - 2].join("/")
1120
+ end
1121
+ end
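+ # e.g. seperate_file_from_path("lib/saikuro/core.rb") #=> "lib/saikuro"
+ #      seperate_file_from_path("README")              #=> ""
+ # (paths here are illustrative)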
1122
+
1123
+ def get_ruby_files(path)
1124
+ files = Array.new
1125
+ Find.find(path) do |f|
1126
+ if !FileTest.directory?(f)
1127
+ if f =~ /rb$/
1128
+ files<< f
1129
+ end
1130
+ end
1131
+ end
1132
+ files
1133
+ end
1134
+
1135
+ files = Array.new
1136
+ output_dir = "./"
1137
+ formater = "html"
1138
+ state_filter = Filter.new(5)
1139
+ token_filter = Filter.new(10, 25, 50)
1140
+ comp_state = comp_token = false
1141
+ begin
1142
+ opt = GetoptLong.new(
1143
+ ["-o","--output_directory", GetoptLong::REQUIRED_ARGUMENT],
1144
+ ["-h","--help", GetoptLong::NO_ARGUMENT],
1145
+ ["-f","--formater", GetoptLong::REQUIRED_ARGUMENT],
1146
+ ["-c","--cyclo", GetoptLong::NO_ARGUMENT],
1147
+ ["-t","--token", GetoptLong::NO_ARGUMENT],
1148
+ ["-y","--filter_cyclo", GetoptLong::REQUIRED_ARGUMENT],
1149
+ ["-k","--filter_token", GetoptLong::REQUIRED_ARGUMENT],
1150
+ ["-w","--warn_cyclo", GetoptLong::REQUIRED_ARGUMENT],
1151
+ ["-s","--warn_token", GetoptLong::REQUIRED_ARGUMENT],
1152
+ ["-e","--error_cyclo", GetoptLong::REQUIRED_ARGUMENT],
1153
+ ["-d","--error_token", GetoptLong::REQUIRED_ARGUMENT],
1154
+ ["-p","--parse_file", GetoptLong::REQUIRED_ARGUMENT],
1155
+ ["-i","--input_directory", GetoptLong::REQUIRED_ARGUMENT],
1156
+ ["-v","--verbose", GetoptLong::NO_ARGUMENT]
1157
+ )
1158
+
1159
+ opt.each do |arg,val|
1160
+ case arg
1161
+ when "-o"
1162
+ output_dir = val
1163
+ when "-h"
1164
+ RDoc.usage('help')
1165
+ when "-f"
1166
+ formater = val
1167
+ when "-c"
1168
+ comp_state = true
1169
+ when "-t"
1170
+ comp_token = true
1171
+ when "-k"
1172
+ token_filter.limit = val.to_i
1173
+ when "-s"
1174
+ token_filter.warn = val.to_i
1175
+ when "-d"
1176
+ token_filter.error = val.to_i
1177
+ when "-y"
1178
+ state_filter.limit = val.to_i
1179
+ when "-w"
1180
+ state_filter.warn = val.to_i
1181
+ when "-e"
1182
+ state_filter.error = val.to_i
1183
+ when "-p"
1184
+ files<< val
1185
+ when "-i"
1186
+ files.concat(get_ruby_files(val))
1187
+ when "-v"
1188
+ STDOUT.puts "Verbose mode on"
1189
+ $VERBOSE = true
1190
+ end
1191
+
1192
+ end
1193
+ RDoc.usage if !comp_state && !comp_token
1194
+ rescue => err
1195
+ RDoc.usage
1196
+ end
1197
+
1198
+ if formater =~ /html/i
1199
+ state_formater = StateHTMLComplexityFormater.new(STDOUT,state_filter)
1200
+ token_count_formater = HTMLTokenCounterFormater.new(STDOUT,token_filter)
1201
+ else
1202
+ state_formater = ParseStateFormater.new(STDOUT,state_filter)
1203
+ token_count_formater = TokenCounterFormater.new(STDOUT,token_filter)
1204
+ end
1205
+
1206
+ state_formater = nil if !comp_state
1207
+ token_count_formater = nil if !comp_token
1208
+
1209
+ idx_states, idx_tokens = Saikuro.analyze(files,
1210
+ state_formater,
1211
+ token_count_formater,
1212
+ output_dir)
1213
+
1214
+ write_cyclo_index(idx_states, output_dir)
1215
+ write_token_index(idx_tokens, output_dir)
1216
+ end