cyclomagic 0.0.1

data/.gitignore ADDED
@@ -0,0 +1,4 @@
1
+ *.gem
2
+ .bundle
3
+ Gemfile.lock
4
+ pkg/*
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
+ source "http://rubygems.org"
2
+
3
+ # Specify your gem's dependencies in cyclomagic.gemspec
4
+ gemspec
data/README.md ADDED
@@ -0,0 +1 @@
1
+ # Cyclomagic - an attempt to build a working version of saikuro
data/Rakefile ADDED
@@ -0,0 +1 @@
1
+ require "bundler/gem_tasks"
data/bin/cyclomagic ADDED
@@ -0,0 +1,6 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ require 'cyclomagic'
4
+
5
+ Cyclomagic::CyclomagicCMDLineRunner.new.run
6
+
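The executable above just instantiates the command-line runner defined in lib/cyclomagic.rb below. Going by the GetoptLong table in that file, a typical invocation (the paths here are purely illustrative) looks like:

    cyclomagic -c -t -i lib -o reports

-c computes cyclomatic complexity, -t counts tokens per line, -i walks the given directory for .rb files, and -o sets the output directory, where the per-file HTML reports plus index_cyclo.html and index_token.html are written.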
data/cyclomagic.gemspec ADDED
@@ -0,0 +1,20 @@
1
+ # -*- encoding: utf-8 -*-
2
+ $:.push File.expand_path("../lib", __FILE__)
3
+ require "cyclomagic/version"
4
+
5
+ Gem::Specification.new do |s|
6
+ s.name = "cyclomagic"
7
+ s.version = Cyclomagic::VERSION
8
+ s.authors = ["dennyabraham"]
9
+ s.email = ["email@dennyabraham.com"]
10
+ s.homepage = ""
11
+ s.summary = "an attempt to build a working version of saikuro, the cyclomatic complexity analyzer"
12
+ s.description = "an attempt to build a working version of saikuro, the cyclomatic complexity analyzer"
13
+ s.default_executable = %q{cyclomagic}
14
+ s.executables = ["cyclomagic"]
15
+
16
+ s.files = `git ls-files`.split("\n")
17
+ s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
18
+ s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
19
+ s.require_paths = ["lib"]
20
+ end
data/lib/cyclomagic/version.rb ADDED
@@ -0,0 +1,3 @@
1
+ module Cyclomagic
2
+ VERSION = "0.0.1"
3
+ end
data/lib/cyclomagic.rb ADDED
@@ -0,0 +1,1137 @@
1
+ require "cyclomagic/version"
2
+
3
+ module Cyclomagic
4
+
5
+
6
+ require 'irb/ruby-lex'
7
+ require 'yaml'
8
+
9
+ # States to watch for
10
+ # once in a def, take the token after the space, because it may also
11
+ # be something like + or << for operator overloading.
12
+
13
+ # Counts the number of tokens in each line.
14
+ class TokenCounter
15
+ include RubyToken
16
+
17
+ attr_reader :current_file
18
+
19
+ def initialize
20
+ @files = Hash.new
21
+ @tokens_per_line = Hash.new(0)
22
+ @current_file = ""
23
+ end
24
+
25
+ # Mark file to associate with the token count.
26
+ def set_current_file(file)
27
+ @current_file = file
28
+ @tokens_per_line = Hash.new(0)
29
+ @files[@current_file] = @tokens_per_line
30
+ end
31
+
32
+ # Iterate through all tracked files, passing the
33
+ # provided formater the token counts.
34
+ def list_tokens_per_line(formater)
35
+ formater.start_count(@files.size)
36
+ @files.each do |fname, tok_per_line|
37
+ formater.start_file(fname)
38
+ tok_per_line.sort.each do |line,num|
39
+ formater.line_token_count(line,num)
40
+ end
41
+ formater.end_file
42
+ end
43
+ end
44
+
45
+ # Count the token for the passed line.
46
+ def count_token(line_no,token)
47
+ case token
48
+ when TkSPACE, TkNL, TkRD_COMMENT
49
+ # Do not count these as tokens
50
+ when TkCOMMENT
51
+ # Ignore this only for comments in a statement?
52
+ # Ignore TkCOLON,TkCOLON2 and operators? like "." etc..
53
+ when TkRBRACK, TkRPAREN, TkRBRACE
54
+ # Ignore the closing of an array/index/hash/paren
55
+ # The opening is counted, but no more.
56
+ # Thus [], (), and {} are each counted as 1 token, not 2.
57
+ else
58
+ # may want to filter out comments...
59
+ @tokens_per_line[line_no] += 1
60
+ end
61
+ end
62
+
63
+ end
64
+
65
+ # Main class and structure used to compute the
66
+ # cyclomatic complexity of Ruby programs.
67
+ class ParseState
68
+ include RubyToken
69
+ attr_accessor :name, :children, :complexity, :parent, :lines
70
+
71
+ @@top_state = nil
72
+ def ParseState.make_top_state()
73
+ @@top_state = ParseState.new(nil)
74
+ @@top_state.name = "__top__"
75
+ @@top_state
76
+ end
77
+
78
+ @@token_counter = TokenCounter.new
79
+ def ParseState.set_token_counter(counter)
80
+ @@token_counter = counter
81
+ end
82
+ def ParseState.get_token_counter
83
+ @@token_counter
84
+ end
85
+
86
+ def initialize(lexer,parent=nil)
87
+ @name = ""
88
+ @children = Array.new
89
+ @complexity = 0
90
+ @parent = parent
91
+ @lexer = lexer
92
+ @run = true
93
+ # To catch one-line def statements, we always have one line.
94
+ @lines = 0
95
+ @last_token_line_and_char = Array.new
96
+ end
97
+
98
+ def top_state?
99
+ self == @@top_state
100
+ end
101
+
102
+ def lexer=(lexer)
103
+ @run = true
104
+ @lexer = lexer
105
+ end
106
+
107
+ def make_state(type,parent = nil)
108
+ cstate = type.new(@lexer,self)
109
+ parent.children<< cstate
110
+ cstate
111
+ end
112
+
113
+ def calc_complexity
114
+ complexity = @complexity
115
+ children.each do |child|
116
+ complexity += child.calc_complexity
117
+ end
118
+ complexity
119
+ end
120
+
121
+ def calc_lines
122
+ lines = @lines
123
+ children.each do |child|
124
+ lines += child.calc_lines
125
+ end
126
+ lines
127
+ end
128
+
129
+ def compute_state(formater)
130
+ if top_state?
131
+ compute_state_for_global(formater)
132
+ end
133
+
134
+ @children.each do |s|
135
+ s.compute_state(formater)
136
+ end
137
+ end
138
+
139
+ def compute_state_for_global(formater)
140
+ global_def, @children = @children.partition do |s|
141
+ !s.kind_of?(ParseClass)
142
+ end
143
+ return if global_def.empty?
144
+ gx = global_def.inject(0) { |c,s| c + s.calc_complexity }
145
+ gl = global_def.inject(0) { |c,s| c + s.calc_lines }
146
+ formater.start_class_compute_state("Global", "", gx, gl)
147
+ global_def.each do |s|
148
+ s.compute_state(formater)
149
+ end
150
+ formater.end_class_compute_state("")
151
+ end
152
+
153
+ # Count the parsed tokens if true, else ignore them.
154
+ def count_tokens?
155
+ true
156
+ end
157
+
158
+ def parse
159
+ while @run do
160
+ tok = @lexer.token
161
+ @run = false if tok.nil?
162
+ if lexer_loop?(tok)
163
+ STDERR.puts "Lexer loop at line : #{@lexer.line_no} char #{@lexer.char_no}."
164
+ @run = false
165
+ end
166
+ @last_token_line_and_char<< [@lexer.line_no.to_i, @lexer.char_no.to_i, tok]
167
+ if $VERBOSE
168
+ puts "DEBUG: #{@lexer.line_no} #{tok.class}:#{tok.name if tok.respond_to?(:name)}"
169
+ end
170
+ @@token_counter.count_token(@lexer.line_no, tok) if count_tokens?
171
+ parse_token(tok)
172
+ end
173
+ end
174
+
175
+ # Ruby-Lexer can go into a loop if the file does not end with a newline.
176
+ def lexer_loop?(token)
177
+ return false if @last_token_line_and_char.empty?
178
+ loop_flag = false
179
+ last = @last_token_line_and_char.last
180
+ line = last[0]
181
+ char = last[1]
182
+ ltok = last[2]
183
+
184
+ if ( (line == @lexer.line_no.to_i) &&
185
+ (char == @lexer.char_no.to_i) &&
186
+ (ltok.class == token.class) )
187
+ # We are potentially in a loop
188
+ if @last_token_line_and_char.size >= 3
189
+ loop_flag = true
190
+ end
191
+ else
192
+ # Not in a loop so clear stack
193
+ @last_token_line_and_char = Array.new
194
+ end
195
+
196
+ loop_flag
197
+ end
198
+
199
+ def do_begin_token(token)
200
+ make_state(EndableParseState, self)
201
+ end
202
+
203
+ def do_class_token(token)
204
+ make_state(ParseClass,self)
205
+ end
206
+
207
+ def do_module_token(token)
208
+ make_state(ParseModule,self)
209
+ end
210
+
211
+ def do_def_token(token)
212
+ make_state(ParseDef,self)
213
+ end
214
+
215
+ def do_constant_token(token)
216
+ nil
217
+ end
218
+
219
+ def do_identifier_token(token)
220
+ if (token.name == "__END__" && token.char_no.to_i == 0)
221
+ # The Ruby code has stopped and the rest is data so cease parsing.
222
+ @run = false
223
+ end
224
+ nil
225
+ end
226
+
227
+ def do_right_brace_token(token)
228
+ nil
229
+ end
230
+
231
+ def do_end_token(token)
232
+ end_debug
233
+ nil
234
+ end
235
+
236
+ def do_block_token(token)
237
+ make_state(ParseBlock,self)
238
+ end
239
+
240
+ def do_conditional_token(token)
241
+ make_state(ParseCond,self)
242
+ end
243
+
244
+ def do_conditional_do_control_token(token)
245
+ make_state(ParseDoCond,self)
246
+ end
247
+
248
+ def do_case_token(token)
249
+ make_state(EndableParseState, self)
250
+ end
251
+
252
+ def do_one_line_conditional_token(token)
253
+ # This is an if with no end
254
+ @complexity += 1
255
+ #STDOUT.puts "got IF_MOD: #{self.to_yaml}" if $VERBOSE
256
+ #if state.type != "class" && state.type != "def" && state.type != "cond"
257
+ #STDOUT.puts "Changing IF_MOD Parent" if $VERBOSE
258
+ #state = state.parent
259
+ #@run = false
260
+ nil
261
+ end
262
+
263
+ def do_else_token(token)
264
+ STDOUT.puts "Ignored/Unknown Token:#{token.class}" if $VERBOSE
265
+ nil
266
+ end
267
+
268
+ def do_comment_token(token)
269
+ make_state(ParseComment, self)
270
+ end
271
+
272
+ def do_symbol_token(token)
273
+ make_state(ParseSymbol, self)
274
+ end
275
+
276
+ def parse_token(token)
277
+ state = nil
278
+ case token
279
+ when TkCLASS
280
+ state = do_class_token(token)
281
+ when TkMODULE
282
+ state = do_module_token(token)
283
+ when TkDEF
284
+ state = do_def_token(token)
285
+ when TkCONSTANT
286
+ # Nothing to do with a constant at top level?
287
+ state = do_constant_token(token)
288
+ when TkIDENTIFIER,TkFID
289
+ # Nothing to do at top level?
290
+ state = do_identifier_token(token)
291
+ when TkRBRACE
292
+ # Nothing to do at top level
293
+ state = do_right_brace_token(token)
294
+ when TkEND
295
+ state = do_end_token(token)
296
+ # At top level this might be an error...
297
+ when TkDO,TkfLBRACE
298
+ state = do_block_token(token)
299
+ when TkIF,TkUNLESS
300
+ state = do_conditional_token(token)
301
+ when TkWHILE,TkUNTIL,TkFOR
302
+ state = do_conditional_do_control_token(token)
303
+ when TkELSIF #,TkELSE
304
+ @complexity += 1
305
+ when TkELSE
306
+ # Else does not increase complexity
307
+ when TkCASE
308
+ state = do_case_token(token)
309
+ when TkWHEN
310
+ @complexity += 1
311
+ when TkBEGIN
312
+ state = do_begin_token(token)
313
+ when TkRESCUE
314
+ # Maybe this should add complexity and not begin
315
+ @complexity += 1
316
+ when TkIF_MOD, TkUNLESS_MOD, TkUNTIL_MOD, TkWHILE_MOD, TkQUESTION
317
+ state = do_one_line_conditional_token(token)
318
+ when TkNL
319
+ #
320
+ @lines += 1
321
+ when TkRETURN
322
+ # Early returns do not increase complexity as the condition that
323
+ # calls the return is the one that increases it.
324
+ when TkCOMMENT
325
+ state = do_comment_token(token)
326
+ when TkSYMBEG
327
+ state = do_symbol_token(token)
328
+ when TkError
329
+ STDOUT.puts "Lexer received an error for line #{@lexer.line_no} char #{@lexer.char_no}"
330
+ else
331
+ state = do_else_token(token)
332
+ end
333
+ state.parse if state
334
+ end
335
+
336
+ def end_debug
337
+ STDOUT.puts "got an end: #{@name} in #{self.class.name}" if $VERBOSE
338
+ if @parent.nil?
339
+ STDOUT.puts "DEBUG: Line #{@lexer.line_no}"
340
+ STDOUT.puts "DEBUG: #{@name}; #{self.class}"
341
+ # to_yaml can cause an infinite loop?
342
+ #STDOUT.puts "TOP: #{@@top_state.to_yaml}"
343
+ #STDOUT.puts "TOP: #{@@top_state.inspect}"
344
+
345
+ # This may not be an error?
346
+ #exit 1
347
+ end
348
+ end
349
+
350
+ end
351
+
352
+ # Read and consume tokens in comments until a new line.
353
+ class ParseComment < ParseState
354
+
355
+ # While in a comment state do not count the tokens.
356
+ def count_tokens?
357
+ false
358
+ end
359
+
360
+ def parse_token(token)
361
+ if token.is_a?(TkNL)
362
+ @lines += 1
363
+ @run = false
364
+ end
365
+ end
366
+ end
367
+
368
+ class ParseSymbol < ParseState
369
+ def initialize(lexer, parent = nil)
370
+ super
371
+ STDOUT.puts "STARTING SYMBOL" if $VERBOSE
372
+ end
373
+
374
+ def parse_token(token)
375
+ STDOUT.puts "Symbol's token is #{token.class}" if $VERBOSE
376
+ # Consume the next token and stop
377
+ @run = false
378
+ nil
379
+ end
380
+ end
381
+
382
+ class EndableParseState < ParseState
383
+ def initialize(lexer,parent=nil)
384
+ super(lexer,parent)
385
+ STDOUT.puts "Starting #{self.class}" if $VERBOSE
386
+ end
387
+
388
+ def do_end_token(token)
389
+ end_debug
390
+ @run = false
391
+ nil
392
+ end
393
+ end
394
+
395
+ class ParseClass < EndableParseState
396
+ def initialize(lexer,parent=nil)
397
+ super(lexer,parent)
398
+ @type_name = "Class"
399
+ end
400
+
401
+ def do_constant_token(token)
402
+ @name = token.name if @name.empty?
403
+ nil
404
+ end
405
+
406
+ def compute_state(formater)
407
+ # Separate the Module and Class children out
408
+ cnm_children, @children = @children.partition do |child|
409
+ child.kind_of?(ParseClass)
410
+ end
411
+
412
+ formater.start_class_compute_state(@type_name,@name,self.calc_complexity,self.calc_lines)
413
+ super(formater)
414
+ formater.end_class_compute_state(@name)
415
+
416
+ cnm_children.each do |child|
417
+ child.name = @name + "::" + child.name
418
+ child.compute_state(formater)
419
+ end
420
+ end
421
+ end
422
+
423
+ class ParseModule < ParseClass
424
+ def initialize(lexer,parent=nil)
425
+ super(lexer,parent)
426
+ @type_name = "Module"
427
+ end
428
+ end
429
+
430
+ class ParseDef < EndableParseState
431
+
432
+ def initialize(lexer,parent=nil)
433
+ super(lexer,parent)
434
+ @complexity = 1
435
+ @looking_for_name = true
436
+ @first_space = true
437
+ end
438
+
439
+ # This way I don't need to list all possible overload
440
+ # tokens.
441
+ def create_def_name(token)
442
+ case token
443
+ when TkSPACE
444
+ # mark first space so we can stop at next space
445
+ if @first_space
446
+ @first_space = false
447
+ else
448
+ @looking_for_name = false
449
+ end
450
+ when TkNL,TkLPAREN,TkfLPAREN,TkSEMICOLON
451
+ # we can also stop at a new line or left parenthesis
452
+ @looking_for_name = false
453
+ when TkDOT
454
+ @name<< "."
455
+ when TkCOLON2
456
+ @name<< "::"
457
+ when TkASSIGN
458
+ @name<< "="
459
+ when TkfLBRACK
460
+ @name<< "["
461
+ when TkRBRACK
462
+ @name<< "]"
463
+ else
464
+ begin
465
+ @name<< token.name.to_s
466
+ rescue Exception => err
467
+ #what is this?
468
+ STDOUT.puts @@token_counter.current_file
469
+ STDOUT.puts @name
470
+ STDOUT.puts token.inspect
471
+ STDOUT.puts err.message
472
+ exit 1
473
+ end
474
+ end
475
+ end
476
+
477
+ def parse_token(token)
478
+ if @looking_for_name
479
+ create_def_name(token)
480
+ end
481
+ super(token)
482
+ end
483
+
484
+ def compute_state(formater)
485
+ formater.def_compute_state(@name, self.calc_complexity, self.calc_lines)
486
+ super(formater)
487
+ end
488
+ end
489
+
490
+ class ParseCond < EndableParseState
491
+ def initialize(lexer,parent=nil)
492
+ super(lexer,parent)
493
+ @complexity = 1
494
+ end
495
+ end
496
+
497
+ class ParseDoCond < ParseCond
498
+ def initialize(lexer,parent=nil)
499
+ super(lexer,parent)
500
+ @looking_for_new_line = true
501
+ end
502
+
503
+ # Need to consume the do that can appear at the
504
+ # end of these control structures.
505
+ def parse_token(token)
506
+ if @looking_for_new_line
507
+ if token.is_a?(TkDO)
508
+ nil
509
+ else
510
+ if token.is_a?(TkNL)
511
+ @looking_for_new_line = false
512
+ end
513
+ super(token)
514
+ end
515
+ else
516
+ super(token)
517
+ end
518
+ end
519
+
520
+ end
521
+
522
+ class ParseBlock < EndableParseState
523
+
524
+ def initialize(lexer,parent=nil)
525
+ super(lexer,parent)
526
+ @complexity = 1
527
+ @lbraces = Array.new
528
+ end
529
+
530
+ # Because the token for a block and hash right brace is the same,
531
+ # we need to track the hash left braces to determine when an end is
532
+ # encountered.
533
+ def parse_token(token)
534
+ if token.is_a?(TkLBRACE)
535
+ @lbraces.push(true)
536
+ elsif token.is_a?(TkRBRACE)
537
+ if @lbraces.empty?
538
+ do_right_brace_token(token)
539
+ #do_end_token(token)
540
+ else
541
+ @lbraces.pop
542
+ end
543
+ else
544
+ super(token)
545
+ end
546
+ end
547
+
548
+ def do_right_brace_token(token)
549
+ # we are done ? what about a hash in a block :-/
550
+ @run = false
551
+ nil
552
+ end
553
+
554
+ end
555
+
556
+ # ------------ END Analyzer logic ------------------------------------
557
+
558
+ class Filter
559
+ attr_accessor :limit, :error, :warn
560
+
561
+ def initialize(limit = -1, error = 11, warn = 8)
562
+ @limit = limit
563
+ @error = error
564
+ @warn = warn
565
+ end
566
+
567
+ def ignore?(count)
568
+ count < @limit
569
+ end
570
+
571
+ def warn?(count)
572
+ count >= @warn
573
+ end
574
+
575
+ def error?(count)
576
+ count >= @error
577
+ end
578
+
579
+ end
580
+
581
+
582
+ class BaseFormater
583
+ attr_accessor :warnings, :errors, :current
584
+
585
+ def initialize(out, filter = nil)
586
+ @out = out
587
+ @filter = filter
588
+ reset_data
589
+ end
590
+
591
+ def warn_error?(num, marker)
592
+ klass = ""
593
+
594
+ if @filter.error?(num)
595
+ klass = ' class="error"'
596
+ @errors<< [@current, marker, num]
597
+ elsif @filter.warn?(num)
598
+ klass = ' class="warning"'
599
+ @warnings<< [@current, marker, num]
600
+ end
601
+
602
+ klass
603
+ end
604
+
605
+ def reset_data
606
+ @warnings = Array.new
607
+ @errors = Array.new
608
+ @current = ""
609
+ end
610
+
611
+ end
612
+
613
+ class TokenCounterFormater < BaseFormater
614
+
615
+ def start(new_out=nil)
616
+ reset_data
617
+ @out = new_out if new_out
618
+ @out.puts "Token Count"
619
+ end
620
+
621
+ def start_count(number_of_files)
622
+ @out.puts "Counting tokens for #{number_of_files} files."
623
+ end
624
+
625
+ def start_file(file_name)
626
+ @current = file_name
627
+ @out.puts "File:#{file_name}"
628
+ end
629
+
630
+ def line_token_count(line_number,number_of_tokens)
631
+ return if @filter.ignore?(number_of_tokens)
632
+ warn_error?(number_of_tokens, line_number)
633
+ @out.puts "Line:#{line_number} ; Tokens : #{number_of_tokens}"
634
+ end
635
+
636
+ def end_file
637
+ @out.puts ""
638
+ end
639
+
640
+ def end_count
641
+ end
642
+
643
+ def end
644
+ end
645
+
646
+ end
647
+
648
+ module HTMLStyleSheet
649
+ def HTMLStyleSheet.style_sheet
650
+ out = StringIO.new
651
+
652
+ out.puts "<style>"
653
+ out.puts 'body {'
654
+ out.puts ' margin: 20px;'
655
+ out.puts ' padding: 0;'
656
+ out.puts ' font-size: 12px;'
657
+ out.puts ' font-family: bitstream vera sans, verdana, arial, sans serif;'
658
+ out.puts ' background-color: #efefef;'
659
+ out.puts '}'
660
+ out.puts ''
661
+ out.puts 'table { '
662
+ out.puts ' border-collapse: collapse;'
663
+ out.puts ' /*border-spacing: 0;*/'
664
+ out.puts ' border: 1px solid #666;'
665
+ out.puts ' background-color: #fff;'
666
+ out.puts ' margin-bottom: 20px;'
667
+ out.puts '}'
668
+ out.puts ''
669
+ out.puts 'table, th, th+th, td, td+td {'
670
+ out.puts ' border: 1px solid #ccc;'
671
+ out.puts '}'
672
+ out.puts ''
673
+ out.puts 'table th {'
674
+ out.puts ' font-size: 12px;'
675
+ out.puts ' color: #fc0;'
676
+ out.puts ' padding: 4px 0;'
677
+ out.puts ' background-color: #336;'
678
+ out.puts '}'
679
+ out.puts ''
680
+ out.puts 'th, td {'
681
+ out.puts ' padding: 4px 10px;'
682
+ out.puts '}'
683
+ out.puts ''
684
+ out.puts 'td { '
685
+ out.puts ' font-size: 13px;'
686
+ out.puts '}'
687
+ out.puts ''
688
+ out.puts '.class_name {'
689
+ out.puts ' font-size: 17px;'
690
+ out.puts ' margin: 20px 0 0;'
691
+ out.puts '}'
692
+ out.puts ''
693
+ out.puts '.class_complexity {'
694
+ out.puts 'margin: 0 auto;'
695
+ out.puts '}'
696
+ out.puts ''
697
+ out.puts '.class_complexity>.class_complexity {'
698
+ out.puts ' margin: 0;'
699
+ out.puts '}'
700
+ out.puts ''
701
+ out.puts '.class_total_complexity, .class_total_lines, .start_token_count, .file_count {'
702
+ out.puts ' font-size: 13px;'
703
+ out.puts ' font-weight: bold;'
704
+ out.puts '}'
705
+ out.puts ''
706
+ out.puts '.class_total_complexity, .class_total_lines {'
707
+ out.puts ' color: #c00;'
708
+ out.puts '}'
709
+ out.puts ''
710
+ out.puts '.start_token_count, .file_count {'
711
+ out.puts ' color: #333;'
712
+ out.puts '}'
713
+ out.puts ''
714
+ out.puts '.warning {'
715
+ out.puts ' background-color: yellow;'
716
+ out.puts '}'
717
+ out.puts ''
718
+ out.puts '.error {'
719
+ out.puts ' background-color: #f00;'
720
+ out.puts '}'
721
+ out.puts "</style>"
722
+
723
+ out.string
724
+ end
725
+
726
+ def style_sheet
727
+ HTMLStyleSheet.style_sheet
728
+ end
729
+ end
730
+
731
+
732
+ class HTMLTokenCounterFormater < TokenCounterFormater
733
+ include HTMLStyleSheet
734
+
735
+ def start(new_out=nil)
736
+ reset_data
737
+ @out = new_out if new_out
738
+ @out.puts "<html>"
739
+ @out.puts style_sheet
740
+ @out.puts "<body>"
741
+ end
742
+
743
+ def start_count(number_of_files)
744
+ @out.puts "<div class=\"start_token_count\">"
745
+ @out.puts "Number of files: #{number_of_files}"
746
+ @out.puts "</div>"
747
+ end
748
+
749
+ def start_file(file_name)
750
+ @current = file_name
751
+ @out.puts "<div class=\"file_count\">"
752
+ @out.puts "<p class=\"file_name\">"
753
+ @out.puts "File: #{file_name}"
754
+ @out.puts "</p>"
755
+ @out.puts "<table width=\"100%\" border=\"1\">"
756
+ @out.puts "<tr><th>Line</th><th>Tokens</th></tr>"
757
+ end
758
+
759
+ def line_token_count(line_number,number_of_tokens)
760
+ return if @filter.ignore?(number_of_tokens)
761
+ klass = warn_error?(number_of_tokens, line_number)
762
+ @out.puts "<tr><td>#{line_number}</td><td#{klass}>#{number_of_tokens}</td></tr>"
763
+ end
764
+
765
+ def end_file
766
+ @out.puts "</table>"
767
+ end
768
+
769
+ def end_count
770
+ end
771
+
772
+ def end
773
+ @out.puts "</body>"
774
+ @out.puts "</html>"
775
+ end
776
+ end
777
+
778
+ class ParseStateFormater < BaseFormater
779
+
780
+ def start(new_out=nil)
781
+ reset_data
782
+ @out = new_out if new_out
783
+ end
784
+
785
+ def end
786
+ end
787
+
788
+ def start_class_compute_state(type_name,name,complexity,lines)
789
+ @current = name
790
+ @out.puts "-- START #{name} --"
791
+ @out.puts "Type:#{type_name} Name:#{name} Complexity:#{complexity} Lines:#{lines}"
792
+ end
793
+
794
+ def end_class_compute_state(name)
795
+ @out.puts "-- END #{name} --"
796
+ end
797
+
798
+ def def_compute_state(name,complexity,lines)
799
+ return if @filter.ignore?(complexity)
800
+ warn_error?(complexity, name)
801
+ @out.puts "Type:Def Name:#{name} Complexity:#{complexity} Lines:#{lines}"
802
+ end
803
+
804
+ end
805
+
806
+
807
+
808
+ class StateHTMLComplexityFormater < ParseStateFormater
809
+ include HTMLStyleSheet
810
+
811
+ def start(new_out=nil)
812
+ reset_data
813
+ @out = new_out if new_out
814
+ @out.puts "<html><head><title>Cyclometric Complexity</title></head>"
815
+ @out.puts style_sheet
816
+ @out.puts "<body>"
817
+ end
818
+
819
+ def end
820
+ @out.puts "</body>"
821
+ @out.puts "</html>"
822
+ end
823
+
824
+ def start_class_compute_state(type_name,name,complexity,lines)
825
+ @current = name
826
+ @out.puts "<div class=\"class_complexity\">"
827
+ @out.puts "<h2 class=\"class_name\">#{type_name} : #{name}</h2>"
828
+ @out.puts "<div class=\"class_total_complexity\">Total Complexity: #{complexity}</div>"
829
+ @out.puts "<div class=\"class_total_lines\">Total Lines: #{lines}</div>"
830
+ @out.puts "<table width=\"100%\" border=\"1\">"
831
+ @out.puts "<tr><th>Method</th><th>Complexity</th><th># Lines</th></tr>"
832
+ end
833
+
834
+ def end_class_compute_state(name)
835
+ @out.puts "</table>"
836
+ @out.puts "</div>"
837
+ end
838
+
839
+ def def_compute_state(name, complexity, lines)
840
+ return if @filter.ignore?(complexity)
841
+ klass = warn_error?(complexity, name)
842
+ @out.puts "<tr><td>#{name}</td><td#{klass}>#{complexity}</td><td>#{lines}</td></tr>"
843
+ end
844
+
845
+ end
846
+
847
+
848
+ module ResultIndexGenerator
849
+ def summarize_errors_and_warnings(enw, header)
850
+ return "" if enw.empty?
851
+ f = StringIO.new
852
+ erval = Hash.new { |h,k| h[k] = Array.new }
853
+ wval = Hash.new { |h,k| h[k] = Array.new }
854
+
855
+ enw.each do |fname, warnings, errors|
856
+ errors.each do |c,m,v|
857
+ erval[v] << [fname, c, m]
858
+ end
859
+ warnings.each do |c,m,v|
860
+ wval[v] << [fname, c, m]
861
+ end
862
+ end
863
+
864
+ f.puts "<h2 class=\"class_name\">Errors and Warnings</h2>"
865
+ f.puts "<table width=\"100%\" border=\"1\">"
866
+ f.puts header
867
+
868
+ f.puts print_summary_table_rows(erval, "error")
869
+ f.puts print_summary_table_rows(wval, "warning")
870
+ f.puts "</table>"
871
+
872
+ f.string
873
+ end
874
+
875
+ def print_summary_table_rows(ewvals, klass_type)
876
+ f = StringIO.new
877
+ ewvals.sort { |a,b| b <=> a}.each do |v, vals|
878
+ vals.sort.each do |fname, c, m|
879
+ f.puts "<tr><td><a href=\"./#{fname}\">#{c}</a></td><td>#{m}</td>"
880
+ f.puts "<td class=\"#{klass_type}\">#{v}</td></tr>"
881
+ end
882
+ end
883
+ f.string
884
+ end
885
+
886
+ def list_analyzed_files(files)
887
+ f = StringIO.new
888
+ f.puts "<h2 class=\"class_name\">Analyzed Files</h2>"
889
+ f.puts "<ul>"
890
+ files.each do |fname, warnings, errors|
891
+ readname = fname.split("_")[0...-1].join("_")
892
+ f.puts "<li>"
893
+ f.puts "<p class=\"file_name\"><a href=\"./#{fname}\">#{readname}</a>"
894
+ f.puts "</li>"
895
+ end
896
+ f.puts "</ul>"
897
+ f.string
898
+ end
899
+
900
+ def write_index(files, filename, title, header)
901
+ return if files.empty?
902
+
903
+ File.open(filename,"w") do |f|
904
+ f.puts "<html><head><title>#{title}</title></head>"
905
+ f.puts "#{HTMLStyleSheet.style_sheet}\n<body>"
906
+ f.puts "<h1>#{title}</h1>"
907
+
908
+ enw = files.find_all { |fn,w,e| (!w.empty? || !e.empty?) }
909
+
910
+ f.puts summarize_errors_and_warnings(enw, header)
911
+
912
+ f.puts "<hr/>"
913
+ f.puts list_analyzed_files(files)
914
+ f.puts "</body></html>"
915
+ end
916
+ end
917
+
918
+ def write_cyclo_index(files, output_dir)
919
+ header = "<tr><th>Class</th><th>Method</th><th>Complexity</th></tr>"
920
+ write_index(files,
921
+ "#{output_dir}/index_cyclo.html",
922
+ "Index for cyclomatic complexity",
923
+ header)
924
+ end
925
+
926
+ def write_token_index(files, output_dir)
927
+ header = "<tr><th>File</th><th>Line #</th><th>Tokens</th></tr>"
928
+ write_index(files,
929
+ "#{output_dir}/index_token.html",
930
+ "Index for tokens per line",
931
+ header)
932
+ end
933
+
934
+ end
935
+
936
+ module Cyclomagic
937
+
938
+ # Returns the path without the file name
939
+ def Cyclomagic.seperate_file_from_path(path)
940
+ res = path.split("/")
941
+ if res.size == 1
942
+ ""
943
+ else
944
+ res[0..res.size - 2].join("/")
945
+ end
946
+ end
947
+
948
+ def Cyclomagic.analyze(files, state_formater, token_count_formater, output_dir)
949
+
950
+ idx_states = Array.new
951
+ idx_tokens = Array.new
952
+
953
+ # parse each file
954
+ files.each do |file|
955
+ begin
956
+ STDOUT.puts "Parsing #{file}"
957
+ # create top state
958
+ top = ParseState.make_top_state
959
+ STDOUT.puts "TOP State made" if $VERBOSE
960
+ token_counter = TokenCounter.new
961
+ ParseState.set_token_counter(token_counter)
962
+ token_counter.set_current_file(file)
963
+
964
+ STDOUT.puts "Setting up Lexer" if $VERBOSE
965
+ lexer = RubyLex.new
966
+ # Turn this off, because it aborts when a syntax error is found...
967
+ lexer.exception_on_syntax_error = false
968
+ lexer.set_input(File.new(file,"r"))
969
+ top.lexer = lexer
970
+ STDOUT.puts "Parsing" if $VERBOSE
971
+ top.parse
972
+
973
+
974
+ fdir_path = seperate_file_from_path(file)
975
+ FileUtils.makedirs("#{output_dir}/#{fdir_path}")
976
+
977
+ if state_formater
978
+ # output results
979
+ state_io = StringIO.new
980
+ state_formater.start(state_io)
981
+ top.compute_state(state_formater)
982
+ state_formater.end
983
+
984
+ fname = "#{file}_cyclo.html"
985
+ puts "writing cyclomatic #{file}" if $VERBOSE
986
+ File.open("#{output_dir}/#{fname}","w") do |f|
987
+ f.write state_io.string
988
+ end
989
+ idx_states<< [
990
+ fname,
991
+ state_formater.warnings.dup,
992
+ state_formater.errors.dup,
993
+ ]
994
+ end
995
+
996
+ if token_count_formater
997
+ token_io = StringIO.new
998
+ token_count_formater.start(token_io)
999
+ token_counter.list_tokens_per_line(token_count_formater)
1000
+ token_count_formater.end
1001
+
1002
+ fname = "#{file}_token.html"
1003
+ puts "writing token #{file}" if $VERBOSE
1004
+ File.open("#{output_dir}/#{fname}","w") do |f|
1005
+ f.write token_io.string
1006
+ end
1007
+ idx_tokens<< [
1008
+ fname,
1009
+ token_count_formater.warnings.dup,
1010
+ token_count_formater.errors.dup,
1011
+ ]
1012
+ end
1013
+
1014
+ rescue RubyLex::SyntaxError => synerr
1015
+ STDOUT.puts "Lexer error for file #{file} on line #{lexer.line_no}"
1016
+ STDOUT.puts "#{synerr.class.name} : #{synerr.message}"
1017
+ rescue StandardError => err
1018
+ STDOUT.puts "Error while parsing file : #{file}"
1019
+ STDOUT.puts err.class,err.message,err.backtrace.join("\n")
1020
+ rescue Exception => ex
1021
+ STDOUT.puts "Error while parsing file : #{file}"
1022
+ STDOUT.puts ex.class,ex.message,ex.backtrace.join("\n")
1023
+ end
1024
+ end
1025
+
1026
+ [idx_states, idx_tokens]
1027
+ end
1028
+ end
1029
+
1030
+
1031
+ # Really ugly command line runner stuff here for now
1032
+
1033
+ class CyclomagicCMDLineRunner
1034
+ require 'stringio'
1035
+ require 'getoptlong'
1036
+ require 'fileutils'
1037
+ require 'find'
1038
+
1039
+
1040
+ include ResultIndexGenerator
1041
+
1042
+ def get_ruby_files(input_path)
1043
+ files = Array.new
1044
+ input_path.split("|").each do |path|
1045
+ Find.find(path.strip) do |f|
1046
+ files << f if !FileTest.directory?(f) && f =~ /\.rb$/
1047
+ end
1048
+ end
1049
+ files
1050
+ end
1051
+
1052
+ def run
1053
+ files = Array.new
1054
+ output_dir = "./"
1055
+ formater = "html"
1056
+ state_filter = Filter.new(5)
1057
+ token_filter = Filter.new(10, 25, 50)
1058
+ comp_state = comp_token = false
1059
+ begin
1060
+ opt = GetoptLong.new(
1061
+ ["-o","--output_directory", GetoptLong::REQUIRED_ARGUMENT],
1062
+ ["-h","--help", GetoptLong::NO_ARGUMENT],
1063
+ ["-f","--formater", GetoptLong::REQUIRED_ARGUMENT],
1064
+ ["-c","--cyclo", GetoptLong::NO_ARGUMENT],
1065
+ ["-t","--token", GetoptLong::NO_ARGUMENT],
1066
+ ["-y","--filter_cyclo", GetoptLong::REQUIRED_ARGUMENT],
1067
+ ["-k","--filter_token", GetoptLong::REQUIRED_ARGUMENT],
1068
+ ["-w","--warn_cyclo", GetoptLong::REQUIRED_ARGUMENT],
1069
+ ["-s","--warn_token", GetoptLong::REQUIRED_ARGUMENT],
1070
+ ["-e","--error_cyclo", GetoptLong::REQUIRED_ARGUMENT],
1071
+ ["-d","--error_token", GetoptLong::REQUIRED_ARGUMENT],
1072
+ ["-p","--parse_file", GetoptLong::REQUIRED_ARGUMENT],
1073
+ ["-i","--input_directory", GetoptLong::REQUIRED_ARGUMENT],
1074
+ ["-v","--verbose", GetoptLong::NO_ARGUMENT]
1075
+ )
1076
+
1077
+ opt.each do |arg,val|
1078
+ case arg
1079
+ when "-o"
1080
+ output_dir = val
1081
+ when "-h"
1082
+ #RDoc.usage('help')
1083
+ when "-f"
1084
+ formater = val
1085
+ when "-c"
1086
+ comp_state = true
1087
+ when "-t"
1088
+ comp_token = true
1089
+ when "-k"
1090
+ token_filter.limit = val.to_i
1091
+ when "-s"
1092
+ token_filter.warn = val.to_i
1093
+ when "-d"
1094
+ token_filter.error = val.to_i
1095
+ when "-y"
1096
+ state_filter.limit = val.to_i
1097
+ when "-w"
1098
+ state_filter.warn = val.to_i
1099
+ when "-e"
1100
+ state_filter.error = val.to_i
1101
+ when "-p"
1102
+ files<< val
1103
+ when "-i"
1104
+ files.concat(get_ruby_files(val))
1105
+ when "-v"
1106
+ STDOUT.puts "Verbose mode on"
1107
+ $VERBOSE = true
1108
+ end
1109
+
1110
+ end
1111
+ #RDoc.usage if !comp_state && !comp_token
1112
+ rescue => err
1113
+ #RDoc.usage
1114
+ end
1115
+
1116
+ if formater =~ /html/i
1117
+ state_formater = StateHTMLComplexityFormater.new(STDOUT,state_filter)
1118
+ token_count_formater = HTMLTokenCounterFormater.new(STDOUT,token_filter)
1119
+ else
1120
+ state_formater = ParseStateFormater.new(STDOUT,state_filter)
1121
+ token_count_formater = TokenCounterFormater.new(STDOUT,token_filter)
1122
+ end
1123
+
1124
+ state_formater = nil if !comp_state
1125
+ token_count_formater = nil if !comp_token
1126
+
1127
+ idx_states, idx_tokens = Cyclomagic.analyze(files,
1128
+ state_formater,
1129
+ token_count_formater,
1130
+ output_dir)
1131
+
1132
+ write_cyclo_index(idx_states, output_dir)
1133
+ write_token_index(idx_tokens, output_dir)
1134
+ end
1135
+
1136
+ end
1137
+ end
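For programmatic use, the analyzer can also be driven without the command-line runner. The following is a minimal sketch based only on the classes above; it assumes a Ruby whose irb/ruby-lex still exposes the old RubyLex#token API that this gem targets (1.8/1.9 era), and the path and thresholds are illustrative:

    require 'cyclomagic'

    path = "lib/example.rb"   # illustrative input file

    # Top-level parse state plus a token counter for this file.
    top = Cyclomagic::ParseState.make_top_state
    counter = Cyclomagic::TokenCounter.new
    Cyclomagic::ParseState.set_token_counter(counter)
    counter.set_current_file(path)

    # Feed the file through RubyLex and walk the token stream.
    lexer = RubyLex.new
    lexer.exception_on_syntax_error = false
    lexer.set_input(File.new(path, "r"))
    top.lexer = lexer
    top.parse

    # Print per-class / per-def complexity to STDOUT.
    # Filter defaults: report everything, warn at 8, error at 11.
    formater = Cyclomagic::ParseStateFormater.new(STDOUT, Cyclomagic::Filter.new)
    formater.start
    top.compute_state(formater)
    formater.end

The analyze method above performs this same sequence for each file and additionally writes the HTML report files and indexes.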
metadata ADDED
@@ -0,0 +1,56 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: cyclomagic
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.1
5
+ prerelease:
6
+ platform: ruby
7
+ authors:
8
+ - dennyabraham
9
+ autorequire:
10
+ bindir: bin
11
+ cert_chain: []
12
+ date: 2012-03-08 00:00:00.000000000Z
13
+ dependencies: []
14
+ description: an attempt to build a working version of saikuro, the cyclomatic complexity
15
+ analyzer
16
+ email:
17
+ - email@dennyabraham.com
18
+ executables:
19
+ - cyclomagic
20
+ extensions: []
21
+ extra_rdoc_files: []
22
+ files:
23
+ - .gitignore
24
+ - Gemfile
25
+ - README.md
26
+ - Rakefile
27
+ - bin/cyclomagic
28
+ - cyclomagic.gemspec
29
+ - lib/cyclomagic.rb
30
+ - lib/cyclomagic/version.rb
31
+ homepage: ''
32
+ licenses: []
33
+ post_install_message:
34
+ rdoc_options: []
35
+ require_paths:
36
+ - lib
37
+ required_ruby_version: !ruby/object:Gem::Requirement
38
+ none: false
39
+ requirements:
40
+ - - ! '>='
41
+ - !ruby/object:Gem::Version
42
+ version: '0'
43
+ required_rubygems_version: !ruby/object:Gem::Requirement
44
+ none: false
45
+ requirements:
46
+ - - ! '>='
47
+ - !ruby/object:Gem::Version
48
+ version: '0'
49
+ requirements: []
50
+ rubyforge_project:
51
+ rubygems_version: 1.8.10
52
+ signing_key:
53
+ specification_version: 3
54
+ summary: an attempt to build a working version of saikuro, the cyclomatic complexity
55
+ analyzer
56
+ test_files: []