eden 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. data/CHANGELOG +4 -0
  2. data/LICENSE +20 -0
  3. data/README.md +48 -0
  4. data/Rakefile +10 -0
  5. data/bin/eden +132 -0
  6. data/lib/eden.rb +10 -0
  7. data/lib/eden/defaults.rb +26 -0
  8. data/lib/eden/formatter.rb +25 -0
  9. data/lib/eden/formatters/block_formatter.rb +45 -0
  10. data/lib/eden/formatters/indenter.rb +91 -0
  11. data/lib/eden/formatters/white_space_cleaner.rb +14 -0
  12. data/lib/eden/line.rb +65 -0
  13. data/lib/eden/source_file.rb +32 -0
  14. data/lib/eden/token.rb +62 -0
  15. data/lib/eden/tokenizer.rb +259 -0
  16. data/lib/eden/tokenizers/basic_tokenizer.rb +167 -0
  17. data/lib/eden/tokenizers/delimited_literal_tokenizer.rb +38 -0
  18. data/lib/eden/tokenizers/number_tokenizer.rb +68 -0
  19. data/lib/eden/tokenizers/operator_tokenizer.rb +211 -0
  20. data/lib/eden/tokenizers/regex_tokenizer.rb +37 -0
  21. data/lib/eden/tokenizers/string_tokenizer.rb +149 -0
  22. data/test/array_literal_tokenization_test.rb +43 -0
  23. data/test/basic_tokenization_test.rb +29 -0
  24. data/test/block_formatter_test.rb +47 -0
  25. data/test/class_var_token_test.rb +21 -0
  26. data/test/identifier_token_test.rb +140 -0
  27. data/test/indenter_test.rb +314 -0
  28. data/test/instance_var_token_test.rb +48 -0
  29. data/test/number_tokenization_test.rb +83 -0
  30. data/test/operator_tokenization_test.rb +180 -0
  31. data/test/regex_tokenization_test.rb +68 -0
  32. data/test/single_character_tokenization_test.rb +87 -0
  33. data/test/string_tokenization_test.rb +291 -0
  34. data/test/symbol_tokenization_test.rb +64 -0
  35. data/test/test_helper.rb +13 -0
  36. data/test/white_space_cleaner_test.rb +35 -0
  37. data/test/whitespace_token_test.rb +63 -0
  38. metadata +108 -0
@@ -0,0 +1,48 @@
1
require File.dirname(__FILE__) + "/test_helper.rb"

# Verifies that the Eden tokenizer classifies Ruby identifiers:
# instance variables (:instancevar) and global variables (:globalvar).
# Whitespace between identifiers occupies the odd-numbered token slots,
# which is why assertions only inspect even indices.
class IdentifierTokenTest < Test::Unit::TestCase
  def setup
    @sf = Eden::SourceFile.new( "dummy.rb" )
  end

  def test_instance_var_tokenization
    @sf.stubs(:source).returns("@token @_token @token2")
    @sf.tokenize!
    line = @sf.lines[0]
    # 3 identifiers + 2 separating whitespace tokens
    assert_equal 5, line.tokens.size
    assert_equal "@token", line.tokens[0].content
    assert_equal :instancevar, line.tokens[0].type
    assert_equal "@_token", line.tokens[2].content
    assert_equal :instancevar, line.tokens[2].type
    assert_equal "@token2", line.tokens[4].content
    assert_equal :instancevar, line.tokens[4].type
  end

  # $' used as a global by Hpricot 0.6.164:lib/elements.rb[274]
  def test_global_var_tokenization
    @sf.stubs(:source).returns("$: $? $foo $1 $'")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    # 5 globals + 4 separating whitespace tokens
    assert_equal 9, tokens.size
    assert_equal "$:", tokens[0].content
    assert_equal :globalvar, tokens[0].type
    assert_equal "$?", tokens[2].content
    assert_equal :globalvar, tokens[2].type
    assert_equal "$foo", tokens[4].content
    assert_equal :globalvar, tokens[4].type
    assert_equal "$1", tokens[6].content
    assert_equal :globalvar, tokens[6].type
    assert_equal "$'", tokens[8].content
    assert_equal :globalvar, tokens[8].type
  end

  # $` used as a global by Hpricot 0.6.164:lib/builder.rb[199]
  def test_global_var_tokenization_2
    @sf.stubs(:source).returns("$`")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 1, tokens.size
    assert_equal "$`", tokens[0].content
    assert_equal :globalvar, tokens[0].type
  end
end
@@ -0,0 +1,83 @@
1
require File.dirname(__FILE__) + "/test_helper.rb"

# Verifies numeric-literal tokenization: binary, hexadecimal, signed
# decimal, explicit decimal (0d), float, and exponent forms.
# Whitespace between literals occupies the odd-numbered token slots.
class NumberTokenizationTest < Test::Unit::TestCase
  def setup
    @sf = Eden::SourceFile.new( "dummy.rb" )
  end

  def test_binary_tokenization
    @sf.stubs(:source).returns("b12345 0b01010 0b11056")
    @sf.tokenize!
    line = @sf.lines[0]
    # "b12345" has no leading 0, so it is a plain identifier; "0b11056"
    # stops at the first non-binary digit, splitting into "0b110" + "56".
    assert_equal 6, line.tokens.size
    assert_equal "b12345", line.tokens[0].content
    assert_equal :identifier, line.tokens[0].type
    assert_equal "0b01010", line.tokens[2].content
    assert_equal :bin_literal, line.tokens[2].type
    assert_equal "0b110", line.tokens[4].content
    assert_equal :bin_literal, line.tokens[4].type
  end

  def test_hexadecimal_tokenisation
    @sf.stubs(:source).returns("0x0123 0x0F 0xDEADBEEF 0x0FRR")
    @sf.tokenize!
    line = @sf.lines[0]
    # "0x0FRR" stops at the first non-hex character: "0x0F" + "RR".
    assert_equal 8, line.tokens.size
    assert_equal "0x0123", line.tokens[0].content
    assert_equal :hex_literal, line.tokens[0].type
    assert_equal "0x0F", line.tokens[2].content
    assert_equal :hex_literal, line.tokens[2].type
    assert_equal "0xDEADBEEF", line.tokens[4].content
    assert_equal :hex_literal, line.tokens[4].type
    assert_equal "0x0F", line.tokens[6].content
    assert_equal :hex_literal, line.tokens[6].type
  end

  def test_signed_tokenization
    @sf.stubs(:source).returns("-10 +4")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    # Leading sign is folded into the literal, not emitted as an operator.
    assert_equal 3, tokens.size
    assert_equal "-10", tokens[0].content
    assert_equal :dec_literal, tokens[0].type
    assert_equal "+4", tokens[2].content
    assert_equal :dec_literal, tokens[2].type
  end

  def test_decimal_tokenisation
    @sf.stubs(:source).returns("123 9123 0d1987")
    @sf.tokenize!
    line = @sf.lines[0]
    assert_equal 5, line.tokens.size
    assert_equal "123", line.tokens[0].content
    assert_equal :dec_literal, line.tokens[0].type
    assert_equal "9123", line.tokens[2].content
    assert_equal :dec_literal, line.tokens[2].type
    assert_equal "0d1987", line.tokens[4].content
    assert_equal :dec_literal, line.tokens[4].type
  end

  def test_float_tokenisation
    # TODO: Add test that 123.method doesn't tokenize as float
    @sf.stubs(:source).returns("123.0")
    @sf.tokenize!
    line = @sf.lines[0]
    assert_equal 1, line.tokens.size
    assert_equal "123.0", line.tokens[0].content
    assert_equal :float_literal, line.tokens[0].type
  end

  def test_exponent_tokenisation
    @sf.stubs(:source).returns("123e24 1.032e-12 1.32e+12")
    @sf.tokenize!
    line = @sf.lines[0]
    assert_equal 5, line.tokens.size
    assert_equal "123e24", line.tokens[0].content
    assert_equal :exp_literal, line.tokens[0].type
    assert_equal "1.032e-12", line.tokens[2].content
    assert_equal :exp_literal, line.tokens[2].type
    assert_equal "1.32e+12", line.tokens[4].content
    assert_equal :exp_literal, line.tokens[4].type
  end
end
@@ -0,0 +1,180 @@
1
require File.dirname(__FILE__) + "/test_helper.rb"

# Verifies operator tokenization. Whitespace between operators occupies
# the odd-numbered token slots, so assertions inspect even indices.
#
# FIX: the ampersand test was previously also named
# test_pipe_tokenization, which silently overwrote the pipe test so it
# never ran. Renamed to test_ampersand_tokenization.
class OperatorTokenizationTest < Test::Unit::TestCase
  def setup
    @sf = Eden::SourceFile.new( "dummy.rb" )
  end

  def test_equals_tokenization
    @sf.stubs(:source).returns("= == => === =~")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 9, tokens.size
    assert_equal :equals, tokens[0].type
    assert_equal "=", tokens[0].content
    assert_equal :equality, tokens[2].type
    assert_equal "==", tokens[2].content
    assert_equal :hash_rocket, tokens[4].type
    assert_equal "=>", tokens[4].content
    assert_equal :identity_equality, tokens[6].type
    assert_equal "===", tokens[6].content
    assert_equal :matches, tokens[8].type
    assert_equal "=~", tokens[8].content
  end

  def test_plus_tokenization
    @sf.stubs(:source).returns("+ += +@")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 5, tokens.size
    assert_equal :plus, tokens[0].type
    assert_equal "+", tokens[0].content
    assert_equal :plus_equals, tokens[2].type
    assert_equal "+=", tokens[2].content
    assert_equal :plus_at, tokens[4].type
    assert_equal "+@", tokens[4].content
  end

  def test_minus_tokenization
    @sf.stubs(:source).returns("- -= -@")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 5, tokens.size
    assert_equal :minus, tokens[0].type
    assert_equal "-", tokens[0].content
    assert_equal :minus_equals, tokens[2].type
    assert_equal "-=", tokens[2].content
    assert_equal :minus_at, tokens[4].type
    assert_equal "-@", tokens[4].content
  end

  def test_multiply_tokenization
    @sf.stubs(:source).returns("* *= ** **=")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 7, tokens.size
    assert_equal :multiply, tokens[0].type
    assert_equal "*", tokens[0].content
    assert_equal :multiply_equals, tokens[2].type
    assert_equal "*=", tokens[2].content
    assert_equal :exponent, tokens[4].type
    assert_equal "**", tokens[4].content
    assert_equal :exponent_equals, tokens[6].type
    assert_equal "**=", tokens[6].content
  end

  # "/" is ambiguous with regex delimiters; surrounding identifiers force
  # the divide interpretation.
  def test_divide_tokenization
    @sf.stubs(:source).returns("rah / gah")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 5, tokens.size
    assert_equal :divide, tokens[2].type
    assert_equal "/", tokens[2].content
  end

  def test_divide_equals_tokenization
    @sf.stubs(:source).returns("rah /= gah")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    # FIX: expected value comes first, matching the rest of the file.
    assert_equal 5, tokens.size
    assert_equal :divide_equals, tokens[2].type
    assert_equal "/=", tokens[2].content
  end

  def test_lt_tokenization
    @sf.stubs(:source).returns("< <= << <<= <=>")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 9, tokens.size
    assert_equal :lt, tokens[0].type
    assert_equal "<", tokens[0].content
    assert_equal :lte, tokens[2].type
    assert_equal "<=", tokens[2].content
    assert_equal :left_shift, tokens[4].type
    assert_equal "<<", tokens[4].content
    assert_equal :left_shift_equals, tokens[6].type
    assert_equal "<<=", tokens[6].content
    assert_equal :sort_operator, tokens[8].type
    assert_equal "<=>", tokens[8].content
  end

  def test_gt_tokenization
    @sf.stubs(:source).returns("> >= >> >>=")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 7, tokens.size
    assert_equal :gt, tokens[0].type
    assert_equal ">", tokens[0].content
    assert_equal :gte, tokens[2].type
    assert_equal ">=", tokens[2].content
    assert_equal :right_shift, tokens[4].type
    assert_equal ">>", tokens[4].content
    assert_equal :right_shift_equals, tokens[6].type
    assert_equal ">>=", tokens[6].content
  end

  def test_pipe_tokenization
    @sf.stubs(:source).returns("| |= || ||=")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 7, tokens.size
    assert_equal :bitwise_or, tokens[0].type
    assert_equal "|", tokens[0].content
    assert_equal :bitwise_or_equals, tokens[2].type
    assert_equal "|=", tokens[2].content
    assert_equal :logical_or, tokens[4].type
    assert_equal "||", tokens[4].content
    assert_equal :logical_or_equals, tokens[6].type
    assert_equal "||=", tokens[6].content
  end

  # Renamed from a duplicate test_pipe_tokenization (bug fix: the
  # duplicate name hid the pipe test above).
  def test_ampersand_tokenization
    @sf.stubs(:source).returns("& &= && &&=")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 7, tokens.size
    assert_equal :bitwise_and, tokens[0].type
    assert_equal "&", tokens[0].content
    assert_equal :bitwise_and_equals, tokens[2].type
    assert_equal "&=", tokens[2].content
    assert_equal :logical_and, tokens[4].type
    assert_equal "&&", tokens[4].content
    assert_equal :logical_and_equals, tokens[6].type
    assert_equal "&&=", tokens[6].content
  end

  def test_caret_tokenization
    @sf.stubs(:source).returns("^ ^=")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 3, tokens.size
    assert_equal :caret, tokens[0].type
    assert_equal "^", tokens[0].content
    assert_equal :caret_equals, tokens[2].type
    assert_equal "^=", tokens[2].content
  end

  # "%" is ambiguous with %-literals, so an identifier sits between the
  # two operators to force the modulo interpretation.
  def test_modulo_tokenization
    @sf.stubs(:source).returns("% a %=")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 5, tokens.size
    assert_equal :modulo, tokens[0].type
    assert_equal "%", tokens[0].content
    assert_equal :modulo_equals, tokens[4].type
    assert_equal "%=", tokens[4].content
  end

  def test_bang_tokenization
    @sf.stubs(:source).returns("!= !~ !")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 5, tokens.size
    assert_equal :not_equals, tokens[0].type
    assert_equal "!=", tokens[0].content
    assert_equal :not_matches, tokens[2].type
    assert_equal "!~", tokens[2].content
    assert_equal :logical_not, tokens[4].type
    assert_equal "!", tokens[4].content
  end
end
@@ -0,0 +1,68 @@
1
require File.dirname(__FILE__) + "/test_helper.rb"

# Verifies regex-literal tokenization: %r{...} delimited form, /.../
# form, trailing modifiers, escaped delimiters, and the divide/regex
# ambiguity.
class RegexTokenizationTest < Test::Unit::TestCase
  def setup
    @sf = Eden::SourceFile.new( "dummy.rb" )
  end

  def test_delimited_regex_tokenization
    assert_single_regex_token("%r{[a-z]}")
  end

  def test_delimited_regex_tokenization2
    # Trailing modifier is part of the regex token.
    assert_single_regex_token("%r{[a-z]}i")
  end

  def test_regex_tokenization_at_line_start
    assert_single_regex_token("/[asdf]/")
  end

  def test_ambiguous_regex_tokenization
    # A "/" between identifiers is division, never a regex delimiter.
    @sf.stubs(:source).returns("a = b / c\nd = f / g")
    @sf.tokenize!
    tokens = @sf.lines[0].tokens + @sf.lines[1].tokens
    tokens.each {|t| assert t.type != :regex }
  end

  def test_regex_tokenization_with_escape_characters
    # An escaped "/" must not terminate the literal.
    assert_single_regex_token("/test\\/test/")
  end

  def test_regex_tokenization_with_modifiers
    assert_single_regex_token("/test/i")
  end

  def test_regex_tokenization_with_funky_regexes
    assert_single_regex_token(%q{%r!\[ *(?:(@)([\w\(\)-]+)|([\w\(\)-]+\(\))) *([~\!\|\*$\^=]*) *'?"?([^'"]*)'?"? *\]!i})
  end

  private

  # Tokenizes +source+ and asserts it yields exactly one :regex token
  # whose content is the full source string.
  def assert_single_regex_token(source)
    @sf.stubs(:source).returns(source)
    @sf.tokenize!
    tokens = @sf.lines[0].tokens
    assert_equal 1, tokens.size
    assert_equal :regex, tokens[0].type
    assert_equal source, tokens[0].content
  end
end
@@ -0,0 +1,87 @@
1
require File.dirname(__FILE__) + "/test_helper.rb"

# Verifies single-character operator/punctuation tokenization, plus the
# multi-character period (.., ...) and colon (::, :sym) forms.
class SingleCharacterTokenizationTest < Test::Unit::TestCase
  def setup
    @sf = Eden::SourceFile.new( "dummy.rb" )
  end

  def test_single_character_tokenisation
    @sf.stubs(:source).returns("<>~!@% ^&*()[]{}|.: ;=? +-")
    @sf.tokenize!
    line = @sf.lines[0]
    assert_equal 26, line.tokens.size
    # index => [content, type]; unchecked indices (6, 19, 23) hold the
    # whitespace separators.
    expectations = {
      0  => ["<", :lt],
      1  => [">", :gt],
      2  => ["~", :tilde],
      3  => ["!", :logical_not],
      4  => ["@", :at],
      5  => ["%", :modulo],
      7  => ["^", :caret],
      8  => ["&", :bitwise_and],
      9  => ["*", :multiply],
      10 => ["(", :lparen],
      11 => [")", :rparen],
      12 => ["[", :lsquare],
      13 => ["]", :rsquare],
      14 => ["{", :lcurly],
      15 => ["}", :rcurly],
      16 => ["|", :bitwise_or],
      17 => [".", :period],
      18 => [":", :colon],
      20 => [";", :semicolon],
      21 => ["=", :equals],
      22 => ["?", :question_mark],
      24 => ["+", :plus],
      25 => ["-", :minus]
    }
    expectations.each do |index, (content, type)|
      assert_equal content, line.tokens[index].content
      assert_equal type, line.tokens[index].type
    end
  end

  def test_period_tokenization
    # Longest match wins: "..." before "..", before ".".
    @sf.stubs(:source).returns("... .. .")
    @sf.tokenize!
    toks = @sf.lines[0].tokens
    assert_equal 5, toks.size
    assert_equal "...", toks[0].content
    assert_equal :range_inc, toks[0].type
    assert_equal "..", toks[2].content
    assert_equal :range_exc, toks[2].type
    assert_equal ".", toks[4].content
    assert_equal :period, toks[4].type
  end

  def test_colon_tokenization
    # ":" starts a symbol when followed by a name, pairs into "::" scope
    # resolution, and is a bare colon otherwise.
    @sf.stubs(:source).returns(":test :: : ")
    @sf.tokenize!
    toks = @sf.lines[0].tokens
    assert_equal 6, toks.size
    assert_equal ":test", toks[0].content
    assert_equal :symbol, toks[0].type
    assert_equal "::", toks[2].content
    assert_equal :scope_res, toks[2].type
    assert_equal ":", toks[4].content
    assert_equal :colon, toks[4].type
  end
end