eden 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. data/CHANGELOG +4 -0
  2. data/LICENSE +20 -0
  3. data/README.md +48 -0
  4. data/Rakefile +10 -0
  5. data/bin/eden +132 -0
  6. data/lib/eden.rb +10 -0
  7. data/lib/eden/defaults.rb +26 -0
  8. data/lib/eden/formatter.rb +25 -0
  9. data/lib/eden/formatters/block_formatter.rb +45 -0
  10. data/lib/eden/formatters/indenter.rb +91 -0
  11. data/lib/eden/formatters/white_space_cleaner.rb +14 -0
  12. data/lib/eden/line.rb +65 -0
  13. data/lib/eden/source_file.rb +32 -0
  14. data/lib/eden/token.rb +62 -0
  15. data/lib/eden/tokenizer.rb +259 -0
  16. data/lib/eden/tokenizers/basic_tokenizer.rb +167 -0
  17. data/lib/eden/tokenizers/delimited_literal_tokenizer.rb +38 -0
  18. data/lib/eden/tokenizers/number_tokenizer.rb +68 -0
  19. data/lib/eden/tokenizers/operator_tokenizer.rb +211 -0
  20. data/lib/eden/tokenizers/regex_tokenizer.rb +37 -0
  21. data/lib/eden/tokenizers/string_tokenizer.rb +149 -0
  22. data/test/array_literal_tokenization_test.rb +43 -0
  23. data/test/basic_tokenization_test.rb +29 -0
  24. data/test/block_formatter_test.rb +47 -0
  25. data/test/class_var_token_test.rb +21 -0
  26. data/test/identifier_token_test.rb +140 -0
  27. data/test/indenter_test.rb +314 -0
  28. data/test/instance_var_token_test.rb +48 -0
  29. data/test/number_tokenization_test.rb +83 -0
  30. data/test/operator_tokenization_test.rb +180 -0
  31. data/test/regex_tokenization_test.rb +68 -0
  32. data/test/single_character_tokenization_test.rb +87 -0
  33. data/test/string_tokenization_test.rb +291 -0
  34. data/test/symbol_tokenization_test.rb +64 -0
  35. data/test/test_helper.rb +13 -0
  36. data/test/white_space_cleaner_test.rb +35 -0
  37. data/test/whitespace_token_test.rb +63 -0
  38. metadata +108 -0
data/test/string_tokenization_test.rb
@@ -0,0 +1,291 @@
+ require File.dirname(__FILE__) + "/test_helper.rb"
+
+ class StringTokenizationTest < Test::Unit::TestCase
+   def setup
+     @sf = Eden::SourceFile.new( "dummy.rb" )
+   end
+
+   def test_single_quote_string_tokenisation
+     @sf.stubs(:source).returns("'test' 'te\\'st' 'te\\\\st' 'te\"st'")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 7, tokens.size
+     assert_equal :single_q_string, tokens[0].type
+     assert_equal "'test'", tokens[0].content
+     assert_equal :single_q_string, tokens[2].type
+     assert_equal "'te\\'st'", tokens[2].content
+     assert_equal :single_q_string, tokens[4].type
+     assert_equal "'te\\\\st'", tokens[4].content
+     assert_equal :single_q_string, tokens[6].type
+     assert_equal "'te\"st'", tokens[6].content
+   end
+
+   def test_backquote_string_tokenisation
+     @sf.stubs(:source).returns("`exec`")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 1, tokens.size
+     assert_equal :backquote_string, tokens[0].type
+     assert_equal "`exec`", tokens[0].content
+   end
+
+   def test_backquote_string_interpolation
+     @sf.stubs(:source).returns("`exec \#\{\"cmd\"\}`")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 5, tokens.size
+     assert_equal "`exec \#", tokens[0].content
+     assert_equal :lcurly, tokens[1].type
+     assert_equal "\"cmd\"", tokens[2].content
+     assert_equal :rcurly, tokens[3].type
+     assert_equal "`", tokens[4].content
+     assert_equal :backquote_string, tokens[4].type
+   end
+
+   def test_should_tokenize_unterminated_backquote_string
+     @sf.stubs(:source).returns("`exec")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 1, tokens.size
+     assert_equal :backquote_string, tokens[0].type
+     assert_equal "`exec", tokens[0].content
+   end
+
+   def test_double_quote_string_tokenisation
+     @sf.stubs(:source).returns('"test" "end')
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 3, tokens.size
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal '"test"', tokens[0].content
+     assert_equal :double_q_string, tokens[2].type
+     assert_equal '"end', tokens[2].content
+   end
+
+   def test_double_quote_string_escaping
+     @sf.stubs(:source).returns('"te\\"st" "test\\\\test"')
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 3, tokens.size
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal '"te\\"st"', tokens[0].content
+     assert_equal :double_q_string, tokens[2].type
+     assert_equal '"test\\\\test"', tokens[2].content
+   end
+
+   def test_quoted_expanded_literal_string_tokenization
+     @sf.stubs(:source).returns("%(test)\n%Q(test)")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 2, tokens.size
+     assert_equal "%(test)", tokens[0].content
+     assert_equal :double_q_string, tokens[0].type
+     tokens = @sf.lines[1].tokens
+     assert_equal "%Q(test)", tokens[0].content
+     assert_equal :double_q_string, tokens[0].type
+   end
+
+   def test_should_expand_expanded_literal_strings
+     @sf.stubs(:source).returns("%Q(rah\#{@ivar}rah)")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 5, tokens.size
+     assert_equal "%Q(rah\#", tokens[0].content
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal :lcurly, tokens[1].type
+     assert_equal "@ivar", tokens[2].content
+     assert_equal :instancevar, tokens[2].type
+     assert_equal :rcurly, tokens[3].type
+     assert_equal "rah)", tokens[4].content
+     assert_equal :double_q_string, tokens[4].type
+   end
+
+   def test_should_not_expand_non_expanded_literal_strings
+     @sf.stubs(:source).returns("%q(rah\#{@ivar}rah)")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 1, tokens.size
+     assert_equal "%q(rah\#{@ivar}rah)", tokens[0].content
+     assert_equal :single_q_string, tokens[0].type
+   end
+
+   def test_double_quote_string_interpolation
+     @sf.stubs(:source).returns("\"str\#{ @inst }str\"")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 7, tokens.size
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal '"str#', tokens[0].content
+     assert_equal :lcurly, tokens[1].type
+     assert_equal '{', tokens[1].content
+     assert_equal :instancevar, tokens[3].type
+     assert_equal '@inst', tokens[3].content
+     assert_equal :rcurly, tokens[5].type
+     assert_equal '}', tokens[5].content
+     assert_equal :double_q_string, tokens[6].type
+     assert_equal 'str"', tokens[6].content
+   end
+
+   def test_string_interpolation_at_end
+     @sf.stubs(:source).returns("\"str\#{ @inst }\"\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 8, tokens.size
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal '"str#', tokens[0].content
+     assert_equal :double_q_string, tokens[6].type
+     assert_equal '"', tokens[6].content
+   end
+
+   def test_string_interpolation_with_class_instance_vars
+     @sf.stubs(:source).returns("\"str\#@inst moar \#@@var\"\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal '"str#', tokens[0].content
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal '@inst', tokens[1].content
+     assert_equal :instancevar, tokens[1].type
+     assert_equal ' moar #', tokens[2].content
+     assert_equal :double_q_string, tokens[2].type
+     assert_equal '@@var', tokens[3].content
+     assert_equal :classvar, tokens[3].type
+     assert_equal '"', tokens[4].content
+     assert_equal :double_q_string, tokens[4].type
+   end
+
+   def test_string_interpolation_with_global_vars
+     @sf.stubs(:source).returns("\"str\#$1\"\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 4, tokens.size
+     assert_equal '"str#', tokens[0].content
+     assert_equal :double_q_string, tokens[0].type
+     assert_equal '$1', tokens[1].content
+     assert_equal :globalvar, tokens[1].type
+     assert_equal '"', tokens[2].content
+     assert_equal :double_q_string, tokens[2].type
+   end
+
+   def test_delimited_backquote_string_tokenization
+     @sf.stubs(:source).returns("%x{rah --e}")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 1, tokens.size
+     assert_equal :backquote_string, tokens[0].type
+     assert_equal "%x{rah --e}", tokens[0].content
+   end
+
+   def test_should_expand_backquote_string_delimited_literals
+     @sf.stubs(:source).returns("%x(rah\#{@rah})")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 5, tokens.size
+     assert_equal "%x(rah\#", tokens[0].content
+     assert_equal :backquote_string, tokens[0].type
+     assert_equal :lcurly, tokens[1].type
+     assert_equal "@rah", tokens[2].content
+     assert_equal :instancevar, tokens[2].type
+     assert_equal :rcurly, tokens[3].type
+     assert_equal ")", tokens[4].content
+     assert_equal :backquote_string, tokens[4].type
+   end
+
+   def test_heredoc_tokenization
+     @sf.stubs(:source).returns("str = <<HEREDOC\nLorem Ipsum\nHEREDOC\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal :heredoc_delimiter, tokens[4].type
+     assert_equal "<<HEREDOC", tokens[4].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :heredoc_body, tokens[0].type
+     assert_equal "Lorem Ipsum\nHEREDOC", tokens[0].content
+     assert_equal :newline, tokens[1].type
+   end
+
+   def test_heredoc_tokenization_2
+     @sf.stubs(:source).returns("str = <<-HEREDOC\nLorem Ipsum\nHEREDOC\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal :heredoc_delimiter, tokens[4].type
+     assert_equal "<<-HEREDOC", tokens[4].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :heredoc_body, tokens[0].type
+     assert_equal "Lorem Ipsum\nHEREDOC", tokens[0].content
+     assert_equal :newline, tokens[1].type
+   end
+
+   # Because the heredoc delimiter must be on a line by itself, the heredoc
+   # delimiter can appear in the heredoc itself without terminating it.
+   def test_heredoc_tokenization_with_amgibuous_delimiter
+     @sf.stubs(:source).returns(<<-SOURCE)
+ var = <<WARNING
+ WARNING Blah Blah Blah
+ And something more
+ WARNING
+ SOURCE
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal "<<WARNING", tokens[4].content
+     assert_equal :heredoc_delimiter, tokens[4].type
+     tokens = @sf.lines[1].tokens
+     assert_equal 2, tokens.size
+     assert_equal "WARNING Blah Blah Blah\nAnd something more\nWARNING", tokens[0].content
+     assert_equal :heredoc_body, tokens[0].type
+   end
+
+   def test_heredoc_tokenization_empty_heredoc
+     @sf.stubs(:source).returns("str = <<-HEREDOC\nHEREDOC\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal :heredoc_delimiter, tokens[4].type
+     assert_equal "<<-HEREDOC", tokens[4].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :heredoc_body, tokens[0].type
+     assert_equal "HEREDOC", tokens[0].content
+     assert_equal :newline, tokens[1].type
+   end
+
+   def test_heredoc_tokeniztion_with_single_quote_delimiter
+     @sf.stubs(:source).returns("str = <<'HERE DOC'\nLorem Ipsum\n'HERE DOC'\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal :heredoc_delimiter, tokens[4].type
+     assert_equal "<<'HERE DOC'", tokens[4].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :heredoc_body, tokens[0].type
+     assert_equal "Lorem Ipsum\n'HERE DOC'", tokens[0].content
+     assert_equal :newline, tokens[1].type
+   end
+
+   def test_heredoc_tokeniztion_with_double_quote_delimiter
+     @sf.stubs(:source).returns("str = <<\"HERE DOC\"\nLorem Ipsum\n\"HERE DOC\"\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal :heredoc_delimiter, tokens[4].type
+     assert_equal "<<\"HERE DOC\"", tokens[4].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :heredoc_body, tokens[0].type
+     assert_equal "Lorem Ipsum\n\"HERE DOC\"", tokens[0].content
+     assert_equal :newline, tokens[1].type
+   end
+
+   def test_heredoc_tokeniztion_with_backquote_delimiter
+     @sf.stubs(:source).returns("str = <<`HERE DOC`\nLorem Ipsum\n`HERE DOC`\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 6, tokens.size
+     assert_equal :heredoc_delimiter, tokens[4].type
+     assert_equal "<<`HERE DOC`", tokens[4].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :heredoc_body, tokens[0].type
+     assert_equal "Lorem Ipsum\n`HERE DOC`", tokens[0].content
+     assert_equal :newline, tokens[1].type
+   end
+ end
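
Note: the "ambiguous delimiter" heredoc test above relies on a plain Ruby rule rather than anything Eden-specific: a heredoc only terminates when its delimiter appears on a line by itself, so the same word embedded in a body line is just text. A minimal stand-alone illustration (the variable name is ours, not part of the gem):

text = <<WARNING
WARNING Blah Blah Blah
And something more
WARNING
puts text  # the first body line starts with "WARNING" but does not end the heredoc;
           # only the bare "WARNING" line above terminates it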
data/test/symbol_tokenization_test.rb
@@ -0,0 +1,64 @@
+ require File.dirname(__FILE__) + "/test_helper.rb"
+
+ class SymbolTokenizationTest < Test::Unit::TestCase
+   def setup
+     @sf = Eden::SourceFile.new( "dummy.rb" )
+   end
+
+   def test_simple_symbol_tokenisation
+     @sf.stubs(:source).returns(":test :test12 :_rah")
+     @sf.tokenize!
+     line = @sf.lines[0]
+     assert_equal 5, line.tokens.size
+     assert_equal ":test", line.tokens[0].content
+     assert_equal :symbol, line.tokens[0].type
+     assert_equal ":test12", line.tokens[2].content
+     assert_equal :symbol, line.tokens[2].type
+     assert_equal ":_rah", line.tokens[4].content
+     assert_equal :symbol, line.tokens[4].type
+   end
+
+   def test_operator_symbol_tokenization
+     @sf.stubs(:source).returns(":% :< :/ :<<")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 7, tokens.size
+     assert_equal ":%", tokens[0].content
+     assert_equal :symbol, tokens[0].type
+     assert_equal ":<", tokens[2].content
+     assert_equal :symbol, tokens[2].type
+     assert_equal ":/", tokens[4].content
+     assert_equal :symbol, tokens[4].type
+     assert_equal ":<<", tokens[6].content
+     assert_equal :symbol, tokens[6].type
+   end
+
+   def test_dynamic_symbol_tokenisation
+     @sf.stubs(:source).returns(":'dynamic symbol' :\"dynamic symbol\"")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 3, tokens.size
+     assert_equal :symbol, tokens[0].type
+     assert_equal ":'dynamic symbol'", tokens[0].content
+     assert_equal :symbol, tokens[2].type
+     assert_equal ":\"dynamic symbol\"", tokens[2].content
+   end
+
+   def test_dynamic_symbol_tokenization2
+     @sf.stubs(:source).returns("%s{rah}\n%s(rah)\n%s:rah:\n%s<rah<rah>rah>")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 2, tokens.size
+     assert_equal :symbol, tokens[0].type
+     assert_equal "%s{rah}", tokens[0].content
+     tokens = @sf.lines[1].tokens
+     assert_equal :symbol, tokens[0].type
+     assert_equal "%s(rah)", tokens[0].content
+     tokens = @sf.lines[2].tokens
+     assert_equal :symbol, tokens[0].type
+     assert_equal "%s:rah:", tokens[0].content
+     tokens = @sf.lines[3].tokens
+     assert_equal :symbol, tokens[0].type
+     assert_equal "%s<rah<rah>rah>", tokens[0].content
+   end
+ end
data/test/test_helper.rb
@@ -0,0 +1,13 @@
+ lib_dir = File.dirname(__FILE__) + '/../lib'
+ require 'rubygems'
+ require 'test/unit'
+ require 'mocha'
+
+ $:.unshift lib_dir unless $:.include?(lib_dir)
+ require 'eden'
+
+ Dir.glob([File.dirname(__FILE__) + "/..//lib/eden/formatters/*.rb"] ) do |file|
+   require "#{file}"
+ end
+
+ require 'eden/defaults'
data/test/white_space_cleaner_test.rb
@@ -0,0 +1,35 @@
+ require File.dirname(__FILE__) + "/test_helper.rb"
+ require File.dirname(__FILE__) + "/../lib/eden/formatters/white_space_cleaner.rb"
+
+ class WhiteSpaceCleanerTest < Test::Unit::TestCase
+
+   def setup
+     @sf = Eden::SourceFile.new( "dummy.rb" )
+   end
+
+   def test_should_not_strip_whitespace_when_not_configured
+     WhiteSpaceCleaner.configure do |c|
+       c.remove_trailing_whitespace false
+     end
+
+     @sf.stubs(:source).returns("def function \n return nil \nend\n")
+     @sf.tokenize!
+     WhiteSpaceCleaner.format( @sf )
+     assert_equal "def function \n", @sf.lines[0].joined_tokens
+     assert_equal " return nil \n", @sf.lines[1].joined_tokens
+     assert_equal "end\n", @sf.lines[2].joined_tokens
+   end
+
+   def test_should_strip_whitespace
+     WhiteSpaceCleaner.configure do |c|
+       c.remove_trailing_whitespace true
+     end
+
+     @sf.stubs(:source).returns("def function \n return nil \nend\n")
+     @sf.tokenize!
+     WhiteSpaceCleaner.format( @sf )
+     assert_equal "def function\n", @sf.lines[0].joined_tokens
+     assert_equal " return nil\n", @sf.lines[1].joined_tokens
+     assert_equal "end\n", @sf.lines[2].joined_tokens
+   end
+ end
data/test/whitespace_token_test.rb
@@ -0,0 +1,63 @@
+ require File.dirname(__FILE__) + "/test_helper.rb"
+
+ class WhitespaceTokenTest < Test::Unit::TestCase
+   def setup
+     @sf = Eden::SourceFile.new( "dummy.rb" )
+   end
+
+   def test_leading_whitespace_tokenization
+     @sf.stubs(:source).returns(" token")
+     @sf.tokenize!
+     line = @sf.lines[0]
+     assert_equal 2, line.tokens.size
+     assert_equal " ", line.tokens[0].content
+     assert_equal :whitespace, line.tokens[0].type
+   end
+
+   def test_leading_whitespace_tab_tokenization
+     @sf.stubs(:source).returns("\t\ttoken")
+     @sf.tokenize!
+     line = @sf.lines[0]
+     assert_equal 2, line.tokens.size
+     assert_equal "\t\t", line.tokens[0].content
+     assert_equal :whitespace, line.tokens[0].type
+   end
+
+   def test_leading_whitespace_multiple_space_tokenization
+     @sf.stubs(:source).returns("\t token")
+     @sf.tokenize!
+     line = @sf.lines[0]
+     assert_equal 2, line.tokens.size
+     assert_equal "\t ", line.tokens[0].content
+     assert_equal :whitespace, line.tokens[0].type
+   end
+
+   def test_trailing_whitespace_tokenization
+     @sf.stubs(:source).returns("token ")
+     @sf.tokenize!
+     line = @sf.lines[0]
+     assert_equal 2, line.tokens.size
+     assert_equal " ", line.tokens[1].content
+     assert_equal :whitespace, line.tokens[1].type
+   end
+
+   def test_interstitial_whitespace_tokenization
+     @sf.stubs(:source).returns("token token")
+     @sf.tokenize!
+     line = @sf.lines[0]
+     assert_equal 3, line.tokens.size
+     assert_equal " ", line.tokens[1].content
+     assert_equal :whitespace, line.tokens[1].type
+   end
+
+   def test_comment_tokenization
+     @sf.stubs(:source).returns("token # Comment Rah Rah Rah\n")
+     @sf.tokenize!
+     tokens = @sf.lines[0].tokens
+     assert_equal 4, tokens.size
+     assert_equal "# Comment Rah Rah Rah", tokens[2].content
+     assert_equal :comment, tokens[2].type
+     assert_equal "\n", tokens[3].content
+     assert_equal :newline, tokens[3].type
+   end
+ end