syntax 0.5.0 → 0.7.0
- data/data/ruby.css +18 -0
- data/data/xml.css +8 -0
- data/data/yaml.css +12 -0
- data/lib/syntax.rb +8 -1
- data/lib/syntax/common.rb +30 -6
- data/lib/syntax/convertors/abstract.rb +24 -0
- data/lib/syntax/convertors/html.rb +18 -17
- data/lib/syntax/{ruby.rb → lang/ruby.rb} +84 -19
- data/lib/syntax/{xml.rb → lang/xml.rb} +0 -0
- data/lib/syntax/{yaml.rb → lang/yaml.rb} +0 -0
- data/lib/syntax/version.rb +1 -1
- data/test/syntax/tc_ruby.rb +500 -352
- data/test/syntax/tc_xml.rb +2 -2
- data/test/syntax/tc_yaml.rb +2 -2
- data/test/tc_syntax.rb +22 -0
- metadata +14 -8
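The headline changes in this release move the language tokenizers under `syntax/lang/` and add an abstract convertor base class (`convertors/abstract.rb`). For orientation, here is a minimal usage sketch of the HTML convertor those files back, assuming the gem's usual `for_syntax`/`convert` entry points, which are not themselves shown in this changeset:

    require 'syntax/convertors/html'

    # Build an HTML convertor backed by the Ruby tokenizer and render a snippet.
    convertor = Syntax::Convertors::HTML.for_syntax "ruby"
    puts convertor.convert( "def hello; puts 'world'; end" )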
data/test/syntax/tc_ruby.rb
CHANGED
@@ -1,518 +1,666 @@
-$:.unshift "
+$:.unshift File.dirname(__FILE__) +"/../../lib"
 
 require 'test/unit'
-require 'syntax/ruby'
+require 'syntax/lang/ruby'
 
 class TC_Syntax_Ruby < Test::Unit::TestCase
 
+  def tokenize( string )
+    @tokens = []
+    @ruby.tokenize( string ) { |tok| @tokens << tok }
+  end
+
+  def assert_next_token(group, lexeme, instruction=:none)
+    assert false, "no tokens in stack" if @tokens.nil? or @tokens.empty?
+    assert_equal [group, lexeme, instruction],
+      [@tokens.first.group, @tokens.first, @tokens.shift.instruction]
+  end
+
+  def assert_no_next_token
+    assert @tokens.empty?
+  end
+
+  def skip_token( n=1 )
+    n.times { @tokens.shift } unless @tokens.nil? || @tokens.empty?
+  end
+
   def setup
     @ruby = Syntax::Ruby.new
   end
 
   def test_empty
-
-
-    assert !called
+    tokenize( "" )
+    assert_no_next_token
   end
 
   def test_constant
-
-
-      called = true
-      assert_equal :constant, tok.group
-      assert_equal "Foo", tok
-    end
-    assert called
+    tokenize( "Foo" )
+    assert_next_token :constant, "Foo"
   end
 
   def test_ident
-
-
-      called = true
-      assert_equal :ident, tok.group
-      assert_equal "foo", tok
-    end
-    assert called
+    tokenize( "foo" )
+    assert_next_token :ident, "foo"
   end
 
   def test_comment_eol
-
-
-
-
-      assert_equal "# a comment", tok
-    end
-    assert called
+    tokenize( "# a comment\nfoo" )
+    assert_next_token :comment, "# a comment"
+    assert_next_token :normal, "\n"
+    assert_next_token :ident, "foo"
   end
 
   def test_comment_block
-
-
-
-
-      assert_equal "=begin\nthis is a comment\n=end", tok
-    end
-    assert called
+    tokenize( "=begin\nthis is a comment\n=end\nnoncomment" )
+    assert_next_token :comment, "=begin\nthis is a comment\n=end"
+    assert_next_token :normal, "\n"
+    assert_next_token :ident, "noncomment"
   end
 
   def test_keyword
     Syntax::Ruby::KEYWORDS.each do |word|
-
-
-      assert_equal [ :keyword, word ], [ tok.first.group, tok.first ]
+      tokenize( word )
+      assert_next_token :keyword, word
     end
     Syntax::Ruby::KEYWORDS.each do |word|
-
-
-
-      tok.shift
-      assert_equal [ :ident, word ], [ tok.first.group, tok.first ]
+      tokenize( "foo.#{word}" )
+      skip_token 2
+      assert_next_token :ident, word
     end
   end
 
   def test__END__
-
-
-      called = true
-      assert_equal :comment, tok.group
-      assert_equal "__END__\n\nblah blah blah", tok
-    end
-    assert called
+    tokenize( "__END__\n\nblah blah blah" )
+    assert_next_token :comment, "__END__\n\nblah blah blah"
   end
 
   def test_def_paren
-
-
-
-
-
-
-    assert_equal "foo", tok
-    assert_equal :method, tok.group
+    tokenize( "def foo(bar)" )
+    assert_next_token :keyword, "def "
+    assert_next_token :method, "foo"
+    assert_next_token :punct, "("
+    assert_next_token :ident, "bar"
+    assert_next_token :punct, ")"
   end
 
   def test_def_space
-
-
-
-
-
-    @ruby.step
-    assert_equal "foo", tok
-    assert_equal :method, tok.group
+    tokenize( "def foo bar" )
+    assert_next_token :keyword, "def "
+    assert_next_token :method, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :ident, "bar"
   end
 
   def test_def_semicolon
-
-
-
-
-    assert_equal :keyword, tok.group
-    @ruby.step
-    assert_equal "foo", tok
-    assert_equal :method, tok.group
+    tokenize( "def foo;" )
+    assert_next_token :keyword, "def "
+    assert_next_token :method, "foo"
+    assert_next_token :punct, ";"
   end
 
   def test_class_space
-
-
-
-
-    assert_equal :keyword, tok.group
-    @ruby.step
-    assert_equal "Foo", tok
-    assert_equal :class, tok.group
+    tokenize( "class Foo\n" )
+    assert_next_token :keyword, "class "
+    assert_next_token :class, "Foo"
+    assert_next_token :normal, "\n"
   end
 
   def test_class_semicolon
-
-
-
-
-    assert_equal :keyword, tok.group
-    @ruby.step
-    assert_equal "Foo", tok
-    assert_equal :class, tok.group
+    tokenize( "class Foo;" )
+    assert_next_token :keyword, "class "
+    assert_next_token :class, "Foo"
+    assert_next_token :punct, ";"
   end
 
   def test_class_extend
-
-
-
-
-
-
-    assert_equal "Foo", tok
-    assert_equal :class, tok.group
+    tokenize( "class Foo< Bang" )
+    assert_next_token :keyword, "class "
+    assert_next_token :class, "Foo"
+    assert_next_token :punct, "<"
+    assert_next_token :normal, " "
+    assert_next_token :constant, "Bang"
   end
 
   def test_module_space
-
-
-
-
-    assert_equal :keyword, tok.group
-    @ruby.step
-    assert_equal "Foo", tok
-    assert_equal :module, tok.group
+    tokenize( "module Foo\n" )
+    assert_next_token :keyword, "module "
+    assert_next_token :module, "Foo"
+    assert_next_token :normal, "\n"
   end
 
   def test_module_semicolon
-
-
-
-
-    assert_equal :keyword, tok.group
-    @ruby.step
-    assert_equal "Foo", tok
-    assert_equal :module, tok.group
+    tokenize( "module Foo;" )
+    assert_next_token :keyword, "module "
+    assert_next_token :module, "Foo"
+    assert_next_token :punct, ";"
   end
 
   def test_module_other
-
-
-
-    assert_equal "module ", tok
-    assert_equal :keyword, tok.group
-    @ruby.step
-    assert_equal "Foo!", tok
-    assert_equal :module, tok.group
+    tokenize( "module Foo!\n" )
+    assert_next_token :keyword, "module "
+    assert_next_token :module, "Foo!"
   end
 
   def test_scope_operator
-
-
-
-
-    assert_equal :punct, tok.shift.group
-    assert_equal "Bar", tok.first
+    tokenize( "Foo::Bar" )
+    assert_next_token :constant, "Foo"
+    assert_next_token :punct, "::"
+    assert_next_token :constant, "Bar"
   end
 
   def test_symbol_dquote
-
-
-
-
+    tokenize( ':"foo"' )
+    assert_next_token :symbol, ':"'
+    assert_next_token :symbol, '', :region_open
+    assert_next_token :symbol, 'foo'
+    assert_next_token :symbol, '', :region_close
+    assert_next_token :symbol, '"'
+    assert_no_next_token
   end
 
   def test_symbol_squote
-
-
-
-
+    tokenize( ":'foo'" )
+    assert_next_token :symbol, ":'"
+    assert_next_token :symbol, "", :region_open
+    assert_next_token :symbol, "foo"
+    assert_next_token :symbol, "", :region_close
+    assert_next_token :symbol, "'"
+    assert_no_next_token
   end
 
   def test_symbol
-
-
-    assert_equal ":foo_bar?", tok.first
-    assert_equal :symbol, tok.first.group
+    tokenize( ":foo_bar?" )
+    assert_next_token :symbol, ":foo_bar?"
   end
 
   def test_char
-
-
-    assert_equal "?.", tok.first
-    assert_equal :char, tok.first.group
+    tokenize( "?." )
+    assert_next_token :char, "?."
 
-
-
-    assert_equal '?\n', tok.first
-    assert_equal :char, tok.first.group
+    tokenize( '?\n' )
+    assert_next_token :char, '?\n'
   end
 
   def test_specials
     %w{__FILE__ __LINE__ true false nil self}.each do |word|
-
-
-      assert_equal word, tok.first
-      assert_equal :constant, tok.first.group
+      tokenize( word )
+      assert_next_token :constant, word
     end
 
     %w{__FILE__ __LINE__ true false nil self}.each do |word|
-
-
-      assert_equal "#{word}?", tok.first
-      assert_equal :ident, tok.first.group
+      tokenize( "#{word}?" )
+      assert_next_token :ident, "#{word}?"
    end
 
     %w{__FILE__ __LINE__ true false nil self}.each do |word|
-
-
-      assert_equal "#{word}!", tok.first
-      assert_equal :ident, tok.first.group
+      tokenize( "#{word}!" )
+      assert_next_token :ident, "#{word}!"
    end
 
     %w{__FILE__ __LINE__ true false nil self}.each do |word|
-
-
-
-      tok.shift
-      assert_equal word, tok.first
-      assert_equal :ident, tok.first.group
+      tokenize( "x.#{word}" )
+      skip_token 2
+      assert_next_token :ident, word
     end
   end
 
   def test_pct_r
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    tokenize( '%r{foo#{x}bar}' )
+    assert_next_token :punct, "%r{"
+    assert_next_token :regex, "", :region_open
+    assert_next_token :regex, "foo"
+    assert_next_token :expr, '#{x}'
+    assert_next_token :regex, "bar"
+    assert_next_token :regex, "", :region_close
+    assert_next_token :punct, "}"
+
+    tokenize( '%r-foo#{x}bar-' )
+    assert_next_token :punct, "%r-"
+    assert_next_token :regex, "", :region_open
+    assert_next_token :regex, "foo"
+    assert_next_token :expr, '#{x}'
+    assert_next_token :regex, "bar"
+    assert_next_token :regex, "", :region_close
+    assert_next_token :punct, "-"
+  end
+
+  def test_pct_r_with_wakas
+    tokenize '%r<foo#{x}bar> foo'
+    assert_next_token :punct, "%r<"
+    assert_next_token :regex, "", :region_open
+    assert_next_token :regex, "foo"
+    assert_next_token :expr, '#{x}'
+    assert_next_token :regex, "bar"
+    assert_next_token :regex, "", :region_close
+    assert_next_token :punct, ">"
+    assert_next_token :normal, " "
+    assert_next_token :ident, "foo"
+  end
+
+  def test_pct_w_brace
+    tokenize( '%w{foo bar baz}' )
+    assert_next_token :punct, "%w{"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'foo bar baz'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "}"
   end
 
   def test_pct_w
-
-
-
-
-
+    tokenize( '%w-foo#{x} bar baz-' )
+    assert_next_token :punct, "%w-"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'foo#{x} bar baz'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "-"
   end
 
   def test_pct_q
-
-
-
-
-
+    tokenize( '%q-hello #{world}-' )
+    assert_next_token :punct, "%q-"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'hello #{world}'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "-"
  end
 
   def test_pct_s
-
-
-
-
-
+    tokenize( '%s-hello #{world}-' )
+    assert_next_token :punct, "%s-"
+    assert_next_token :symbol, '', :region_open
+    assert_next_token :symbol, 'hello #{world}'
+    assert_next_token :symbol, '', :region_close
+    assert_next_token :punct, "-"
   end
 
   def test_pct_W
-
-
-
-
-
-
-
+    tokenize( '%W-foo#{x} bar baz-' )
+    assert_next_token :punct, "%W-"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'foo'
+    assert_next_token :expr, '#{x}'
+    assert_next_token :string, ' bar baz'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "-"
  end
 
   def test_pct_Q
-
-
-
-
-
-
+    tokenize( '%Q-hello #{world}-' )
+    assert_next_token :punct, "%Q-"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'hello '
+    assert_next_token :expr, '#{world}'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "-"
   end
 
   def test_pct_x
-
-
-
-
-
-
+    tokenize( '%x-ls /blah/#{foo}-' )
+    assert_next_token :punct, "%x-"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'ls /blah/'
+    assert_next_token :expr, '#{foo}'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "-"
   end
 
   def test_pct_string
-
-
-
-
-
-
+    tokenize( '%-hello #{world}-' )
+    assert_next_token :punct, "%-"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'hello '
+    assert_next_token :expr, '#{world}'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "-"
   end
 
   def test_bad_pct_string
-
-
-
-
-
-
-    assert_equal [ :comment, '#{world}0' ], [ tok.first.group, tok.shift ]
+    tokenize( '%0hello #{world}0' )
+    assert_next_token :punct, "%"
+    assert_next_token :number, '0'
+    assert_next_token :ident, 'hello'
+    assert_next_token :normal, ' '
+    assert_next_token :comment, '#{world}0'
   end
 
   def test_shift_left
-
-
-
-
-
-
-
+    tokenize( 'foo << 5' )
+    assert_next_token :ident, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<"
+    assert_next_token :normal, " "
+    assert_next_token :number, "5"
+  end
+
+  def test_shift_left_no_white
+    tokenize( 'foo<<5' )
+    assert_next_token :ident, "foo"
+    assert_next_token :punct, "<<"
+    assert_next_token :number, "5"
   end
 
   def test_here_doc_no_opts
-
-
-
-
-
-
-
-
+    tokenize( "foo <<EOF\n foo\n bar\n baz\nEOF" )
+    assert_next_token :ident, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<"
+    assert_next_token :constant, "EOF"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n foo\n bar\n baz\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "EOF"
   end
 
   def test_here_doc_no_opts_missing_end
-
-
-
-
-
-
-
+    tokenize( "foo <<EOF\n foo\n bar\n baz\n EOF" )
+    assert_next_token :ident, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<"
+    assert_next_token :constant, "EOF"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n foo\n bar\n baz\n EOF"
+    assert_no_next_token
   end
 
   def test_here_doc_float_right
-
-
-
-
-
-
-
-
+    tokenize( "foo <<-EOF\n foo\n bar\n baz\n EOF" )
+    assert_next_token :ident, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<-"
+    assert_next_token :constant, "EOF"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n foo\n bar\n baz\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, " EOF"
   end
 
   def test_here_doc_single_quotes
-
-
-
-
-
-
-
-
-
+    tokenize( "foo <<'EOF'\n foo\#{x}\n bar\n baz\nEOF" )
+    assert_next_token :ident, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<'"
+    assert_next_token :constant, "EOF"
+    assert_next_token :punct, "'"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n foo\#{x}\n bar\n baz\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "EOF"
   end
 
   def test_here_doc_double_quotes
-
-
-
-
-
-
-
-
-
-
-
+    tokenize( "foo <<\"EOF\"\n foo\#{x}\n bar\n baz\nEOF" )
+    assert_next_token :ident, "foo"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<\""
+    assert_next_token :constant, "EOF"
+    assert_next_token :punct, "\""
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n foo"
+    assert_next_token :expr, '#{x}'
+    assert_next_token :string, "\n bar\n baz\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "EOF"
   end
 
   def test_space
-
-
-    assert_equal [ :normal, "\n \t\t\n\n\r\n" ], [ tok.first.group, tok.shift ]
+    tokenize( "\n \t\t\n\n\r\n" )
+    assert_next_token :normal, "\n \t\t\n\n\r\n"
   end
 
   def test_number
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    assert_equal [ :number, "1_2.5_2e3_2" ], [ tok.first.group, tok.shift ]
+    tokenize( "1 1.0 1e5 1.0e5 1_2.5 1_2.5_2 1_2.5_2e3_2" )
+    assert_next_token :number, "1"
+    skip_token
+    assert_next_token :number, "1.0"
+    skip_token
+    assert_next_token :number, "1e5"
+    skip_token
+    assert_next_token :number, "1.0e5"
+    skip_token
+    assert_next_token :number, "1_2.5"
+    skip_token
+    assert_next_token :number, "1_2.5_2"
+    skip_token
+    assert_next_token :number, "1_2.5_2e3_2"
  end
 
   def test_dquoted_string
-
-
-
-
-
-
-
-
-
-
-
-
-
+    tokenize( '"foo #{x} bar\"\n\tbaz\xA5b\5\1234"' )
+    assert_next_token :punct, '"'
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'foo '
+    assert_next_token :expr, '#{x}'
+    assert_next_token :string, ' bar'
+    assert_next_token :escape, '\"\n\t'
+    assert_next_token :string, 'baz'
+    assert_next_token :escape, '\xA5'
+    assert_next_token :string, 'b'
+    assert_next_token :escape, '\5\123'
+    assert_next_token :string, '4'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, '"'
   end
 
   def test_squoted_string
-
-
-
-
-
-
-
-
-
+    tokenize( '\'foo #{x} bar\\\'\n\tbaz\\\\\xA5b\5\1234\'' )
+    assert_next_token :punct, "'"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, 'foo #{x} bar'
+    assert_next_token :escape, '\\\''
+    assert_next_token :string, '\n\tbaz'
+    assert_next_token :escape, '\\\\'
+    assert_next_token :string, '\xA5b\5\1234'
+    assert_next_token :string, "", :region_close
+    assert_next_token :punct, "'"
   end
 
   def test_dot_selector
-
-
-
-
-    assert_equal [ :ident, "nil" ], [ tok.first.group, tok.shift ]
+    tokenize( 'foo.nil' )
+    skip_token
+    assert_next_token :punct, "."
+    assert_next_token :ident, "nil"
   end
 
   def test_dot_range_inclusive
-
-
-
-
-    assert_equal [ :constant, "nil" ], [ tok.first.group, tok.shift ]
+    tokenize( 'foo..nil' )
+    skip_token
+    assert_next_token :punct, ".."
+    assert_next_token :constant, "nil"
   end
 
   def test_dot_range_exclusive
-
-
-
-
-    assert_equal [ :constant, "nil" ], [ tok.first.group, tok.shift ]
+    tokenize( 'foo...nil' )
+    skip_token
+    assert_next_token :punct, "..."
+    assert_next_token :constant, "nil"
  end
 
   def test_dot_range_many
-
-
-
-
-    assert_equal [ :constant, "nil" ], [ tok.first.group, tok.shift ]
+    tokenize( 'foo.....nil' )
+    skip_token
+    assert_next_token :punct, "....."
+    assert_next_token :constant, "nil"
   end
 
   def test_attribute
-
-
-    assert_equal [ :attribute, "@var_foo" ], [ tok.first.group, tok.shift ]
+    tokenize( '@var_foo' )
+    assert_next_token :attribute, "@var_foo"
   end
 
   def test_global
-
-
-
-
-
-
-
-
-
-
-
-
+    tokenize( '$var_foo' )
+    assert_next_token :global, "$var_foo"
+    tokenize( '$12' )
+    assert_next_token :global, "$12"
+    tokenize( '$/f' )
+    assert_next_token :global, "$/"
+    tokenize( "$\n" )
+    assert_next_token :global, "$"
+  end
+
+  def test_paren_delimiter
+    tokenize( '%w(a)' )
+    assert_next_token :punct, "%w("
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "a"
+    assert_next_token :string, "", :region_close
+    assert_next_token :punct, ")"
+  end
+
+  def test_division
+    tokenize( 'm / 3' )
+    assert_next_token :ident, "m"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "/"
+    assert_next_token :normal, " "
+    assert_next_token :number, "3"
+  end
+
+  def test_regex
+    tokenize( 'm =~ /3/' )
+    assert_next_token :ident, "m"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "=~"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "/"
+    assert_next_token :regex, "", :region_open
+    assert_next_token :regex, "3"
+    assert_next_token :regex, "", :region_close
+    assert_next_token :punct, "/"
+  end
+
+  def test_heredoc_with_trailing_text
+    tokenize( "foo('here', <<EOF)\n A heredoc.\nEOF\nfoo" )
+    assert_next_token :ident, "foo"
+    assert_next_token :punct, "('"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'here'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "',"
+    assert_next_token :normal, ' '
+    assert_next_token :punct, '<<'
+    assert_next_token :constant, "EOF"
+    assert_next_token :punct, ')'
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n A heredoc.\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "EOF"
+    assert_next_token :normal, "\n"
+    assert_next_token :ident, "foo"
+  end
+
+  def test_multiple_heredocs
+    tokenize( <<'TEST' )
+foo('here', <<EOF, 'there', <<-'FOO', 'blah')
+First heredoc, right here.
+Expressions are #{allowed}
+EOF
+Another heredoc, immediately after the first.
+Expressions are not #{allowed}
+FOO
+TEST
+    assert_next_token :ident, "foo"
+    assert_next_token :punct, "('"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'here'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "',"
+    assert_next_token :normal, ' '
+    assert_next_token :punct, '<<'
+    assert_next_token :constant, "EOF"
+    assert_next_token :punct, ','
+    assert_next_token :normal, ' '
+    assert_next_token :punct, "'"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'there'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "',"
+    assert_next_token :normal, ' '
+    assert_next_token :punct, "<<-'"
+    assert_next_token :constant, "FOO"
+    assert_next_token :punct, "',"
+    assert_next_token :normal, ' '
+    assert_next_token :punct, "'"
+    assert_next_token :string, '', :region_open
+    assert_next_token :string, 'blah'
+    assert_next_token :string, '', :region_close
+    assert_next_token :punct, "')"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\nFirst heredoc, right here.\nExpressions are "
+    assert_next_token :expr, '#{allowed}'
+    assert_next_token :string, "\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "EOF"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\n Another heredoc, immediately after the first.\n Expressions are not \#{allowed}\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, " FOO"
+  end
+
+  def test_carldr_bad_heredoc_001
+    tokenize( <<'TEST' )
+str = <<END
+here document #{1 + 1}
+END
+
+if str
+TEST
+
+    assert_next_token :ident, "str"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "="
+    assert_next_token :normal, " "
+    assert_next_token :punct, "<<"
+    assert_next_token :constant, "END"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\nhere document "
+    assert_next_token :expr, '#{1 + 1}'
+    assert_next_token :string, "\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "END"
+    assert_next_token :normal, "\n\n"
+    assert_next_token :keyword, "if"
+    assert_next_token :normal, " "
+    assert_next_token :ident, "str"
+  end
+
+  def test_regex_after_keyword
+    tokenize "when /[0-7]/\nfoo"
+    assert_next_token :keyword, "when"
+    assert_next_token :normal, " "
+    assert_next_token :punct, "/"
+    assert_next_token :regex, "", :region_open
+    assert_next_token :regex, "[0-7]"
+    assert_next_token :regex, "", :region_close
+    assert_next_token :punct, "/"
+    assert_next_token :normal, "\n"
+    assert_next_token :ident, "foo"
+  end
+
+  def test_heredoc_after_lparen
+    tokenize "foo(<<SRC, obj)\nblah blah\nSRC\nfoo"
+    assert_next_token :ident, "foo"
+    assert_next_token :punct, "(<<"
+    assert_next_token :constant, "SRC"
+    assert_next_token :punct, ","
+    assert_next_token :normal, " "
+    assert_next_token :ident, "obj"
+    assert_next_token :punct, ")"
+    assert_next_token :string, "", :region_open
+    assert_next_token :string, "\nblah blah\n"
+    assert_next_token :string, "", :region_close
+    assert_next_token :constant, "SRC"
+    assert_next_token :normal, "\n"
+    assert_next_token :ident, "foo"
+  end
+
+  def test_division_after_parens
+    tokenize "(a+b)/2"
+    assert_next_token :punct, "("
+    assert_next_token :ident, "a"
+    assert_next_token :punct, "+"
+    assert_next_token :ident, "b"
+    assert_next_token :punct, ")/"
+    assert_next_token :number, "2"
   end
-
 end
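Every rewritten test above drives `Syntax::Ruby#tokenize` through the new `tokenize`/`assert_next_token` helpers; the yielded tokens respond to `group` and `instruction`, with `:region_open`/`:region_close` bracketing delimited regions such as strings and regexes. A standalone sketch of that pattern, using only the API exercised by these tests:

    require 'syntax/lang/ruby'

    tokenizer = Syntax::Ruby.new

    # Print each token's group, instruction, and text for a small snippet.
    tokenizer.tokenize( "def foo(bar); end" ) do |tok|
      puts format("%-10s %-14s %p", tok.group, tok.instruction, tok.to_s)
    end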