syntax 1.1.0 → 1.2.0

data/CHANGELOG CHANGED
@@ -1,6 +1,9 @@
  Syntax: a syntax highlighting library for Ruby.
 
- 1.1.0 In Progress
+ 1.2.0 02 Jan 2014
+ Cleaned up Gemspec, added license and homepage - @grosser.
+
+ 1.1.0 11 Dec 2013
  Published from https://github.com/dblock/syntax, a now maintained fork.
  The project builds again and runs, fixes by @dblock, @distler.
 
data/README.rdoc CHANGED
@@ -39,11 +39,3 @@ Tokenizing is straightforward process. Each time a new token is discovered by th
  * <tt>token.instruction</tt> is an instruction used to determine how this token should be treated. It will be <tt>:none</tt> for normal tokens, <tt>:region_open</tt> if the token starts a nested region, and <tt>:region_close</tt> if it closes the last opened region.
  * <tt>token</tt> is itself a subclass of String, so you can use it just as you would a string. It represents the lexeme that was actually parsed.
 
- == Releasing Syntax
-
- * Update version in <tt>lib/syntax/version.rb</tt>.
-
- * Build the gem, use Ruby 1.9.3.
-
- rake clean
- rake package
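
The two README.rdoc bullets kept as context above describe the Token objects the tokenizer yields. As a rough illustration only (it assumes the gem's documented Syntax.load entry point and the Token#group accessor, neither of which appears in this hunk), walking the tokens of a small snippet looks like:

    require 'syntax'

    # Load the Ruby tokenizer and print each yielded token. Per the bullets
    # above, every token is a String subclass and also carries an instruction
    # (:none, :region_open, or :region_close).
    tokenizer = Syntax.load("ruby")
    tokenizer.tokenize(%(def foo; "bar"; end)) do |token|
      puts format("%-10s %-14s %p", token.group, token.instruction, token.to_s)
    end

The group symbols printed here (:keyword, :method, :punct, :string, ...) are the same ones exercised by the test suite removed further down in this diff.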
data/lib/syntax/version.rb CHANGED
@@ -1,7 +1,7 @@
  module Syntax
  module Version
  MAJOR=1
- MINOR=1
+ MINOR=2
  TINY=0
 
  STRING=[MAJOR,MINOR,TINY].join('.')
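
The MINOR bump above is the only code-level change in this release; the constants join into the new version string. A quick sanity check (hypothetical irb session, shown only for orientation):

    require 'syntax/version'

    # MAJOR=1, MINOR=2 and TINY=0 are joined with '.' as in the hunk above.
    Syntax::Version::STRING   # => "1.2.0"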
metadata CHANGED
@@ -1,22 +1,25 @@
  --- !ruby/object:Gem::Specification
  name: syntax
  version: !ruby/object:Gem::Version
- version: 1.1.0
+ version: 1.2.0
  prerelease:
  platform: ruby
  authors:
  - Jamis Buck
- autorequire: syntax
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2013-12-11 00:00:00.000000000 Z
+ date: 2014-01-02 00:00:00.000000000 Z
  dependencies: []
- description:
+ description: Syntax is Ruby library for performing simple syntax highlighting.
  email: jamis@jamisbuck.org
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - LICENSE
+ - README.rdoc
+ - CHANGELOG
  - lib/syntax/common.rb
  - lib/syntax/convertors/abstract.rb
  - lib/syntax/convertors/html.rb
@@ -30,17 +33,9 @@ files:
  - lib/syntax/lang/yaml.rb
  - lib/syntax/version.rb
  - lib/syntax.rb
- - test/ALL-TESTS.rb
- - test/syntax/tc_ruby.rb
- - test/syntax/tc_xml.rb
- - test/syntax/tc_yaml.rb
- - test/syntax/tokenizer_testcase.rb
- - test/tc_syntax.rb
- - README.rdoc
- - LICENSE
- - CHANGELOG
- homepage:
- licenses: []
+ homepage: https://github.com/dblock/syntax
+ licenses:
+ - BSD
  post_install_message:
  rdoc_options: []
  require_paths:
@@ -62,6 +57,5 @@ rubyforge_project:
  rubygems_version: 1.8.25
  signing_key:
  specification_version: 3
- summary: Syntax is Ruby library for performing simple syntax highlighting.
- test_files:
- - test/ALL-TESTS.rb
+ summary: Perform simple syntax highlighting.
+ test_files: []
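
Taken together, the metadata hunks above implement the "Cleaned up Gemspec, added license and homepage" CHANGELOG entry: description, homepage and BSD license are now set, autorequire is dropped, and the test files are no longer shipped or registered as test_files. A gemspec that would produce this metadata might look roughly like the sketch below; the file name and layout are assumptions, while the field values are copied from the diff:

    # syntax.gemspec -- illustrative sketch; values mirror the metadata above
    require File.expand_path("../lib/syntax/version", __FILE__)

    Gem::Specification.new do |s|
      s.name        = "syntax"
      s.version     = Syntax::Version::STRING   # 1.2.0
      s.authors     = ["Jamis Buck"]
      s.email       = "jamis@jamisbuck.org"
      s.homepage    = "https://github.com/dblock/syntax"
      s.licenses    = ["BSD"]
      s.summary     = "Perform simple syntax highlighting."
      s.description = "Syntax is Ruby library for performing simple syntax highlighting."
      s.files       = ["LICENSE", "README.rdoc", "CHANGELOG"] + Dir["lib/**/*.rb"]
    end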
data/test/ALL-TESTS.rb DELETED
@@ -1,5 +0,0 @@
- #!/usr/bin/env ruby
- $:.unshift "../lib"
-
- Dir.chdir File.dirname(__FILE__)
- Dir["**/tc_*.rb"].each { |file| load File.expand_path(file) }
data/test/syntax/tc_ruby.rb DELETED
@@ -1,871 +0,0 @@
- require File.dirname(__FILE__) + "/tokenizer_testcase"
-
- class TC_Syntax_Ruby < TokenizerTestCase
-
- syntax "ruby"
-
- def test_empty
- tokenize ""
- assert_no_next_token
- end
-
- def test_constant
- tokenize "Foo"
- assert_next_token :constant, "Foo"
- end
-
- def test_ident
- tokenize "foo"
- assert_next_token :ident, "foo"
- end
-
- def test_comment_eol
- tokenize "# a comment\nfoo"
- assert_next_token :comment, "# a comment"
- assert_next_token :normal, "\n"
- assert_next_token :ident, "foo"
- end
-
- def test_comment_block
- tokenize "=begin\nthis is a comment\n=end\nnoncomment"
- assert_next_token :comment, "=begin\nthis is a comment\n=end"
- assert_next_token :normal, "\n"
- assert_next_token :ident, "noncomment"
- end
-
- def test_comment_block_with_CRNL
- tokenize "=begin\r\nthis is a comment\r\n=end\r\nnoncomment"
- assert_next_token :comment, "=begin\r\nthis is a comment\r\n=end"
- assert_next_token :normal, "\r\n"
- assert_next_token :ident, "noncomment"
- end
-
- def test_keyword
- Syntax::Ruby::KEYWORDS.each do |word|
- tokenize word
- assert_next_token :keyword, word
- end
- Syntax::Ruby::KEYWORDS.each do |word|
- tokenize "foo.#{word}"
- skip_token 2
- assert_next_token :ident, word
- end
- end
-
- def test__END__
- tokenize "__END__\n\nblah blah blah"
- assert_next_token :comment, "__END__\n\nblah blah blah"
- end
-
- def test__END__with_CRNL
- tokenize "__END__\r\nblah blah blah"
- assert_next_token :comment, "__END__\r\nblah blah blah"
- end
-
- def test_def_paren
- tokenize "def foo(bar)"
- assert_next_token :keyword, "def "
- assert_next_token :method, "foo"
- assert_next_token :punct, "("
- assert_next_token :ident, "bar"
- assert_next_token :punct, ")"
- end
-
- def test_def_space
- tokenize "def foo bar"
- assert_next_token :keyword, "def "
- assert_next_token :method, "foo"
- assert_next_token :normal, " "
- assert_next_token :ident, "bar"
- end
-
- def test_def_semicolon
- tokenize "def foo;"
- assert_next_token :keyword, "def "
- assert_next_token :method, "foo"
- assert_next_token :punct, ";"
- end
-
- def test_def_eol
- tokenize "def foo"
- assert_next_token :keyword, "def "
- assert_next_token :method, "foo"
- end
-
- def test_class_space
- tokenize "class Foo\n"
- assert_next_token :keyword, "class "
- assert_next_token :class, "Foo"
- assert_next_token :normal, "\n"
- end
-
- def test_class_semicolon
- tokenize "class Foo;"
- assert_next_token :keyword, "class "
- assert_next_token :class, "Foo"
- assert_next_token :punct, ";"
- end
-
- def test_class_extend
- tokenize "class Foo< Bang"
- assert_next_token :keyword, "class "
- assert_next_token :class, "Foo"
- assert_next_token :punct, "<"
- assert_next_token :normal, " "
- assert_next_token :constant, "Bang"
- end
-
- def test_module_space
- tokenize "module Foo\n"
- assert_next_token :keyword, "module "
- assert_next_token :module, "Foo"
- assert_next_token :normal, "\n"
- end
-
- def test_module_semicolon
- tokenize "module Foo;"
- assert_next_token :keyword, "module "
- assert_next_token :module, "Foo"
- assert_next_token :punct, ";"
- end
-
- def test_module_other
- tokenize "module Foo!\n"
- assert_next_token :keyword, "module "
- assert_next_token :module, "Foo!"
- end
-
- def test_scope_operator
- tokenize "Foo::Bar"
- assert_next_token :constant, "Foo"
- assert_next_token :punct, "::"
- assert_next_token :constant, "Bar"
- end
-
- def test_symbol_dquote
- tokenize ':"foo"'
- assert_next_token :symbol, ':"'
- assert_next_token :symbol, '', :region_open
- assert_next_token :symbol, 'foo'
- assert_next_token :symbol, '', :region_close
- assert_next_token :symbol, '"'
- assert_no_next_token
- end
-
- def test_symbol_squote
- tokenize ":'foo'"
- assert_next_token :symbol, ":'"
- assert_next_token :symbol, "", :region_open
- assert_next_token :symbol, "foo"
- assert_next_token :symbol, "", :region_close
- assert_next_token :symbol, "'"
- assert_no_next_token
- end
-
- def test_symbol
- tokenize ":foo_123"
- assert_next_token :symbol, ":foo_123"
-
- tokenize ":123"
- assert_next_token :punct, ":"
- assert_next_token :number, "123"
-
- tokenize ":foo="
- assert_next_token :symbol, ":foo="
-
- tokenize ":foo!"
- assert_next_token :symbol, ":foo!"
-
- tokenize ":foo?"
- assert_next_token :symbol, ":foo?"
- end
-
- def test_char
- tokenize "?."
- assert_next_token :char, "?."
-
- tokenize '?\n'
- assert_next_token :char, '?\n'
- end
-
- def test_specials
- %w{__FILE__ __LINE__ true false nil self}.each do |word|
- tokenize word
- assert_next_token :constant, word
- end
-
- %w{__FILE__ __LINE__ true false nil self}.each do |word|
- tokenize "#{word}?"
- assert_next_token :ident, "#{word}?"
- end
-
- %w{__FILE__ __LINE__ true false nil self}.each do |word|
- tokenize "#{word}!"
- assert_next_token :ident, "#{word}!"
- end
-
- %w{__FILE__ __LINE__ true false nil self}.each do |word|
- tokenize "x.#{word}"
- skip_token 2
- assert_next_token :ident, word
- end
- end
-
- def test_pct_r
- tokenize '%r{foo#{x}bar}'
- assert_next_token :punct, "%r{"
- assert_next_token :regex, "", :region_open
- assert_next_token :regex, "foo"
- assert_next_token :expr, '#{x}'
- assert_next_token :regex, "bar"
- assert_next_token :regex, "", :region_close
- assert_next_token :punct, "}"
-
- tokenize '%r-foo#{x}bar-'
- assert_next_token :punct, "%r-"
- assert_next_token :regex, "", :region_open
- assert_next_token :regex, "foo"
- assert_next_token :expr, '#{x}'
- assert_next_token :regex, "bar"
- assert_next_token :regex, "", :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_r_with_wakas
- tokenize '%r<foo#{x}bar> foo'
- assert_next_token :punct, "%r<"
- assert_next_token :regex, "", :region_open
- assert_next_token :regex, "foo"
- assert_next_token :expr, '#{x}'
- assert_next_token :regex, "bar"
- assert_next_token :regex, "", :region_close
- assert_next_token :punct, ">"
- assert_next_token :normal, " "
- assert_next_token :ident, "foo"
- end
-
- def test_pct_w_brace
- tokenize '%w{foo bar baz}'
- assert_next_token :punct, "%w{"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'foo bar baz'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "}"
- end
-
- def test_pct_w
- tokenize '%w-foo#{x} bar baz-'
- assert_next_token :punct, "%w-"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'foo#{x} bar baz'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_q
- tokenize '%q-hello #{world}-'
- assert_next_token :punct, "%q-"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'hello #{world}'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_s
- tokenize '%s-hello #{world}-'
- assert_next_token :punct, "%s-"
- assert_next_token :symbol, '', :region_open
- assert_next_token :symbol, 'hello #{world}'
- assert_next_token :symbol, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_W
- tokenize '%W-foo#{x} bar baz-'
- assert_next_token :punct, "%W-"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'foo'
- assert_next_token :expr, '#{x}'
- assert_next_token :string, ' bar baz'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_Q
- tokenize '%Q-hello #{world}-'
- assert_next_token :punct, "%Q-"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'hello '
- assert_next_token :expr, '#{world}'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_x
- tokenize '%x-ls /blah/#{foo}-'
- assert_next_token :punct, "%x-"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'ls /blah/'
- assert_next_token :expr, '#{foo}'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_pct_string
- tokenize '%-hello #{world}-'
- assert_next_token :punct, "%-"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'hello '
- assert_next_token :expr, '#{world}'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "-"
- end
-
- def test_bad_pct_string
- tokenize '%0hello #{world}0'
- assert_next_token :punct, "%"
- assert_next_token :number, '0'
- assert_next_token :ident, 'hello'
- assert_next_token :normal, ' '
- assert_next_token :comment, '#{world}0'
- end
-
- def test_shift_left
- tokenize 'foo << 5'
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<"
- assert_next_token :normal, " "
- assert_next_token :number, "5"
- end
-
- def test_shift_left_no_white
- tokenize 'foo<<5'
- assert_next_token :ident, "foo"
- assert_next_token :punct, "<<"
- assert_next_token :number, "5"
- end
-
- def test_here_doc_no_opts
- tokenize "foo <<EOF\n foo\n bar\n baz\nEOF"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<"
- assert_next_token :constant, "EOF"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n foo\n bar\n baz\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "EOF"
- end
-
- def test_here_doc_no_opts_missing_end
- tokenize "foo <<EOF\n foo\n bar\n baz\n EOF"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<"
- assert_next_token :constant, "EOF"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n foo\n bar\n baz\n EOF"
- assert_no_next_token
- end
-
- def test_here_doc_float_right
- tokenize "foo <<-EOF\n foo\n bar\n baz\n EOF"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<-"
- assert_next_token :constant, "EOF"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n foo\n bar\n baz\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, " EOF"
- end
-
- def test_here_doc_single_quotes
- tokenize "foo <<'EOF'\n foo\#{x}\n bar\n baz\nEOF"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<'"
- assert_next_token :constant, "EOF"
- assert_next_token :punct, "'"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n foo\#{x}\n bar\n baz\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "EOF"
- end
-
- def test_here_doc_double_quotes
- tokenize "foo <<\"EOF\"\n foo\#{x}\n bar\n baz\nEOF"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<\""
- assert_next_token :constant, "EOF"
- assert_next_token :punct, "\""
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n foo"
- assert_next_token :expr, '#{x}'
- assert_next_token :string, "\n bar\n baz\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "EOF"
- end
-
- def test_space
- tokenize "\n \t\t\n\n\r\n"
- assert_next_token :normal, "\n \t\t\n\n\r\n"
- end
-
- def test_number
- tokenize "1 1.0 1e5 1.0e5 1_2.5 1_2.5_2 1_2.5_2e3_2"
- assert_next_token :number, "1"
- skip_token
- assert_next_token :number, "1.0"
- skip_token
- assert_next_token :number, "1e5"
- skip_token
- assert_next_token :number, "1.0e5"
- skip_token
- assert_next_token :number, "1_2.5"
- skip_token
- assert_next_token :number, "1_2.5_2"
- skip_token
- assert_next_token :number, "1_2.5_2e3_2"
- end
-
- def test_dquoted_string
- tokenize '"foo #{x} bar\"\n\tbaz\xA5b\5\1234"'
- assert_next_token :punct, '"'
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'foo '
- assert_next_token :expr, '#{x}'
- assert_next_token :string, ' bar'
- assert_next_token :escape, '\"\n\t'
- assert_next_token :string, 'baz'
- assert_next_token :escape, '\xA5'
- assert_next_token :string, 'b'
- assert_next_token :escape, '\5\123'
- assert_next_token :string, '4'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, '"'
- end
-
- def test_squoted_string
- tokenize '\'foo #{x} bar\\\'\n\tbaz\\\\\xA5b\5\1234\''
- assert_next_token :punct, "'"
- assert_next_token :string, "", :region_open
- assert_next_token :string, 'foo #{x} bar'
- assert_next_token :escape, '\\\''
- assert_next_token :string, '\n\tbaz'
- assert_next_token :escape, '\\\\'
- assert_next_token :string, '\xA5b\5\1234'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, "'"
- end
-
- def test_dot_selector
- tokenize 'foo.nil'
- skip_token
- assert_next_token :punct, "."
- assert_next_token :ident, "nil"
- end
-
- def test_dot_range_inclusive
- tokenize 'foo..nil'
- skip_token
- assert_next_token :punct, ".."
- assert_next_token :constant, "nil"
- end
-
- def test_dot_range_exclusive
- tokenize 'foo...nil'
- skip_token
- assert_next_token :punct, "..."
- assert_next_token :constant, "nil"
- end
-
- def test_dot_range_many
- tokenize 'foo.....nil'
- skip_token
- assert_next_token :punct, "....."
- assert_next_token :constant, "nil"
- end
-
- def test_attribute
- tokenize '@var_foo'
- assert_next_token :attribute, "@var_foo"
- end
-
- def test_global
- tokenize '$var_foo'
- assert_next_token :global, "$var_foo"
- tokenize '$12'
- assert_next_token :global, "$12"
- tokenize '$/f'
- assert_next_token :global, "$/"
- tokenize "$\n"
- assert_next_token :global, "$"
- end
-
- def test_paren_delimiter
- tokenize '%w(a)'
- assert_next_token :punct, "%w("
- assert_next_token :string, "", :region_open
- assert_next_token :string, "a"
- assert_next_token :string, "", :region_close
- assert_next_token :punct, ")"
- end
-
- def test_division
- tokenize 'm / 3'
- assert_next_token :ident, "m"
- assert_next_token :normal, " "
- assert_next_token :punct, "/"
- assert_next_token :normal, " "
- assert_next_token :number, "3"
- end
-
- def test_regex
- tokenize 'm =~ /3/'
- assert_next_token :ident, "m"
- assert_next_token :normal, " "
- assert_next_token :punct, "=~"
- assert_next_token :normal, " "
- assert_next_token :punct, "/"
- assert_next_token :regex, "", :region_open
- assert_next_token :regex, "3"
- assert_next_token :regex, "", :region_close
- assert_next_token :punct, "/"
- end
-
- def test_heredoc_with_trailing_text
- tokenize "foo('here', <<EOF)\n A heredoc.\nEOF\nfoo"
- assert_next_token :ident, "foo"
- assert_next_token :punct, "('"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'here'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "',"
- assert_next_token :normal, ' '
- assert_next_token :punct, '<<'
- assert_next_token :constant, "EOF"
- assert_next_token :punct, ')'
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n A heredoc.\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "EOF"
- assert_next_token :normal, "\n"
- assert_next_token :ident, "foo"
- end
-
- def test_multiple_heredocs
- tokenize <<'TEST'
- foo('here', <<EOF, 'there', <<-'FOO', 'blah')
- First heredoc, right here.
- Expressions are #{allowed}
- EOF
- Another heredoc, immediately after the first.
- Expressions are not #{allowed}
- FOO
- TEST
- assert_next_token :ident, "foo"
- assert_next_token :punct, "('"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'here'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "',"
- assert_next_token :normal, ' '
- assert_next_token :punct, '<<'
- assert_next_token :constant, "EOF"
- assert_next_token :punct, ','
- assert_next_token :normal, ' '
- assert_next_token :punct, "'"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'there'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "',"
- assert_next_token :normal, ' '
- assert_next_token :punct, "<<-'"
- assert_next_token :constant, "FOO"
- assert_next_token :punct, "',"
- assert_next_token :normal, ' '
- assert_next_token :punct, "'"
- assert_next_token :string, '', :region_open
- assert_next_token :string, 'blah'
- assert_next_token :string, '', :region_close
- assert_next_token :punct, "')"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\nFirst heredoc, right here.\nExpressions are "
- assert_next_token :expr, '#{allowed}'
- assert_next_token :string, "\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "EOF"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\n Another heredoc, immediately after the first.\n Expressions are not \#{allowed}\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, " FOO"
- end
-
- def test_carldr_bad_heredoc_001
- tokenize <<'TEST'
- str = <<END
- here document #{1 + 1}
- END
-
- if str
- TEST
-
- assert_next_token :ident, "str"
- assert_next_token :normal, " "
- assert_next_token :punct, "="
- assert_next_token :normal, " "
- assert_next_token :punct, "<<"
- assert_next_token :constant, "END"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\nhere document "
- assert_next_token :expr, '#{1 + 1}'
- assert_next_token :string, "\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "END"
- assert_next_token :normal, "\n\n"
- assert_next_token :keyword, "if"
- assert_next_token :normal, " "
- assert_next_token :ident, "str"
- end
-
- def test_regex_after_keyword
- tokenize "when /[0-7]/\nfoo"
- assert_next_token :keyword, "when"
- assert_next_token :normal, " "
- assert_next_token :punct, "/"
- assert_next_token :regex, "", :region_open
- assert_next_token :regex, "[0-7]"
- assert_next_token :regex, "", :region_close
- assert_next_token :punct, "/"
- assert_next_token :normal, "\n"
- assert_next_token :ident, "foo"
- end
-
- def test_heredoc_after_lparen
- tokenize "foo(<<SRC, obj)\nblah blah\nSRC\nfoo"
- assert_next_token :ident, "foo"
- assert_next_token :punct, "(<<"
- assert_next_token :constant, "SRC"
- assert_next_token :punct, ","
- assert_next_token :normal, " "
- assert_next_token :ident, "obj"
- assert_next_token :punct, ")"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\nblah blah\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "SRC"
- assert_next_token :normal, "\n"
- assert_next_token :ident, "foo"
- end
-
- def test_division_after_parens
- tokenize "(a+b)/2"
- assert_next_token :punct, "("
- assert_next_token :ident, "a"
- assert_next_token :punct, "+"
- assert_next_token :ident, "b"
- assert_next_token :punct, ")/"
- assert_next_token :number, "2"
- end
-
- def test_heredoc_with_CRNL
- tokenize "foo <<SRC\r\nSome text\r\nSRC\r\nfoo"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "<<"
- assert_next_token :constant, "SRC"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "\r\nSome text\r\n"
- assert_next_token :string, "", :region_close
- assert_next_token :constant, "SRC"
- assert_next_token :normal, "\r\n"
- assert_next_token :ident, "foo"
- end
-
- def test_question_mark_at_newline
- tokenize "foo ?\n 'bar': 'baz'"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "?"
- assert_next_token :normal, "\n "
- assert_next_token :punct, "'"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "bar"
- assert_next_token :string, "", :region_close
- assert_next_token :punct, "':"
- assert_next_token :normal, " "
- assert_next_token :punct, "'"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "baz"
- assert_next_token :string, "", :region_close
- assert_next_token :punct, "'"
- end
-
- def test_question_mark_and_escaped_newline
- tokenize "foo ?\\\n 'bar': 'baz'"
- assert_next_token :ident, "foo"
- assert_next_token :normal, " "
- assert_next_token :punct, "?\\"
- assert_next_token :normal, "\n "
- assert_next_token :punct, "'"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "bar"
- assert_next_token :string, "", :region_close
- assert_next_token :punct, "':"
- assert_next_token :normal, " "
- assert_next_token :punct, "'"
- assert_next_token :string, "", :region_open
- assert_next_token :string, "baz"
- assert_next_token :string, "", :region_close
- assert_next_token :punct, "'"
- end
-
- def test_highlighted_subexpression
- tokenizer.set :expressions => :highlight
- tokenize '"la la #{["hello", "world"].each { |f| puts "string #{f}" }}"'
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :string, "la la "
- assert_next_token :expr, "", :region_open
- assert_next_token :expr, '#{'
- assert_next_token :punct, '["'
- assert_next_token :string, "", :region_open
- assert_next_token :string, 'hello'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '",'
- assert_next_token :normal, ' '
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :string, "world"
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"].'
- assert_next_token :ident, 'each'
- assert_next_token :normal, ' '
- assert_next_token :punct, '{'
- assert_next_token :normal, ' '
- assert_next_token :punct, '|'
- assert_next_token :ident, 'f'
- assert_next_token :punct, '|'
- assert_next_token :normal, ' '
- assert_next_token :ident, 'puts'
- assert_next_token :normal, ' '
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :string, "string "
- assert_next_token :expr, "", :region_open
- assert_next_token :expr, '#{'
- assert_next_token :ident, 'f'
- assert_next_token :expr, '}'
- assert_next_token :expr, "", :region_close
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- assert_next_token :normal, ' '
- assert_next_token :punct, '}'
- assert_next_token :expr, '}'
- assert_next_token :expr, "", :region_close
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- end
-
- def test_expr_in_braces
- tokenize '"#{f}"'
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :expr, '#{f}'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- end
-
- def test_expr_in_braces_with_nested_braces
- tokenize '"#{loop{break}}"'
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :expr, '#{loop{break}}'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- end
-
- def test_expr_with_global_var
- tokenize '"#$f"'
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :expr, '#$f'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- end
-
- def test_expr_with_instance_var
- tokenize '"#@f"'
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :expr, '#@f'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- end
-
- def test_expr_with_class_var
- tokenize '"#@@f"'
- assert_next_token :punct, '"'
- assert_next_token :string, "", :region_open
- assert_next_token :expr, '#@@f'
- assert_next_token :string, "", :region_close
- assert_next_token :punct, '"'
- end
-
- def test_qmark_space
- tokenize "? "
- assert_next_token :punct, "?"
- assert_next_token :normal, " "
- end
-
- def test_capitalized_method
- tokenize "obj.Foo"
- skip_token 2
- assert_next_token :ident, "Foo"
- end
-
- def test_hexadecimal_literal
- tokenize "0xDEADbeef 0X1234567890ABCDEFG"
- assert_next_token :number, "0xDEADbeef"
- skip_token
- assert_next_token :number, "0X1234567890ABCDEF"
- assert_next_token :constant, "G"
- end
-
- def test_binary_literal
- tokenize "0b2 0b0 0b101 0B123"
- assert_next_token :number, "0"
- assert_next_token :ident, "b2"
- skip_token
- assert_next_token :number, "0b0"
- skip_token
- assert_next_token :number, "0b101"
- skip_token
- assert_next_token :number, "0B123"
- end
-
- def test_octal_literal
- tokenize "0o9 0o12345670abc 0O12345678"
- assert_next_token :number, "0"
- assert_next_token :ident, "o9"
- skip_token
- assert_next_token :number, "0o12345670"
- assert_next_token :ident, "abc"
- skip_token
- assert_next_token :number, "0O12345678"
- end
-
- def test_decimal_literal
- tokenize "0dA 0d1234567890abc 0D1234567890"
- assert_next_token :number, "0"
- assert_next_token :ident, "dA"
- skip_token
- assert_next_token :number, "0d1234567890"
- assert_next_token :ident, "abc"
- skip_token
- assert_next_token :number, "0D1234567890"
- end
- end