crass 0.2.1 → 1.0.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +4 -0
  3. data/HISTORY.md +22 -1
  4. data/LICENSE +1 -1
  5. data/README.md +64 -72
  6. data/Rakefile +4 -0
  7. data/crass.gemspec +2 -2
  8. data/lib/crass.rb +1 -1
  9. data/lib/crass/parser.rb +231 -96
  10. data/lib/crass/scanner.rb +21 -21
  11. data/lib/crass/token-scanner.rb +8 -1
  12. data/lib/crass/tokenizer.rb +133 -131
  13. data/lib/crass/version.rb +1 -1
  14. data/test/css-parsing-tests/An+B.json +156 -0
  15. data/test/css-parsing-tests/LICENSE +8 -0
  16. data/test/css-parsing-tests/README.rst +301 -0
  17. data/test/css-parsing-tests/color3.json +142 -0
  18. data/test/css-parsing-tests/color3_hsl.json +3890 -0
  19. data/test/css-parsing-tests/color3_keywords.json +803 -0
  20. data/test/css-parsing-tests/component_value_list.json +432 -0
  21. data/test/css-parsing-tests/declaration_list.json +44 -0
  22. data/test/css-parsing-tests/make_color3_hsl.py +17 -0
  23. data/test/css-parsing-tests/make_color3_keywords.py +191 -0
  24. data/test/css-parsing-tests/one_component_value.json +27 -0
  25. data/test/css-parsing-tests/one_declaration.json +46 -0
  26. data/test/css-parsing-tests/one_rule.json +36 -0
  27. data/test/css-parsing-tests/rule_list.json +48 -0
  28. data/test/css-parsing-tests/stylesheet.json +44 -0
  29. data/test/css-parsing-tests/stylesheet_bytes.json +146 -0
  30. data/test/shared/parse_rules.rb +377 -434
  31. data/test/support/common.rb +124 -0
  32. data/test/support/serialization/animate.css +3158 -0
  33. data/test/support/serialization/html5-boilerplate.css +268 -0
  34. data/test/support/serialization/misc.css +9 -0
  35. data/test/test_css_parsing_tests.rb +150 -0
  36. data/test/test_parse_properties.rb +136 -211
  37. data/test/test_parse_rules.rb +0 -52
  38. data/test/test_parse_stylesheet.rb +0 -39
  39. data/test/test_serialization.rb +13 -4
  40. metadata +44 -7
  41. data/test/test_tokenizer.rb +0 -1562
@@ -1,8 +1,5 @@
1
1
  # encoding: utf-8
2
2
 
3
- # Includes tests based on Simon Sapin's CSS parsing tests:
4
- # https://github.com/SimonSapin/css-parsing-tests/
5
-
6
3
  require_relative 'support/common'
7
4
  require_relative 'shared/parse_rules'
8
5
 
@@ -16,54 +13,5 @@ describe 'Crass::Parser' do
16
13
  end
17
14
 
18
15
  behaves_like 'parsing a list of rules'
19
-
20
- it 'with CDO/CDC before rule' do
21
- tree = parse(" <!-- --> {")
22
-
23
- assert_equal(2, tree.size)
24
- assert_tokens(" ", tree[0])
25
-
26
- rule = tree[1]
27
- assert_equal(:style_rule, rule[:node])
28
- assert_equal([], rule[:children])
29
-
30
- selector = rule[:selector]
31
- assert_equal(:selector, selector[:node])
32
- assert_equal("<!-- -->", selector[:value])
33
- assert_tokens("<!-- --> ", selector[:tokens], 1)
34
- end
35
-
36
- it 'followed by CDC' do
37
- # TODO: This should be a parse error.
38
- tree = parse("div {} -->")
39
-
40
- assert_equal(2, tree.size)
41
-
42
- rule = tree[0]
43
- assert_equal(:style_rule, rule[:node])
44
- assert_equal([], rule[:children])
45
-
46
- selector = rule[:selector]
47
- assert_equal(:selector, selector[:node])
48
- assert_equal("div", selector[:value])
49
- assert_tokens("div ", selector[:tokens])
50
-
51
- assert_tokens(" ", tree[1], 6)
52
- end
53
-
54
- it 'should parse the block of an at-rule' do
55
- rule = CP.parse_stylesheet("@media (max-width: 400px) {#foo:not(.bar){color:#fff;}}")[0]
56
- assert_equal(:at_rule, rule[:node])
57
-
58
- style_rule = parse(rule[:block][:value])[0]
59
- assert_equal(:style_rule, style_rule[:node])
60
- assert_equal("#foo:not(.bar)", style_rule[:selector][:value])
61
- assert_equal(1, style_rule[:children].size)
62
-
63
- prop = style_rule[:children][0]
64
- assert_equal(:property, prop[:node])
65
- assert_equal("color", prop[:name])
66
- assert_equal("#fff", prop[:value])
67
- end
68
16
  end
69
17
  end
@@ -1,8 +1,5 @@
1
1
  # encoding: utf-8
2
2
 
3
- # Includes tests based on Simon Sapin's CSS parsing tests:
4
- # https://github.com/SimonSapin/css-parsing-tests/
5
-
6
3
  require_relative 'support/common'
7
4
  require_relative 'shared/parse_rules'
8
5
 
@@ -16,41 +13,5 @@ describe 'Crass::Parser' do
16
13
  end
17
14
 
18
15
  behaves_like 'parsing a list of rules'
19
-
20
- it 'with CDO/CDC before rule' do
21
- tree = parse(" <!-- --> {")
22
-
23
- assert_equal(4, tree.size)
24
-
25
- tree[0..2].each do |node|
26
- assert_equal(:whitespace, node[:node])
27
- end
28
-
29
- rule = tree[3]
30
- assert_equal(:style_rule, rule[:node])
31
- assert_equal([], rule[:children])
32
-
33
- selector = rule[:selector]
34
- assert_equal(:selector, selector[:node])
35
- assert_equal("", selector[:value])
36
- assert_equal([], selector[:tokens])
37
- end
38
-
39
- it 'followed by CDC' do
40
- tree = parse("div {} -->")
41
-
42
- assert_equal(2, tree.size)
43
-
44
- rule = tree[0]
45
- assert_equal(:style_rule, rule[:node])
46
- assert_equal([], rule[:children])
47
-
48
- selector = rule[:selector]
49
- assert_equal(:selector, selector[:node])
50
- assert_equal("div", selector[:value])
51
- assert_tokens("div ", selector[:tokens])
52
-
53
- assert_tokens(" ", tree[1], 6)
54
- end
55
16
  end
56
17
  end
@@ -2,19 +2,28 @@
2
2
  require_relative 'support/common'
3
3
 
4
4
  describe 'Serialization' do
5
+ make_my_diffs_pretty!
5
6
  parallelize_me!
6
7
 
8
+ # Parse a bunch of real-world CSS and make sure it's the same when we
9
+ # serialize it.
7
10
  Dir[File.join(File.dirname(__FILE__), 'support/serialization/*.css')].each do |filepath|
8
11
  it "should parse and serialize #{filepath}" do
9
- input = File.read(filepath)
12
+ input = File.read(filepath)
10
13
 
11
14
  tree = Crass.parse(input,
12
15
  :preserve_comments => true,
13
16
  :preserve_hacks => true)
14
17
 
15
- output = CP.stringify(tree)
16
-
17
- assert_equal(input, output)
18
+ assert_equal(input, CP.stringify(tree))
18
19
  end
19
20
  end
21
+
22
+ # -- Regression tests --------------------------------------------------------
23
+ it "should not omit a trailing semicolon when serializing a `@charset` rule" do
24
+ css = '@charset "utf-8";'
25
+ tree = Crass.parse(css)
26
+
27
+ assert_equal(css, CP.stringify(tree))
28
+ end
20
29
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: crass
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.1
4
+ version: 1.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ryan Grove
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2014-07-22 00:00:00.000000000 Z
11
+ date: 2014-11-16 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: minitest
@@ -38,7 +38,7 @@ dependencies:
38
38
  - - "~>"
39
39
  - !ruby/object:Gem::Version
40
40
  version: 10.1.0
41
- description: Crass is a pure Ruby CSS parser based on the CSS Syntax Level 3 draft.
41
+ description: Crass is a pure Ruby CSS parser based on the CSS Syntax Level 3 spec.
42
42
  email:
43
43
  - ryan@wonko.com
44
44
  executables: []
@@ -60,17 +60,36 @@ files:
60
60
  - lib/crass/token-scanner.rb
61
61
  - lib/crass/tokenizer.rb
62
62
  - lib/crass/version.rb
63
+ - test/css-parsing-tests/An+B.json
64
+ - test/css-parsing-tests/LICENSE
65
+ - test/css-parsing-tests/README.rst
66
+ - test/css-parsing-tests/color3.json
67
+ - test/css-parsing-tests/color3_hsl.json
68
+ - test/css-parsing-tests/color3_keywords.json
69
+ - test/css-parsing-tests/component_value_list.json
70
+ - test/css-parsing-tests/declaration_list.json
71
+ - test/css-parsing-tests/make_color3_hsl.py
72
+ - test/css-parsing-tests/make_color3_keywords.py
73
+ - test/css-parsing-tests/one_component_value.json
74
+ - test/css-parsing-tests/one_declaration.json
75
+ - test/css-parsing-tests/one_rule.json
76
+ - test/css-parsing-tests/rule_list.json
77
+ - test/css-parsing-tests/stylesheet.json
78
+ - test/css-parsing-tests/stylesheet_bytes.json
63
79
  - test/shared/parse_rules.rb
64
80
  - test/support/common.rb
81
+ - test/support/serialization/animate.css
65
82
  - test/support/serialization/bootstrap-theme.css
66
83
  - test/support/serialization/bootstrap.css
84
+ - test/support/serialization/html5-boilerplate.css
85
+ - test/support/serialization/misc.css
67
86
  - test/support/serialization/pure.css
68
87
  - test/test_crass.rb
88
+ - test/test_css_parsing_tests.rb
69
89
  - test/test_parse_properties.rb
70
90
  - test/test_parse_rules.rb
71
91
  - test/test_parse_stylesheet.rb
72
92
  - test/test_serialization.rb
73
- - test/test_tokenizer.rb
74
93
  homepage: https://github.com/rgrove/crass/
75
94
  licenses:
76
95
  - MIT
@@ -94,17 +113,35 @@ rubyforge_project:
94
113
  rubygems_version: 2.2.2
95
114
  signing_key:
96
115
  specification_version: 4
97
- summary: CSS parser based on the CSS Syntax Level 3 draft.
116
+ summary: CSS parser based on the CSS Syntax Level 3 spec.
98
117
  test_files:
118
+ - test/css-parsing-tests/An+B.json
119
+ - test/css-parsing-tests/LICENSE
120
+ - test/css-parsing-tests/README.rst
121
+ - test/css-parsing-tests/color3.json
122
+ - test/css-parsing-tests/color3_hsl.json
123
+ - test/css-parsing-tests/color3_keywords.json
124
+ - test/css-parsing-tests/component_value_list.json
125
+ - test/css-parsing-tests/declaration_list.json
126
+ - test/css-parsing-tests/make_color3_hsl.py
127
+ - test/css-parsing-tests/make_color3_keywords.py
128
+ - test/css-parsing-tests/one_component_value.json
129
+ - test/css-parsing-tests/one_declaration.json
130
+ - test/css-parsing-tests/one_rule.json
131
+ - test/css-parsing-tests/rule_list.json
132
+ - test/css-parsing-tests/stylesheet.json
133
+ - test/css-parsing-tests/stylesheet_bytes.json
99
134
  - test/shared/parse_rules.rb
100
135
  - test/support/common.rb
136
+ - test/support/serialization/animate.css
101
137
  - test/support/serialization/bootstrap-theme.css
102
138
  - test/support/serialization/bootstrap.css
139
+ - test/support/serialization/html5-boilerplate.css
140
+ - test/support/serialization/misc.css
103
141
  - test/support/serialization/pure.css
104
142
  - test/test_crass.rb
143
+ - test/test_css_parsing_tests.rb
105
144
  - test/test_parse_properties.rb
106
145
  - test/test_parse_rules.rb
107
146
  - test/test_parse_stylesheet.rb
108
147
  - test/test_serialization.rb
109
- - test/test_tokenizer.rb
110
- has_rdoc:
@@ -1,1562 +0,0 @@
1
- # encoding: utf-8
2
-
3
- # Includes tests based on Simon Sapin's CSS parsing tests:
4
- # https://github.com/SimonSapin/css-parsing-tests/
5
-
6
- require_relative 'support/common'
7
-
8
- describe 'Crass::Tokenizer' do
9
- make_my_diffs_pretty!
10
- parallelize_me!
11
-
12
- it 'should tokenize an empty string' do
13
- assert_equal([], CT.tokenize(""))
14
- end
15
-
16
- it 'should tokenize comments' do
17
- tokens = CT.tokenize("/*/*///** /* **/*//* ")
18
-
19
- assert_equal([
20
- {:node=>:delim, :pos=>5, :raw=>"/", :value=>"/"},
21
- {:node=>:delim, :pos=>16, :raw=>"*", :value=>"*"},
22
- {:node=>:delim, :pos=>17, :raw=>"/", :value=>"/"}
23
- ], tokens)
24
- end
25
-
26
- it 'should tokenize comments when :preserve_comments == true' do
27
- tokens = CT.tokenize("/*/*///** /* **/*//* ",
28
- :preserve_comments => true)
29
-
30
- assert_equal([
31
- {:node=>:comment, :pos=>0, :raw=>"/*/*/", :value=>"/"},
32
- {:node=>:delim, :pos=>5, :raw=>"/", :value=>"/"},
33
- {:node=>:comment, :pos=>6, :raw=>"/** /* **/", :value=>"* /* *"},
34
- {:node=>:delim, :pos=>16, :raw=>"*", :value=>"*"},
35
- {:node=>:delim, :pos=>17, :raw=>"/", :value=>"/"},
36
- {:node=>:comment, :pos=>18, :raw=>"/* ", :value=>" "}
37
- ], tokens)
38
- end
39
-
40
- it 'should tokenize an identity' do
41
- tokens = CT.tokenize("red")
42
-
43
- assert_equal([
44
- {:node=>:ident, :pos=>0, :raw=>"red", :value=>"red"}
45
- ], tokens)
46
- end
47
-
48
- it 'should tokenize an identity preceded and followed by whitespace' do
49
- tokens = CT.tokenize(" \t\t\r\n\nRed ")
50
-
51
- assert_equal([
52
- {:node=>:whitespace, :pos=>0, :raw=>" \t\t\n\n"},
53
- {:node=>:ident, :pos=>6, :raw=>"Red", :value=>"Red"},
54
- {:node=>:whitespace, :pos=>9, :raw=>" "}
55
- ], tokens)
56
- end
57
-
58
- it 'should tokenize a CDC' do
59
- tokens = CT.tokenize("red/* CDC */-->")
60
-
61
- assert_equal([
62
- {:node=>:ident, :pos=>0, :raw=>"red", :value=>"red"},
63
- {:node=>:cdc, :pos=>12, :raw=>"-->"}
64
- ], tokens)
65
- end
66
-
67
- it 'should not be fooled by an ident that appears to be a CDC' do
68
- tokens = CT.tokenize("red-->/* Not CDC */")
69
-
70
- assert_equal([
71
- {:node=>:ident, :pos=>0, :raw=>"red--", :value=>"red--"},
72
- {:node=>:delim, :pos=>5, :raw=>">", :value=>">"}
73
- ], tokens)
74
- end
75
-
76
- it 'should tokenize a mix of idents, delims, and dimensions' do
77
- tokens = CT.tokenize("red0 -red --red -\\-red\\ blue 0red -0red \u0000red _Red .red rêd r\\êd \u007F\u0080\u0081")
78
-
79
- assert_equal([
80
- {:node=>:ident, :pos=>0, :raw=>"red0", :value=>"red0"},
81
- {:node=>:whitespace, :pos=>4, :raw=>" "},
82
- {:node=>:ident, :pos=>5, :raw=>"-red", :value=>"-red"},
83
- {:node=>:whitespace, :pos=>9, :raw=>" "},
84
- {:node=>:delim, :pos=>10, :raw=>"-", :value=>"-"},
85
- {:node=>:ident, :pos=>11, :raw=>"-red", :value=>"-red"},
86
- {:node=>:whitespace, :pos=>15, :raw=>" "},
87
- {:node=>:ident, :pos=>16, :raw=>"-\\-red\\ blue", :value=>"--red blue"},
88
- {:node=>:whitespace, :pos=>28, :raw=>" "},
89
- {:node=>:dimension,
90
- :pos=>29,
91
- :raw=>"0red",
92
- :repr=>"0",
93
- :type=>:integer,
94
- :unit=>"red",
95
- :value=>0},
96
- {:node=>:whitespace, :pos=>33, :raw=>" "},
97
- {:node=>:dimension,
98
- :pos=>34,
99
- :raw=>"-0red",
100
- :repr=>"-0",
101
- :type=>:integer,
102
- :unit=>"red",
103
- :value=>0},
104
- {:node=>:whitespace, :pos=>39, :raw=>" "},
105
- {:node=>:ident, :pos=>40, :raw=>"\uFFFDred", :value=>"\uFFFDred"},
106
- {:node=>:whitespace, :pos=>44, :raw=>" "},
107
- {:node=>:ident, :pos=>45, :raw=>"_Red", :value=>"_Red"},
108
- {:node=>:whitespace, :pos=>49, :raw=>" "},
109
- {:node=>:delim, :pos=>50, :raw=>".", :value=>"."},
110
- {:node=>:ident, :pos=>51, :raw=>"red", :value=>"red"},
111
- {:node=>:whitespace, :pos=>54, :raw=>" "},
112
- {:node=>:ident, :pos=>55, :raw=>"rêd", :value=>"rêd"},
113
- {:node=>:whitespace, :pos=>58, :raw=>" "},
114
- {:node=>:ident, :pos=>59, :raw=>"r\\êd", :value=>"rêd"},
115
- {:node=>:whitespace, :pos=>63, :raw=>" "},
116
- {:node=>:delim, :pos=>64, :raw=>"\u007F", :value=>"\u007F"},
117
- {:node=>:ident, :pos=>65, :raw=>"\u0080\u0081", :value=>"\u0080\u0081"}
118
- ], tokens)
119
- end
120
-
121
- it 'should consume escape sequences' do
122
- tokens = CT.tokenize("\\30red \\00030 red \\30\r\nred \\0000000red \\1100000red \\red \\r ed \\.red \\ red \\\nred \\376\\37 6\\000376\\0000376\\")
123
-
124
- assert_equal([
125
- {:node=>:ident, :pos=>0, :raw=>"\\30red", :value=>"0red"},
126
- {:node=>:whitespace, :pos=>6, :raw=>" "},
127
- {:node=>:ident, :pos=>7, :raw=>"\\00030 red", :value=>"0red"},
128
- {:node=>:whitespace, :pos=>17, :raw=>" "},
129
- {:node=>:ident, :pos=>18, :raw=>"\\30\nred", :value=>"0red"},
130
- {:node=>:whitespace, :pos=>25, :raw=>" "},
131
- {:node=>:ident, :pos=>26, :raw=>"\\0000000red", :value=>"\uFFFD0red"},
132
- {:node=>:whitespace, :pos=>37, :raw=>" "},
133
- {:node=>:ident, :pos=>38, :raw=>"\\1100000red", :value=>"\uFFFD0red"},
134
- {:node=>:whitespace, :pos=>49, :raw=>" "},
135
- {:node=>:ident, :pos=>50, :raw=>"\\red", :value=>"red"},
136
- {:node=>:whitespace, :pos=>54, :raw=>" "},
137
- {:node=>:ident, :pos=>55, :raw=>"\\r", :value=>"r"},
138
- {:node=>:whitespace, :pos=>57, :raw=>" "},
139
- {:node=>:ident, :pos=>58, :raw=>"ed", :value=>"ed"},
140
- {:node=>:whitespace, :pos=>60, :raw=>" "},
141
- {:node=>:ident, :pos=>61, :raw=>"\\.red", :value=>".red"},
142
- {:node=>:whitespace, :pos=>66, :raw=>" "},
143
- {:node=>:ident, :pos=>67, :raw=>"\\ red", :value=>" red"},
144
- {:node=>:whitespace, :pos=>72, :raw=>" "},
145
- {:node=>:delim, :pos=>73, :raw=>"\\", :error=>true, :value=>"\\"},
146
- {:node=>:whitespace, :pos=>74, :raw=>"\n"},
147
- {:node=>:ident, :pos=>75, :raw=>"red", :value=>"red"},
148
- {:node=>:whitespace, :pos=>78, :raw=>" "},
149
- {:node=>:ident,
150
- :pos=>79,
151
- :raw=>"\\376\\37 6\\000376\\0000376\\",
152
- :value=>"Ͷ76Ͷ76\uFFFD"}
153
- ], tokens)
154
- end
155
-
156
- it 'should tokenize functions and hashes' do
157
- tokens = CT.tokenize("rgba0() -rgba() --rgba() -\\-rgba() 0rgba() -0rgba() _rgba() .rgba() rgbâ() \\30rgba() rgba () @rgba() #rgba()")
158
-
159
- assert_equal([
160
- {:node=>:function, :pos=>0, :raw=>"rgba0(", :value=>"rgba0"},
161
- {:node=>:")", :pos=>6, :raw=>")"},
162
- {:node=>:whitespace, :pos=>7, :raw=>" "},
163
- {:node=>:function, :pos=>8, :raw=>"-rgba(", :value=>"-rgba"},
164
- {:node=>:")", :pos=>14, :raw=>")"},
165
- {:node=>:whitespace, :pos=>15, :raw=>" "},
166
- {:node=>:delim, :pos=>16, :raw=>"-", :value=>"-"},
167
- {:node=>:function, :pos=>17, :raw=>"-rgba(", :value=>"-rgba"},
168
- {:node=>:")", :pos=>23, :raw=>")"},
169
- {:node=>:whitespace, :pos=>24, :raw=>" "},
170
- {:node=>:function, :pos=>25, :raw=>"-\\-rgba(", :value=>"--rgba"},
171
- {:node=>:")", :pos=>33, :raw=>")"},
172
- {:node=>:whitespace, :pos=>34, :raw=>" "},
173
- {:node=>:dimension,
174
- :pos=>35,
175
- :raw=>"0rgba",
176
- :repr=>"0",
177
- :type=>:integer,
178
- :unit=>"rgba",
179
- :value=>0},
180
- {:node=>:"(", :pos=>40, :raw=>"("},
181
- {:node=>:")", :pos=>41, :raw=>")"},
182
- {:node=>:whitespace, :pos=>42, :raw=>" "},
183
- {:node=>:dimension,
184
- :pos=>43,
185
- :raw=>"-0rgba",
186
- :repr=>"-0",
187
- :type=>:integer,
188
- :unit=>"rgba",
189
- :value=>0},
190
- {:node=>:"(", :pos=>49, :raw=>"("},
191
- {:node=>:")", :pos=>50, :raw=>")"},
192
- {:node=>:whitespace, :pos=>51, :raw=>" "},
193
- {:node=>:function, :pos=>52, :raw=>"_rgba(", :value=>"_rgba"},
194
- {:node=>:")", :pos=>58, :raw=>")"},
195
- {:node=>:whitespace, :pos=>59, :raw=>" "},
196
- {:node=>:delim, :pos=>60, :raw=>".", :value=>"."},
197
- {:node=>:function, :pos=>61, :raw=>"rgba(", :value=>"rgba"},
198
- {:node=>:")", :pos=>66, :raw=>")"},
199
- {:node=>:whitespace, :pos=>67, :raw=>" "},
200
- {:node=>:function, :pos=>68, :raw=>"rgbâ(", :value=>"rgbâ"},
201
- {:node=>:")", :pos=>73, :raw=>")"},
202
- {:node=>:whitespace, :pos=>74, :raw=>" "},
203
- {:node=>:function, :pos=>75, :raw=>"\\30rgba(", :value=>"0rgba"},
204
- {:node=>:")", :pos=>83, :raw=>")"},
205
- {:node=>:whitespace, :pos=>84, :raw=>" "},
206
- {:node=>:ident, :pos=>85, :raw=>"rgba", :value=>"rgba"},
207
- {:node=>:whitespace, :pos=>89, :raw=>" "},
208
- {:node=>:"(", :pos=>90, :raw=>"("},
209
- {:node=>:")", :pos=>91, :raw=>")"},
210
- {:node=>:whitespace, :pos=>92, :raw=>" "},
211
- {:node=>:at_keyword, :pos=>93, :raw=>"@rgba", :value=>"rgba"},
212
- {:node=>:"(", :pos=>98, :raw=>"("},
213
- {:node=>:")", :pos=>99, :raw=>")"},
214
- {:node=>:whitespace, :pos=>100, :raw=>" "},
215
- {:node=>:hash,
216
- :pos=>101,
217
- :raw=>"#rgba",
218
- :type=>:id,
219
- :value=>"rgba"},
220
- {:node=>:"(", :pos=>106, :raw=>"("},
221
- {:node=>:")", :pos=>107, :raw=>")"}
222
- ], tokens)
223
- end
224
-
225
- it 'should tokenize at-rules' do
226
- tokens = CT.tokenize("@media0 @-Media @--media @-\\-media @0media @-0media @_media @.media @medİa @\\30 media\\")
227
-
228
- assert_equal([
229
- {:node=>:at_keyword, :pos=>0, :raw=>"@media0", :value=>"media0"},
230
- {:node=>:whitespace, :pos=>7, :raw=>" "},
231
- {:node=>:at_keyword, :pos=>8, :raw=>"@-Media", :value=>"-Media"},
232
- {:node=>:whitespace, :pos=>15, :raw=>" "},
233
- {:node=>:delim, :pos=>16, :raw=>"@", :value=>"@"},
234
- {:node=>:delim, :pos=>17, :raw=>"-", :value=>"-"},
235
- {:node=>:ident, :pos=>18, :raw=>"-media", :value=>"-media"},
236
- {:node=>:whitespace, :pos=>24, :raw=>" "},
237
- {:node=>:at_keyword, :pos=>25, :raw=>"@-\\-media", :value=>"--media"},
238
- {:node=>:whitespace, :pos=>34, :raw=>" "},
239
- {:node=>:delim, :pos=>35, :raw=>"@", :value=>"@"},
240
- {:node=>:dimension,
241
- :pos=>36,
242
- :raw=>"0media",
243
- :repr=>"0",
244
- :type=>:integer,
245
- :unit=>"media",
246
- :value=>0},
247
- {:node=>:whitespace, :pos=>42, :raw=>" "},
248
- {:node=>:delim, :pos=>43, :raw=>"@", :value=>"@"},
249
- {:node=>:dimension,
250
- :pos=>44,
251
- :raw=>"-0media",
252
- :repr=>"-0",
253
- :type=>:integer,
254
- :unit=>"media",
255
- :value=>0},
256
- {:node=>:whitespace, :pos=>51, :raw=>" "},
257
- {:node=>:at_keyword, :pos=>52, :raw=>"@_media", :value=>"_media"},
258
- {:node=>:whitespace, :pos=>59, :raw=>" "},
259
- {:node=>:delim, :pos=>60, :raw=>"@", :value=>"@"},
260
- {:node=>:delim, :pos=>61, :raw=>".", :value=>"."},
261
- {:node=>:ident, :pos=>62, :raw=>"media", :value=>"media"},
262
- {:node=>:whitespace, :pos=>67, :raw=>" "},
263
- {:node=>:at_keyword, :pos=>68, :raw=>"@medİa", :value=>"medİa"},
264
- {:node=>:whitespace, :pos=>74, :raw=>" "},
265
- {:node=>:at_keyword, :pos=>75, :raw=>"@\\30 media\\", :value=>"0media\uFFFD"}
266
- ], tokens)
267
- end
268
-
269
- it 'should tokenize hashes' do
270
- tokens = CT.tokenize("#red0 #-Red #--red #-\\-red #0red #-0red #_Red #.red #rêd #\\.red\\")
271
-
272
- assert_equal([
273
- {:node=>:hash, :pos=>0, :raw=>"#red0", :type=>:id, :value=>"red0"},
274
- {:node=>:whitespace, :pos=>5, :raw=>" "},
275
- {:node=>:hash, :pos=>6, :raw=>"#-Red", :type=>:id, :value=>"-Red"},
276
- {:node=>:whitespace, :pos=>11, :raw=>" "},
277
- {:node=>:hash,
278
- :pos=>12,
279
- :raw=>"#--red",
280
- :type=>:unrestricted,
281
- :value=>"--red"},
282
- {:node=>:whitespace, :pos=>18, :raw=>" "},
283
- {:node=>:hash, :pos=>19, :raw=>"#-\\-red", :type=>:id, :value=>"--red"},
284
- {:node=>:whitespace, :pos=>26, :raw=>" "},
285
- {:node=>:hash, :pos=>27, :raw=>"#0red", :type=>:unrestricted, :value=>"0red"},
286
- {:node=>:whitespace, :pos=>32, :raw=>" "},
287
- {:node=>:hash,
288
- :pos=>33,
289
- :raw=>"#-0red",
290
- :type=>:unrestricted,
291
- :value=>"-0red"},
292
- {:node=>:whitespace, :pos=>39, :raw=>" "},
293
- {:node=>:hash, :pos=>40, :raw=>"#_Red", :type=>:id, :value=>"_Red"},
294
- {:node=>:whitespace, :pos=>45, :raw=>" "},
295
- {:node=>:delim, :pos=>46, :raw=>"#", :value=>"#"},
296
- {:node=>:delim, :pos=>47, :raw=>".", :value=>"."},
297
- {:node=>:ident, :pos=>48, :raw=>"red", :value=>"red"},
298
- {:node=>:whitespace, :pos=>51, :raw=>" "},
299
- {:node=>:hash, :pos=>52, :raw=>"#rêd", :type=>:id, :value=>"rêd"},
300
- {:node=>:whitespace, :pos=>56, :raw=>" "},
301
- {:node=>:hash, :pos=>57, :raw=>"#\\.red\\", :type=>:id, :value=>".red\uFFFD"}
302
- ], tokens)
303
- end
304
-
305
- it 'should tokenize strings containing escaped newlines' do
306
- tokens = CT.tokenize("p[example=\"\\\nfoo(int x) {\\\n this.x = x;\\\n}\\\n\"]")
307
-
308
- assert_equal([
309
- {:node=>:ident, :pos=>0, :raw=>"p", :value=>"p"},
310
- {:node=>:"[", :pos=>1, :raw=>"["},
311
- {:node=>:ident, :pos=>2, :raw=>"example", :value=>"example"},
312
- {:node=>:delim, :pos=>9, :raw=>"=", :value=>"="},
313
- {:node=>:string,
314
- :pos=>10,
315
- :raw=>"\"\\\nfoo(int x) {\\\n this.x = x;\\\n}\\\n\"",
316
- :value=>"foo(int x) { this.x = x;}"},
317
- {:node=>:"]", :pos=>47, :raw=>"]"}
318
- ], tokens)
319
- end
320
-
321
- it 'should not choke on bad single-quoted strings' do
322
- tokens = CT.tokenize("'' 'Lorem \"îpsum\"' 'a\\\nb' 'a\nb 'eof")
323
-
324
- assert_equal([
325
- {:node=>:string, :pos=>0, :raw=>"''", :value=>""},
326
- {:node=>:whitespace, :pos=>2, :raw=>" "},
327
- {:node=>:string,
328
- :pos=>3,
329
- :raw=>"'Lorem \"îpsum\"'",
330
- :value=>"Lorem \"îpsum\""},
331
- {:node=>:whitespace, :pos=>18, :raw=>" "},
332
- {:node=>:string, :pos=>19, :raw=>"'a\\\nb'", :value=>"ab"},
333
- {:node=>:whitespace, :pos=>25, :raw=>" "},
334
- {:node=>:bad_string, :pos=>26, :raw=>"'a", :error=>true, :value=>"a"},
335
- {:node=>:whitespace, :pos=>28, :raw=>"\n"},
336
- {:node=>:ident, :pos=>29, :raw=>"b", :value=>"b"},
337
- {:node=>:whitespace, :pos=>30, :raw=>" "},
338
- {:node=>:string, :pos=>31, :raw=>"'eof", :value=>"eof"}
339
- ], tokens)
340
- end
341
-
342
- it 'should not choke on bad double-quoted strings' do
343
- tokens = CT.tokenize("\"\" \"Lorem 'îpsum'\" \"a\\\nb\" \"a\nb \"eof")
344
-
345
- assert_equal([
346
- {:node=>:string, :pos=>0, :raw=>"\"\"", :value=>""},
347
- {:node=>:whitespace, :pos=>2, :raw=>" "},
348
- {:node=>:string, :pos=>3, :raw=>"\"Lorem 'îpsum'\"", :value=>"Lorem 'îpsum'"},
349
- {:node=>:whitespace, :pos=>18, :raw=>" "},
350
- {:node=>:string, :pos=>19, :raw=>"\"a\\\nb\"", :value=>"ab"},
351
- {:node=>:whitespace, :pos=>25, :raw=>" "},
352
- {:node=>:bad_string, :pos=>26, :raw=>"\"a", :error=>true, :value=>"a"},
353
- {:node=>:whitespace, :pos=>28, :raw=>"\n"},
354
- {:node=>:ident, :pos=>29, :raw=>"b", :value=>"b"},
355
- {:node=>:whitespace, :pos=>30, :raw=>" "},
356
- {:node=>:string, :pos=>31, :raw=>"\"eof", :value=>"eof"}
357
- ], tokens)
358
- end
359
-
360
- it 'should tokenize escapes within strings' do
361
- tokens = CT.tokenize("\"Lo\\rem \\130 ps\\u m\" '\\376\\37 6\\000376\\0000376\\")
362
-
363
- assert_equal([
364
- {:node=>:string,
365
- :pos=>0,
366
- :raw=>"\"Lo\\rem \\130 ps\\u m\"",
367
- :value=>"Lorem İpsu m"},
368
- {:node=>:whitespace, :pos=>20, :raw=>" "},
369
- {:node=>:string,
370
- :pos=>21,
371
- :raw=>"'\\376\\37 6\\000376\\0000376\\",
372
- :value=>"Ͷ76Ͷ76"}
373
- ], tokens)
374
- end
375
-
376
- it 'should tokenize URLs with single quotes' do
377
- tokens = CT.tokenize("url( '') url('Lorem \"îpsum\"'\n) url('a\\\nb' ) url('a\nb' \\){ ) url('eof")
378
-
379
- assert_equal([
380
- {:node=>:url, :pos=>0, :raw=>"url( '')", :value=>""},
381
- {:node=>:whitespace, :pos=>8, :raw=>" "},
382
- {:node=>:url,
383
- :pos=>9,
384
- :raw=>"url('Lorem \"îpsum\"'\n)",
385
- :value=>"Lorem \"îpsum\""},
386
- {:node=>:whitespace, :pos=>30, :raw=>" "},
387
- {:node=>:url, :pos=>31, :raw=>"url('a\\\nb' )", :value=>"ab"},
388
- {:node=>:whitespace, :pos=>43, :raw=>" "},
389
- {:node=>:bad_url, :pos=>44, :raw=>"url('a\nb' \\){ )", :value=>"a\nb' ){ "},
390
- {:node=>:whitespace, :pos=>59, :raw=>" "},
391
- {:node=>:url, :pos=>60, :raw=>"url('eof", :value=>"eof"}
392
- ], tokens)
393
- end
394
-
395
- it 'should tokenize an empty, unclosed URL' do
396
- tokens = CT.tokenize("url(")
397
-
398
- assert_equal([
399
- {:node=>:url, :pos=>0, :raw=>"url(", :value=>""}
400
- ], tokens)
401
- end
402
-
403
- it 'should tokenize an unclosed URL containing a tab' do
404
- tokens = CT.tokenize("url( \t")
405
-
406
- assert_equal([
407
- {:node=>:url, :pos=>0, :raw=>"url( \t", :value=>""}
408
- ], tokens)
409
- end
410
-
411
- it 'should tokenize URLs with double quotes' do
412
- tokens = CT.tokenize("url(\"\") url(\"Lorem 'îpsum'\"\n) url(\"a\\\nb\" ) url(\"a\nb\" \\){ ) url(\"eof")
413
-
414
- assert_equal([
415
- {:node=>:url, :pos=>0, :raw=>"url(\"\")", :value=>""},
416
- {:node=>:whitespace, :pos=>7, :raw=>" "},
417
- {:node=>:url,
418
- :pos=>8,
419
- :raw=>"url(\"Lorem 'îpsum'\"\n)",
420
- :value=>"Lorem 'îpsum'"},
421
- {:node=>:whitespace, :pos=>29, :raw=>" "},
422
- {:node=>:url, :pos=>30, :raw=>"url(\"a\\\nb\" )", :value=>"ab"},
423
- {:node=>:whitespace, :pos=>42, :raw=>" "},
424
- {:node=>:bad_url,
425
- :pos=>43,
426
- :raw=>"url(\"a\nb\" \\){ )",
427
- :value=>"a\nb\" ){ "},
428
- {:node=>:whitespace, :pos=>58, :raw=>" "},
429
- {:node=>:url, :pos=>59, :raw=>"url(\"eof", :value=>"eof"}
430
- ], tokens)
431
- end
432
-
433
- it 'should tokenize URLs containing escapes' do
434
- tokens = CT.tokenize("url(\"Lo\\rem \\130 ps\\u m\") url('\\376\\37 6\\000376\\0000376\\")
435
-
436
- assert_equal([
437
- {:node=>:url,
438
- :pos=>0,
439
- :raw=>"url(\"Lo\\rem \\130 ps\\u m\")",
440
- :value=>"Lorem İpsu m"},
441
- {:node=>:whitespace, :pos=>25, :raw=>" "},
442
- {:node=>:url,
443
- :pos=>26,
444
- :raw=>"url('\\376\\37 6\\000376\\0000376\\",
445
- :value=>"Ͷ76Ͷ76"}
446
- ], tokens)
447
- end
448
-
449
- it 'should tokenize unquoted URLs in a case-insensitive manner' do
450
- tokens = CT.tokenize("URL(foo) Url(foo) ûrl(foo) url (foo) url\\ (foo) url(\t 'foo' ")
451
-
452
- assert_equal([
453
- {:node=>:url, :pos=>0, :raw=>"URL(foo)", :value=>"foo"},
454
- {:node=>:whitespace, :pos=>8, :raw=>" "},
455
- {:node=>:url, :pos=>9, :raw=>"Url(foo)", :value=>"foo"},
456
- {:node=>:whitespace, :pos=>17, :raw=>" "},
457
- {:node=>:function, :pos=>18, :raw=>"ûrl(", :value=>"ûrl"},
458
- {:node=>:ident, :pos=>22, :raw=>"foo", :value=>"foo"},
459
- {:node=>:")", :pos=>25, :raw=>")"},
460
- {:node=>:whitespace, :pos=>26, :raw=>" "},
461
- {:node=>:ident, :pos=>27, :raw=>"url", :value=>"url"},
462
- {:node=>:whitespace, :pos=>30, :raw=>" "},
463
- {:node=>:"(", :pos=>31, :raw=>"("},
464
- {:node=>:ident, :pos=>32, :raw=>"foo", :value=>"foo"},
465
- {:node=>:")", :pos=>35, :raw=>")"},
466
- {:node=>:whitespace, :pos=>36, :raw=>" "},
467
- {:node=>:function, :pos=>37, :raw=>"url\\ (", :value=>"url "},
468
- {:node=>:ident, :pos=>43, :raw=>"foo", :value=>"foo"},
469
- {:node=>:")", :pos=>46, :raw=>")"},
470
- {:node=>:whitespace, :pos=>47, :raw=>" "},
471
- {:node=>:url, :pos=>48, :raw=>"url(\t 'foo' ", :value=>"foo"}
472
- ], tokens)
473
- end
474
-
475
- it 'should tokenize bad URLs with extra content after the quoted segment' do
476
- tokens = CT.tokenize("url('a' b) url('c' d)")
477
-
478
- assert_equal([
479
- {:node=>:bad_url, :pos=>0, :raw=>"url('a' b)", :value=>"ab"},
480
- {:node=>:whitespace, :pos=>10, :raw=>" "},
481
- {:node=>:bad_url, :pos=>11, :raw=>"url('c' d)", :value=>"cd"}
482
- ], tokens)
483
- end
484
-
485
- it 'should tokenize bad URLs with newlines in the quoted segment' do
486
- tokens = CT.tokenize("url('a\nb') url('c\n")
487
-
488
- assert_equal([
489
- {:node=>:bad_url, :pos=>0, :raw=>"url('a\nb')", :value=>"a\nb'"},
490
- {:node=>:whitespace, :pos=>10, :raw=>" "},
491
- {:node=>:bad_url, :pos=>11, :raw=>"url('c\n", :value=>"c\n"}
492
- ], tokens)
493
- end
494
-
495
- it 'should tokenize a mix of URLs with valid and invalid escapes' do
496
- tokens = CT.tokenize("url() url( \t) url( Foô\\030\n!\n) url(a b) url(a\\ b) url(a(b) url(a\\(b) url(a'b) url(a\\'b) url(a\"b) url(a\\\"b) url(a\nb) url(a\\\nb) url(a\\a b) url(a\\")
497
-
498
- assert_equal([
499
- {:node=>:url, :pos=>0, :raw=>"url()", :value=>""},
500
- {:node=>:whitespace, :pos=>5, :raw=>" "},
501
- {:node=>:url, :pos=>6, :raw=>"url( \t)", :value=>""},
502
- {:node=>:whitespace, :pos=>13, :raw=>" "},
503
- {:node=>:url, :pos=>14, :raw=>"url( Foô\\030\n!\n)", :value=>"Foô0!"},
504
- {:node=>:whitespace, :pos=>30, :raw=>" "},
505
- {:node=>:bad_url, :pos=>31, :raw=>"url(a b)", :value=>"ab"},
506
- {:node=>:whitespace, :pos=>39, :raw=>" "},
507
- {:node=>:url, :pos=>40, :raw=>"url(a\\ b)", :value=>"a b"},
508
- {:node=>:whitespace, :pos=>49, :raw=>" "},
509
- {:node=>:bad_url, :pos=>50, :raw=>"url(a(b)", :error=>true, :value=>"ab"},
510
- {:node=>:whitespace, :pos=>58, :raw=>" "},
511
- {:node=>:url, :pos=>59, :raw=>"url(a\\(b)", :value=>"a(b"},
512
- {:node=>:whitespace, :pos=>68, :raw=>" "},
513
- {:node=>:bad_url, :pos=>69, :raw=>"url(a'b)", :error=>true, :value=>"ab"},
514
- {:node=>:whitespace, :pos=>77, :raw=>" "},
515
- {:node=>:url, :pos=>78, :raw=>"url(a\\'b)", :value=>"a'b"},
516
- {:node=>:whitespace, :pos=>87, :raw=>" "},
517
- {:node=>:bad_url, :pos=>88, :raw=>"url(a\"b)", :error=>true, :value=>"ab"},
518
- {:node=>:whitespace, :pos=>96, :raw=>" "},
519
- {:node=>:url, :pos=>97, :raw=>"url(a\\\"b)", :value=>"a\"b"},
520
- {:node=>:whitespace, :pos=>106, :raw=>" "},
521
- {:node=>:bad_url, :pos=>107, :raw=>"url(a\nb)", :value=>"ab"},
522
- {:node=>:whitespace, :pos=>115, :raw=>" "},
523
- {:node=>:bad_url,
524
- :pos=>116,
525
- :raw=>"url(a\\\nb)",
526
- :error=>true,
527
- :value=>"a\nb"},
528
- {:node=>:whitespace, :pos=>125, :raw=>" "},
529
- {:node=>:url, :pos=>126, :raw=>"url(a\\a b)", :value=>"a\nb"},
530
- {:node=>:whitespace, :pos=>136, :raw=>" "},
531
- {:node=>:url, :pos=>137, :raw=>"url(a\\", :value=>"a\uFFFD"}
532
- ], tokens)
533
- end
534
-
535
- it 'should tokenize a longass unquoted, unclosed URL' do
536
- tokens = CT.tokenize("url(\u0000!\#$%&*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~\u0080\u0081\u009e\u009f\u00a0\u00a1\u00a2")
537
-
538
- assert_equal([
539
- {:node=>:url,
540
- :pos=>0,
541
- :raw=>
542
- "url(\uFFFD!\#$%&*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~\u0080\u0081\u009E\u009F ¡¢",
543
- :value=>
544
- "\uFFFD!\#$%&*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~\u0080\u0081\u009e\u009f\u00a0¡¢"}
545
- ], tokens)
546
- end
547
-
548
- it 'should tokenize lots of bad escaped URLs' do
549
- tokens = CT.tokenize("url(\u0001) url(\u0002) url(\u0003) url(\u0004) url(\u0005) url(\u0006) url(\u0007) url(\u0008) url(\u000b) url(\u000e) url(\u000f) url(\u0010) url(\u0011) url(\u0012) url(\u0013) url(\u0014) url(\u0015) url(\u0016) url(\u0017) url(\u0018) url(\u0019) url(\u001a) url(\u001b) url(\u001c) url(\u001d) url(\u001e) url(\u001f) url(\u007f)")
550
-
551
- assert_equal([
552
- {:node=>:bad_url, :pos=>0, :raw=>"url(\u0001)", :error=>true, :value=>""},
553
- {:node=>:whitespace, :pos=>6, :raw=>" "},
554
- {:node=>:bad_url, :pos=>7, :raw=>"url(\u0002)", :error=>true, :value=>""},
555
- {:node=>:whitespace, :pos=>13, :raw=>" "},
556
- {:node=>:bad_url, :pos=>14, :raw=>"url(\u0003)", :error=>true, :value=>""},
557
- {:node=>:whitespace, :pos=>20, :raw=>" "},
558
- {:node=>:bad_url, :pos=>21, :raw=>"url(\u0004)", :error=>true, :value=>""},
559
- {:node=>:whitespace, :pos=>27, :raw=>" "},
560
- {:node=>:bad_url, :pos=>28, :raw=>"url(\u0005)", :error=>true, :value=>""},
561
- {:node=>:whitespace, :pos=>34, :raw=>" "},
562
- {:node=>:bad_url, :pos=>35, :raw=>"url(\u0006)", :error=>true, :value=>""},
563
- {:node=>:whitespace, :pos=>41, :raw=>" "},
564
- {:node=>:bad_url, :pos=>42, :raw=>"url(\a)", :error=>true, :value=>""},
565
- {:node=>:whitespace, :pos=>48, :raw=>" "},
566
- {:node=>:bad_url, :pos=>49, :raw=>"url(\b)", :error=>true, :value=>""},
567
- {:node=>:whitespace, :pos=>55, :raw=>" "},
568
- {:node=>:bad_url, :pos=>56, :raw=>"url(\v)", :error=>true, :value=>""},
569
- {:node=>:whitespace, :pos=>62, :raw=>" "},
570
- {:node=>:bad_url, :pos=>63, :raw=>"url(\u000E)", :error=>true, :value=>""},
571
- {:node=>:whitespace, :pos=>69, :raw=>" "},
572
- {:node=>:bad_url, :pos=>70, :raw=>"url(\u000F)", :error=>true, :value=>""},
573
- {:node=>:whitespace, :pos=>76, :raw=>" "},
574
- {:node=>:bad_url, :pos=>77, :raw=>"url(\u0010)", :error=>true, :value=>""},
575
- {:node=>:whitespace, :pos=>83, :raw=>" "},
576
- {:node=>:bad_url, :pos=>84, :raw=>"url(\u0011)", :error=>true, :value=>""},
577
- {:node=>:whitespace, :pos=>90, :raw=>" "},
578
- {:node=>:bad_url, :pos=>91, :raw=>"url(\u0012)", :error=>true, :value=>""},
579
- {:node=>:whitespace, :pos=>97, :raw=>" "},
580
- {:node=>:bad_url, :pos=>98, :raw=>"url(\u0013)", :error=>true, :value=>""},
581
- {:node=>:whitespace, :pos=>104, :raw=>" "},
582
- {:node=>:bad_url, :pos=>105, :raw=>"url(\u0014)", :error=>true, :value=>""},
583
- {:node=>:whitespace, :pos=>111, :raw=>" "},
584
- {:node=>:bad_url, :pos=>112, :raw=>"url(\u0015)", :error=>true, :value=>""},
585
- {:node=>:whitespace, :pos=>118, :raw=>" "},
586
- {:node=>:bad_url, :pos=>119, :raw=>"url(\u0016)", :error=>true, :value=>""},
587
- {:node=>:whitespace, :pos=>125, :raw=>" "},
588
- {:node=>:bad_url, :pos=>126, :raw=>"url(\u0017)", :error=>true, :value=>""},
589
- {:node=>:whitespace, :pos=>132, :raw=>" "},
590
- {:node=>:bad_url, :pos=>133, :raw=>"url(\u0018)", :error=>true, :value=>""},
591
- {:node=>:whitespace, :pos=>139, :raw=>" "},
592
- {:node=>:bad_url, :pos=>140, :raw=>"url(\u0019)", :error=>true, :value=>""},
593
- {:node=>:whitespace, :pos=>146, :raw=>" "},
594
- {:node=>:bad_url, :pos=>147, :raw=>"url(\u001A)", :error=>true, :value=>""},
595
- {:node=>:whitespace, :pos=>153, :raw=>" "},
596
- {:node=>:bad_url, :pos=>154, :raw=>"url(\e)", :error=>true, :value=>""},
597
- {:node=>:whitespace, :pos=>160, :raw=>" "},
598
- {:node=>:bad_url, :pos=>161, :raw=>"url(\u001C)", :error=>true, :value=>""},
599
- {:node=>:whitespace, :pos=>167, :raw=>" "},
600
- {:node=>:bad_url, :pos=>168, :raw=>"url(\u001D)", :error=>true, :value=>""},
601
- {:node=>:whitespace, :pos=>174, :raw=>" "},
602
- {:node=>:bad_url, :pos=>175, :raw=>"url(\u001E)", :error=>true, :value=>""},
603
- {:node=>:whitespace, :pos=>181, :raw=>" "},
604
- {:node=>:bad_url, :pos=>182, :raw=>"url(\u001F)", :error=>true, :value=>""},
605
- {:node=>:whitespace, :pos=>188, :raw=>" "},
606
- {:node=>:bad_url, :pos=>189, :raw=>"url(\u007F)", :error=>true, :value=>""}
607
- ], tokens)
608
- end
609
-
610
- it 'should tokenize numbers' do
611
- tokens = CT.tokenize("12 +34 -45 .67 +.89 -.01 2.3 +45.0 -0.67")
612
-
613
- assert_equal([
614
- {:node=>:number,
615
- :pos=>0,
616
- :raw=>"12",
617
- :repr=>"12",
618
- :type=>:integer,
619
- :value=>12},
620
- {:node=>:whitespace, :pos=>2, :raw=>" "},
621
- {:node=>:number,
622
- :pos=>3,
623
- :raw=>"+34",
624
- :repr=>"+34",
625
- :type=>:integer,
626
- :value=>34},
627
- {:node=>:whitespace, :pos=>6, :raw=>" "},
628
- {:node=>:number,
629
- :pos=>7,
630
- :raw=>"-45",
631
- :repr=>"-45",
632
- :type=>:integer,
633
- :value=>-45},
634
- {:node=>:whitespace, :pos=>10, :raw=>" "},
635
- {:node=>:number,
636
- :pos=>11,
637
- :raw=>".67",
638
- :repr=>".67",
639
- :type=>:number,
640
- :value=>0.67},
641
- {:node=>:whitespace, :pos=>14, :raw=>" "},
642
- {:node=>:number,
643
- :pos=>15,
644
- :raw=>"+.89",
645
- :repr=>"+.89",
646
- :type=>:number,
647
- :value=>0.89},
648
- {:node=>:whitespace, :pos=>19, :raw=>" "},
649
- {:node=>:number,
650
- :pos=>20,
651
- :raw=>"-.01",
652
- :repr=>"-.01",
653
- :type=>:number,
654
- :value=>-0.01},
655
- {:node=>:whitespace, :pos=>24, :raw=>" "},
656
- {:node=>:number,
657
- :pos=>25,
658
- :raw=>"2.3",
659
- :repr=>"2.3",
660
- :type=>:number,
661
- :value=>2.3},
662
- {:node=>:whitespace, :pos=>28, :raw=>" "},
663
- {:node=>:number,
664
- :pos=>29,
665
- :raw=>"+45.0",
666
- :repr=>"+45.0",
667
- :type=>:number,
668
- :value=>45},
669
- {:node=>:whitespace, :pos=>34, :raw=>" "},
670
- {:node=>:number,
671
- :pos=>35,
672
- :raw=>"-0.67",
673
- :repr=>"-0.67",
674
- :type=>:number,
675
- :value=>-0.67}
676
- ], tokens)
677
- end
678
-
679
- it 'should tokenize scientific notation' do
680
- tokens = CT.tokenize("12e2 +34e+1 -45E-0 .68e+3 +.79e-1 -.01E2 2.3E+1 +45.0e6 -0.67e0")
681
-
682
- assert_equal([
683
- {:node=>:number,
684
- :pos=>0,
685
- :raw=>"12e2",
686
- :repr=>"12e2",
687
- :type=>:number,
688
- :value=>1200},
689
- {:node=>:whitespace, :pos=>4, :raw=>" "},
690
- {:node=>:number,
691
- :pos=>5,
692
- :raw=>"+34e+1",
693
- :repr=>"+34e+1",
694
- :type=>:number,
695
- :value=>340},
696
- {:node=>:whitespace, :pos=>11, :raw=>" "},
697
- {:node=>:number,
698
- :pos=>12,
699
- :raw=>"-45E-0",
700
- :repr=>"-45E-0",
701
- :type=>:number,
702
- :value=>-45},
703
- {:node=>:whitespace, :pos=>18, :raw=>" "},
704
- {:node=>:number,
705
- :pos=>19,
706
- :raw=>".68e+3",
707
- :repr=>".68e+3",
708
- :type=>:number,
709
- :value=>680},
710
- {:node=>:whitespace, :pos=>25, :raw=>" "},
711
- {:node=>:number,
712
- :pos=>26,
713
- :raw=>"+.79e-1",
714
- :repr=>"+.79e-1",
715
- :type=>:number,
716
- :value=>0.079},
717
- {:node=>:whitespace, :pos=>33, :raw=>" "},
718
- {:node=>:number,
719
- :pos=>34,
720
- :raw=>"-.01E2",
721
- :repr=>"-.01E2",
722
- :type=>:number,
723
- :value=>-1},
724
- {:node=>:whitespace, :pos=>40, :raw=>" "},
725
- {:node=>:number,
726
- :pos=>41,
727
- :raw=>"2.3E+1",
728
- :repr=>"2.3E+1",
729
- :type=>:number,
730
- :value=>23},
731
- {:node=>:whitespace, :pos=>47, :raw=>" "},
732
- {:node=>:number,
733
- :pos=>48,
734
- :raw=>"+45.0e6",
735
- :repr=>"+45.0e6",
736
- :type=>:number,
737
- :value=>45000000},
738
- {:node=>:whitespace, :pos=>55, :raw=>" "},
739
- {:node=>:number,
740
- :pos=>56,
741
- :raw=>"-0.67e0",
742
- :repr=>"-0.67e0",
743
- :type=>:number,
744
- :value=>-0.67}
745
- ], tokens)
746
- end
747
-
748
- it 'should tokenize a decimal point with no following digits as a delim' do
749
- tokens = CT.tokenize("3. ")
750
-
751
- assert_equal([
752
- {:node=>:number, :pos=>0, :raw=>"3", :repr=>"3", :type=>:integer, :value=>3},
753
- {:node=>:delim, :pos=>1, :raw=>".", :value=>"."},
754
- {:node=>:whitespace, :pos=>2, :raw=>" "}
755
- ], tokens)
756
- end
757
-
758
- it 'should not allow a scientific notation "E" to be escaped' do
759
- tokens = CT.tokenize("3\\65-2 ")
760
-
761
- assert_equal([
762
- {:node=>:dimension,
763
- :pos=>0,
764
- :raw=>"3\\65-2",
765
- :repr=>"3",
766
- :type=>:integer,
767
- :unit=>"e-2",
768
- :value=>3},
769
- {:node=>:whitespace, :pos=>6, :raw=>" "}
770
- ], tokens)
771
- end
772
-
773
- it 'should only allow integer exponents in scientific notation' do
774
- tokens = CT.tokenize("3e-2.1 ")
775
-
776
- assert_equal([
777
- {:node=>:number,
778
- :pos=>0,
779
- :raw=>"3e-2",
780
- :repr=>"3e-2",
781
- :type=>:number,
782
- :value=>0.03},
783
- {:node=>:number,
784
- :pos=>4,
785
- :raw=>".1",
786
- :repr=>".1",
787
- :type=>:number,
788
- :value=>0.1},
789
- {:node=>:whitespace, :pos=>6, :raw=>" "}
790
- ], tokens)
791
- end
792
-
793
- it 'should tokenize percentages' do
794
- tokens = CT.tokenize("12% +34% -45% .67% +.89% -.01% 2.3% +45.0% -0.67%")
795
-
796
- assert_equal([
797
- {:node=>:percentage,
798
- :pos=>0,
799
- :raw=>"12%",
800
- :repr=>"12",
801
- :type=>:integer,
802
- :value=>12},
803
- {:node=>:whitespace, :pos=>3, :raw=>" "},
804
- {:node=>:percentage,
805
- :pos=>4,
806
- :raw=>"+34%",
807
- :repr=>"+34",
808
- :type=>:integer,
809
- :value=>34},
810
- {:node=>:whitespace, :pos=>8, :raw=>" "},
811
- {:node=>:percentage,
812
- :pos=>9,
813
- :raw=>"-45%",
814
- :repr=>"-45",
815
- :type=>:integer,
816
- :value=>-45},
817
- {:node=>:whitespace, :pos=>13, :raw=>" "},
818
- {:node=>:percentage,
819
- :pos=>14,
820
- :raw=>".67%",
821
- :repr=>".67",
822
- :type=>:number,
823
- :value=>0.67},
824
- {:node=>:whitespace, :pos=>18, :raw=>" "},
825
- {:node=>:percentage,
826
- :pos=>19,
827
- :raw=>"+.89%",
828
- :repr=>"+.89",
829
- :type=>:number,
830
- :value=>0.89},
831
- {:node=>:whitespace, :pos=>24, :raw=>" "},
832
- {:node=>:percentage,
833
- :pos=>25,
834
- :raw=>"-.01%",
835
- :repr=>"-.01",
836
- :type=>:number,
837
- :value=>-0.01},
838
- {:node=>:whitespace, :pos=>30, :raw=>" "},
839
- {:node=>:percentage,
840
- :pos=>31,
841
- :raw=>"2.3%",
842
- :repr=>"2.3",
843
- :type=>:number,
844
- :value=>2.3},
845
- {:node=>:whitespace, :pos=>35, :raw=>" "},
846
- {:node=>:percentage,
847
- :pos=>36,
848
- :raw=>"+45.0%",
849
- :repr=>"+45.0",
850
- :type=>:number,
851
- :value=>45},
852
- {:node=>:whitespace, :pos=>42, :raw=>" "},
853
- {:node=>:percentage,
854
- :pos=>43,
855
- :raw=>"-0.67%",
856
- :repr=>"-0.67",
857
- :type=>:number,
858
- :value=>-0.67}
859
- ], tokens)
860
- end
861
-
862
- it 'should tokenize percentages with scientific notation' do
863
- tokens = CT.tokenize("12e2% +34e+1% -45E-0% .68e+3% +.79e-1% -.01E2% 2.3E+1% +45.0e6% -0.67e0%")
864
-
865
- assert_equal([
866
- {:node=>:percentage,
867
- :pos=>0,
868
- :raw=>"12e2%",
869
- :repr=>"12e2",
870
- :type=>:number,
871
- :value=>1200},
872
- {:node=>:whitespace, :pos=>5, :raw=>" "},
873
- {:node=>:percentage,
874
- :pos=>6,
875
- :raw=>"+34e+1%",
876
- :repr=>"+34e+1",
877
- :type=>:number,
878
- :value=>340},
879
- {:node=>:whitespace, :pos=>13, :raw=>" "},
880
- {:node=>:percentage,
881
- :pos=>14,
882
- :raw=>"-45E-0%",
883
- :repr=>"-45E-0",
884
- :type=>:number,
885
- :value=>-45},
886
- {:node=>:whitespace, :pos=>21, :raw=>" "},
887
- {:node=>:percentage,
888
- :pos=>22,
889
- :raw=>".68e+3%",
890
- :repr=>".68e+3",
891
- :type=>:number,
892
- :value=>680},
893
- {:node=>:whitespace, :pos=>29, :raw=>" "},
894
- {:node=>:percentage,
895
- :pos=>30,
896
- :raw=>"+.79e-1%",
897
- :repr=>"+.79e-1",
898
- :type=>:number,
899
- :value=>0.079},
900
- {:node=>:whitespace, :pos=>38, :raw=>" "},
901
- {:node=>:percentage,
902
- :pos=>39,
903
- :raw=>"-.01E2%",
904
- :repr=>"-.01E2",
905
- :type=>:number,
906
- :value=>-1},
907
- {:node=>:whitespace, :pos=>46, :raw=>" "},
908
- {:node=>:percentage,
909
- :pos=>47,
910
- :raw=>"2.3E+1%",
911
- :repr=>"2.3E+1",
912
- :type=>:number,
913
- :value=>23},
914
- {:node=>:whitespace, :pos=>54, :raw=>" "},
915
- {:node=>:percentage,
916
- :pos=>55,
917
- :raw=>"+45.0e6%",
918
- :repr=>"+45.0e6",
919
- :type=>:number,
920
- :value=>45000000},
921
- {:node=>:whitespace, :pos=>63, :raw=>" "},
922
- {:node=>:percentage,
923
- :pos=>64,
924
- :raw=>"-0.67e0%",
925
- :repr=>"-0.67e0",
926
- :type=>:number,
927
- :value=>-0.67}
928
- ], tokens)
929
- end
930
-
931
- it 'should not tokenize an escaped percent sign' do
932
- tokens = CT.tokenize("12\\% ")
933
-
934
- assert_equal([
935
- {:node=>:dimension,
936
- :pos=>0,
937
- :raw=>"12\\%",
938
- :repr=>"12",
939
- :type=>:integer,
940
- :unit=>"%",
941
- :value=>12},
942
- {:node=>:whitespace, :pos=>4, :raw=>" "}
943
- ], tokens)
944
- end
945
-
946
- it 'should tokenize dimensions' do
947
- tokens = CT.tokenize("12px +34px -45px .67px +.89px -.01px 2.3px +45.0px -0.67px")
948
-
949
- assert_equal([
950
- {:node=>:dimension,
951
- :pos=>0,
952
- :raw=>"12px",
953
- :repr=>"12",
954
- :type=>:integer,
955
- :unit=>"px",
956
- :value=>12},
957
- {:node=>:whitespace, :pos=>4, :raw=>" "},
958
- {:node=>:dimension,
959
- :pos=>5,
960
- :raw=>"+34px",
961
- :repr=>"+34",
962
- :type=>:integer,
963
- :unit=>"px",
964
- :value=>34},
965
- {:node=>:whitespace, :pos=>10, :raw=>" "},
966
- {:node=>:dimension,
967
- :pos=>11,
968
- :raw=>"-45px",
969
- :repr=>"-45",
970
- :type=>:integer,
971
- :unit=>"px",
972
- :value=>-45},
973
- {:node=>:whitespace, :pos=>16, :raw=>" "},
974
- {:node=>:dimension,
975
- :pos=>17,
976
- :raw=>".67px",
977
- :repr=>".67",
978
- :type=>:number,
979
- :unit=>"px",
980
- :value=>0.67},
981
- {:node=>:whitespace, :pos=>22, :raw=>" "},
982
- {:node=>:dimension,
983
- :pos=>23,
984
- :raw=>"+.89px",
985
- :repr=>"+.89",
986
- :type=>:number,
987
- :unit=>"px",
988
- :value=>0.89},
989
- {:node=>:whitespace, :pos=>29, :raw=>" "},
990
- {:node=>:dimension,
991
- :pos=>30,
992
- :raw=>"-.01px",
993
- :repr=>"-.01",
994
- :type=>:number,
995
- :unit=>"px",
996
- :value=>-0.01},
997
- {:node=>:whitespace, :pos=>36, :raw=>" "},
998
- {:node=>:dimension,
999
- :pos=>37,
1000
- :raw=>"2.3px",
1001
- :repr=>"2.3",
1002
- :type=>:number,
1003
- :unit=>"px",
1004
- :value=>2.3},
1005
- {:node=>:whitespace, :pos=>42, :raw=>" "},
1006
- {:node=>:dimension,
1007
- :pos=>43,
1008
- :raw=>"+45.0px",
1009
- :repr=>"+45.0",
1010
- :type=>:number,
1011
- :unit=>"px",
1012
- :value=>45},
1013
- {:node=>:whitespace, :pos=>50, :raw=>" "},
1014
- {:node=>:dimension,
1015
- :pos=>51,
1016
- :raw=>"-0.67px",
1017
- :repr=>"-0.67",
1018
- :type=>:number,
1019
- :unit=>"px",
1020
- :value=>-0.67}
1021
- ], tokens)
1022
- end
1023
-
1024
- it 'should tokenize dimensions with scientific notation' do
1025
- tokens = CT.tokenize("12e2px +34e+1px -45E-0px .68e+3px +.79e-1px -.01E2px 2.3E+1px +45.0e6px -0.67e0px")
1026
-
1027
- assert_equal([
1028
- {:node=>:dimension,
1029
- :pos=>0,
1030
- :raw=>"12e2px",
1031
- :repr=>"12e2",
1032
- :type=>:number,
1033
- :unit=>"px",
1034
- :value=>1200},
1035
- {:node=>:whitespace, :pos=>6, :raw=>" "},
1036
- {:node=>:dimension,
1037
- :pos=>7,
1038
- :raw=>"+34e+1px",
1039
- :repr=>"+34e+1",
1040
- :type=>:number,
1041
- :unit=>"px",
1042
- :value=>340},
1043
- {:node=>:whitespace, :pos=>15, :raw=>" "},
1044
- {:node=>:dimension,
1045
- :pos=>16,
1046
- :raw=>"-45E-0px",
1047
- :repr=>"-45E-0",
1048
- :type=>:number,
1049
- :unit=>"px",
1050
- :value=>-45},
1051
- {:node=>:whitespace, :pos=>24, :raw=>" "},
1052
- {:node=>:dimension,
1053
- :pos=>25,
1054
- :raw=>".68e+3px",
1055
- :repr=>".68e+3",
1056
- :type=>:number,
1057
- :unit=>"px",
1058
- :value=>680},
1059
- {:node=>:whitespace, :pos=>33, :raw=>" "},
1060
- {:node=>:dimension,
1061
- :pos=>34,
1062
- :raw=>"+.79e-1px",
1063
- :repr=>"+.79e-1",
1064
- :type=>:number,
1065
- :unit=>"px",
1066
- :value=>0.079},
1067
- {:node=>:whitespace, :pos=>43, :raw=>" "},
1068
- {:node=>:dimension,
1069
- :pos=>44,
1070
- :raw=>"-.01E2px",
1071
- :repr=>"-.01E2",
1072
- :type=>:number,
1073
- :unit=>"px",
1074
- :value=>-1},
1075
- {:node=>:whitespace, :pos=>52, :raw=>" "},
1076
- {:node=>:dimension,
1077
- :pos=>53,
1078
- :raw=>"2.3E+1px",
1079
- :repr=>"2.3E+1",
1080
- :type=>:number,
1081
- :unit=>"px",
1082
- :value=>23},
1083
- {:node=>:whitespace, :pos=>61, :raw=>" "},
1084
- {:node=>:dimension,
1085
- :pos=>62,
1086
- :raw=>"+45.0e6px",
1087
- :repr=>"+45.0e6",
1088
- :type=>:number,
1089
- :unit=>"px",
1090
- :value=>45000000},
1091
- {:node=>:whitespace, :pos=>71, :raw=>" "},
1092
- {:node=>:dimension,
1093
- :pos=>72,
1094
- :raw=>"-0.67e0px",
1095
- :repr=>"-0.67e0",
1096
- :type=>:number,
1097
- :unit=>"px",
1098
- :value=>-0.67}
1099
- ], tokens)
1100
- end
1101
-
1102
- it 'should tokenize a mix of dimensions and numbers' do
1103
- tokens = CT.tokenize("12red0 12.0-red 12--red 12-\\-red 120red 12-0red 12\u0000red 12_Red 12.red 12rêd")
1104
-
1105
- assert_equal([
1106
- {:node=>:dimension,
1107
- :pos=>0,
1108
- :raw=>"12red0",
1109
- :repr=>"12",
1110
- :type=>:integer,
1111
- :unit=>"red0",
1112
- :value=>12},
1113
- {:node=>:whitespace, :pos=>6, :raw=>" "},
1114
- {:node=>:dimension,
1115
- :pos=>7,
1116
- :raw=>"12.0-red",
1117
- :repr=>"12.0",
1118
- :type=>:number,
1119
- :unit=>"-red",
1120
- :value=>12},
1121
- {:node=>:whitespace, :pos=>15, :raw=>" "},
1122
- {:node=>:number,
1123
- :pos=>16,
1124
- :raw=>"12",
1125
- :repr=>"12",
1126
- :type=>:integer,
1127
- :value=>12},
1128
- {:node=>:delim, :pos=>18, :raw=>"-", :value=>"-"},
1129
- {:node=>:ident, :pos=>19, :raw=>"-red", :value=>"-red"},
1130
- {:node=>:whitespace, :pos=>23, :raw=>" "},
1131
- {:node=>:dimension,
1132
- :pos=>24,
1133
- :raw=>"12-\\-red",
1134
- :repr=>"12",
1135
- :type=>:integer,
1136
- :unit=>"--red",
1137
- :value=>12},
1138
- {:node=>:whitespace, :pos=>32, :raw=>" "},
1139
- {:node=>:dimension,
1140
- :pos=>33,
1141
- :raw=>"120red",
1142
- :repr=>"120",
1143
- :type=>:integer,
1144
- :unit=>"red",
1145
- :value=>120},
1146
- {:node=>:whitespace, :pos=>39, :raw=>" "},
1147
- {:node=>:number,
1148
- :pos=>40,
1149
- :raw=>"12",
1150
- :repr=>"12",
1151
- :type=>:integer,
1152
- :value=>12},
1153
- {:node=>:dimension,
1154
- :pos=>42,
1155
- :raw=>"-0red",
1156
- :repr=>"-0",
1157
- :type=>:integer,
1158
- :unit=>"red",
1159
- :value=>0},
1160
- {:node=>:whitespace, :pos=>47, :raw=>" "},
1161
- {:node=>:dimension,
1162
- :pos=>48,
1163
- :raw=>"12\ufffdred",
1164
- :repr=>"12",
1165
- :type=>:integer,
1166
- :unit=>"\ufffdred",
1167
- :value=>12},
1168
- {:node=>:whitespace, :pos=>54, :raw=>" "},
1169
- {:node=>:dimension,
1170
- :pos=>55,
1171
- :raw=>"12_Red",
1172
- :repr=>"12",
1173
- :type=>:integer,
1174
- :unit=>"_Red",
1175
- :value=>12},
1176
- {:node=>:whitespace, :pos=>61, :raw=>" "},
1177
- {:node=>:number,
1178
- :pos=>62,
1179
- :raw=>"12",
1180
- :repr=>"12",
1181
- :type=>:integer,
1182
- :value=>12},
1183
- {:node=>:delim, :pos=>64, :raw=>".", :value=>"."},
1184
- {:node=>:ident, :pos=>65, :raw=>"red", :value=>"red"},
1185
- {:node=>:whitespace, :pos=>68, :raw=>" "},
1186
- {:node=>:dimension,
1187
- :pos=>69,
1188
- :raw=>"12rêd",
1189
- :repr=>"12",
1190
- :type=>:integer,
1191
- :unit=>"rêd",
1192
- :value=>12}
1193
- ], tokens)
1194
- end
1195
-
1196
- it 'should tokenize unicode ranges' do
1197
- tokens = CT.tokenize("u+1 U+10 U+100 U+1000 U+10000 U+100000 U+1000000")
1198
-
1199
- assert_equal([
1200
- {:node=>:unicode_range, :pos=>0, :raw=>"u+1", :start=>1, :end=>1},
1201
- {:node=>:whitespace, :pos=>3, :raw=>" "},
1202
- {:node=>:unicode_range, :pos=>4, :raw=>"U+10", :start=>16, :end=>16},
1203
- {:node=>:whitespace, :pos=>8, :raw=>" "},
1204
- {:node=>:unicode_range, :pos=>9, :raw=>"U+100", :start=>256, :end=>256},
1205
- {:node=>:whitespace, :pos=>14, :raw=>" "},
1206
- {:node=>:unicode_range, :pos=>15, :raw=>"U+1000", :start=>4096, :end=>4096},
1207
- {:node=>:whitespace, :pos=>21, :raw=>" "},
1208
- {:node=>:unicode_range,
1209
- :pos=>22,
1210
- :raw=>"U+10000",
1211
- :start=>65536,
1212
- :end=>65536},
1213
- {:node=>:whitespace, :pos=>29, :raw=>" "},
1214
- {:node=>:unicode_range,
1215
- :pos=>30,
1216
- :raw=>"U+100000",
1217
- :start=>1048576,
1218
- :end=>1048576},
1219
- {:node=>:whitespace, :pos=>38, :raw=>" "},
1220
- {:node=>:unicode_range,
1221
- :pos=>39,
1222
- :raw=>"U+100000",
1223
- :start=>1048576,
1224
- :end=>1048576},
1225
- {:node=>:number, :pos=>47, :raw=>"0", :repr=>"0", :type=>:integer, :value=>0}
1226
- ], tokens)
1227
- end
1228
-
1229
- it 'should tokenize Unicode ranges with single wildcards' do
1230
- tokens = CT.tokenize("u+? u+1? U+10? U+100? U+1000? U+10000? U+100000?")
1231
-
1232
- assert_equal([
1233
- {:node=>:unicode_range, :pos=>0, :raw=>"u+?", :start=>0, :end=>15},
1234
- {:node=>:whitespace, :pos=>3, :raw=>" "},
1235
- {:node=>:unicode_range, :pos=>4, :raw=>"u+1?", :start=>16, :end=>31},
1236
- {:node=>:whitespace, :pos=>8, :raw=>" "},
1237
- {:node=>:unicode_range, :pos=>9, :raw=>"U+10?", :start=>256, :end=>271},
1238
- {:node=>:whitespace, :pos=>14, :raw=>" "},
1239
- {:node=>:unicode_range, :pos=>15, :raw=>"U+100?", :start=>4096, :end=>4111},
1240
- {:node=>:whitespace, :pos=>21, :raw=>" "},
1241
- {:node=>:unicode_range,
1242
- :pos=>22,
1243
- :raw=>"U+1000?",
1244
- :start=>65536,
1245
- :end=>65551},
1246
- {:node=>:whitespace, :pos=>29, :raw=>" "},
1247
- {:node=>:unicode_range,
1248
- :pos=>30,
1249
- :raw=>"U+10000?",
1250
- :start=>1048576,
1251
- :end=>1048591},
1252
- {:node=>:whitespace, :pos=>38, :raw=>" "},
1253
- {:node=>:unicode_range,
1254
- :pos=>39,
1255
- :raw=>"U+100000",
1256
- :start=>1048576,
1257
- :end=>1048576},
1258
- {:node=>:delim, :pos=>47, :raw=>"?", :value=>"?"}
1259
- ], tokens)
1260
- end
1261
-
1262
- it 'should tokenize Unicode ranges with two wildcards' do
1263
- tokens = CT.tokenize("u+?? U+1?? U+10?? U+100?? U+1000?? U+10000??")
1264
-
1265
- assert_equal([
1266
- {:node=>:unicode_range, :pos=>0, :raw=>"u+??", :start=>0, :end=>255},
1267
- {:node=>:whitespace, :pos=>4, :raw=>" "},
1268
- {:node=>:unicode_range, :pos=>5, :raw=>"U+1??", :start=>256, :end=>511},
1269
- {:node=>:whitespace, :pos=>10, :raw=>" "},
1270
- {:node=>:unicode_range, :pos=>11, :raw=>"U+10??", :start=>4096, :end=>4351},
1271
- {:node=>:whitespace, :pos=>17, :raw=>" "},
1272
- {:node=>:unicode_range,
1273
- :pos=>18,
1274
- :raw=>"U+100??",
1275
- :start=>65536,
1276
- :end=>65791},
1277
- {:node=>:whitespace, :pos=>25, :raw=>" "},
1278
- {:node=>:unicode_range,
1279
- :pos=>26,
1280
- :raw=>"U+1000??",
1281
- :start=>1048576,
1282
- :end=>1048831},
1283
- {:node=>:whitespace, :pos=>34, :raw=>" "},
1284
- {:node=>:unicode_range,
1285
- :pos=>35,
1286
- :raw=>"U+10000?",
1287
- :start=>1048576,
1288
- :end=>1048591},
1289
- {:node=>:delim, :pos=>43, :raw=>"?", :value=>"?"}
1290
- ], tokens)
1291
- end
1292
-
1293
- it 'should tokenize Unicode ranges with three wildcards' do
1294
- tokens = CT.tokenize("u+??? U+1??? U+10??? U+100??? U+1000???")
1295
-
1296
- assert_equal([
1297
- {:node=>:unicode_range, :pos=>0, :raw=>"u+???", :start=>0, :end=>4095},
1298
- {:node=>:whitespace, :pos=>5, :raw=>" "},
1299
- {:node=>:unicode_range, :pos=>6, :raw=>"U+1???", :start=>4096, :end=>8191},
1300
- {:node=>:whitespace, :pos=>12, :raw=>" "},
1301
- {:node=>:unicode_range,
1302
- :pos=>13,
1303
- :raw=>"U+10???",
1304
- :start=>65536,
1305
- :end=>69631},
1306
- {:node=>:whitespace, :pos=>20, :raw=>" "},
1307
- {:node=>:unicode_range,
1308
- :pos=>21,
1309
- :raw=>"U+100???",
1310
- :start=>1048576,
1311
- :end=>1052671},
1312
- {:node=>:whitespace, :pos=>29, :raw=>" "},
1313
- {:node=>:unicode_range,
1314
- :pos=>30,
1315
- :raw=>"U+1000??",
1316
- :start=>1048576,
1317
- :end=>1048831},
1318
- {:node=>:delim, :pos=>38, :raw=>"?", :value=>"?"}
1319
- ], tokens)
1320
- end
1321
-
1322
- it 'should tokenize Unicode ranges with four wildcards' do
1323
- tokens = CT.tokenize("u+???? U+1???? U+10???? U+100????")
1324
-
1325
- assert_equal([
1326
- {:node=>:unicode_range, :pos=>0, :raw=>"u+????", :start=>0, :end=>65535},
1327
- {:node=>:whitespace, :pos=>6, :raw=>" "},
1328
- {:node=>:unicode_range,
1329
- :pos=>7,
1330
- :raw=>"U+1????",
1331
- :start=>65536,
1332
- :end=>131071},
1333
- {:node=>:whitespace, :pos=>14, :raw=>" "},
1334
- {:node=>:unicode_range,
1335
- :pos=>15,
1336
- :raw=>"U+10????",
1337
- :start=>1048576,
1338
- :end=>1114111},
1339
- {:node=>:whitespace, :pos=>23, :raw=>" "},
1340
- {:node=>:unicode_range,
1341
- :pos=>24,
1342
- :raw=>"U+100???",
1343
- :start=>1048576,
1344
- :end=>1052671},
1345
- {:node=>:delim, :pos=>32, :raw=>"?", :value=>"?"}
1346
- ], tokens)
1347
- end
1348
-
1349
- it 'should tokenize Unicode ranges with five wildcards' do
1350
- tokens = CT.tokenize("u+????? U+1????? U+10?????")
1351
-
1352
- assert_equal([
1353
- {:node=>:unicode_range, :pos=>0, :raw=>"u+?????", :start=>0, :end=>1048575},
1354
- {:node=>:whitespace, :pos=>7, :raw=>" "},
1355
- {:node=>:unicode_range,
1356
- :pos=>8,
1357
- :raw=>"U+1?????",
1358
- :start=>1048576,
1359
- :end=>2097151},
1360
- {:node=>:whitespace, :pos=>16, :raw=>" "},
1361
- {:node=>:unicode_range,
1362
- :pos=>17,
1363
- :raw=>"U+10????",
1364
- :start=>1048576,
1365
- :end=>1114111},
1366
- {:node=>:delim, :pos=>25, :raw=>"?", :value=>"?"}
1367
- ], tokens)
1368
- end
1369
-
1370
- it 'should tokenize Unicode ranges with six wildcards' do
1371
- tokens = CT.tokenize("u+?????? U+1??????")
1372
-
1373
- assert_equal([
1374
- {:node=>:unicode_range, :pos=>0, :raw=>"u+??????", :start=>0, :end=>16777215},
1375
- {:node=>:whitespace, :pos=>8, :raw=>" "},
1376
- {:node=>:unicode_range,
1377
- :pos=>9,
1378
- :raw=>"U+1?????",
1379
- :start=>1048576,
1380
- :end=>2097151},
1381
- {:node=>:delim, :pos=>17, :raw=>"?", :value=>"?"}
1382
- ], tokens)
1383
- end
1384
-
1385
- it 'should not get confused by an ambiguous number after a Unicode range' do
1386
- tokens = CT.tokenize("u+1-2 U+100000-2 U+1000000-2 U+10-200000")
1387
-
1388
- assert_equal([
1389
- {:node=>:unicode_range, :pos=>0, :raw=>"u+1-2", :start=>1, :end=>2},
1390
- {:node=>:whitespace, :pos=>5, :raw=>" "},
1391
- {:node=>:unicode_range,
1392
- :pos=>6,
1393
- :raw=>"U+100000-2",
1394
- :start=>1048576,
1395
- :end=>2},
1396
- {:node=>:whitespace, :pos=>16, :raw=>" "},
1397
- {:node=>:unicode_range,
1398
- :pos=>17,
1399
- :raw=>"U+100000",
1400
- :start=>1048576,
1401
- :end=>1048576},
1402
- {:node=>:number, :pos=>25, :raw=>"0", :repr=>"0", :type=>:integer, :value=>0},
1403
- {:node=>:number,
1404
- :pos=>26,
1405
- :raw=>"-2",
1406
- :repr=>"-2",
1407
- :type=>:integer,
1408
- :value=>-2},
1409
- {:node=>:whitespace, :pos=>28, :raw=>" "},
1410
- {:node=>:unicode_range,
1411
- :pos=>29,
1412
- :raw=>"U+10-200000",
1413
- :start=>16,
1414
- :end=>2097152}
1415
- ], tokens)
1416
- end
1417
-
1418
- it 'should not get fooled by invalid Unicode range prefixes' do
1419
- tokens = CT.tokenize("ù+12 Ü+12 u +12 U+ 12 U+12 - 20 U+1?2 U+1?-50")
1420
-
1421
- assert_equal([
1422
- {:node=>:ident, :pos=>0, :raw=>"ù", :value=>"ù"},
1423
- {:node=>:number,
1424
- :pos=>1,
1425
- :raw=>"+12",
1426
- :repr=>"+12",
1427
- :type=>:integer,
1428
- :value=>12},
1429
- {:node=>:whitespace, :pos=>4, :raw=>" "},
1430
- {:node=>:ident, :pos=>5, :raw=>"Ü", :value=>"Ü"},
1431
- {:node=>:number,
1432
- :pos=>6,
1433
- :raw=>"+12",
1434
- :repr=>"+12",
1435
- :type=>:integer,
1436
- :value=>12},
1437
- {:node=>:whitespace, :pos=>9, :raw=>" "},
1438
- {:node=>:ident, :pos=>10, :raw=>"u", :value=>"u"},
1439
- {:node=>:whitespace, :pos=>11, :raw=>" "},
1440
- {:node=>:number,
1441
- :pos=>12,
1442
- :raw=>"+12",
1443
- :repr=>"+12",
1444
- :type=>:integer,
1445
- :value=>12},
1446
- {:node=>:whitespace, :pos=>15, :raw=>" "},
1447
- {:node=>:ident, :pos=>16, :raw=>"U", :value=>"U"},
1448
- {:node=>:delim, :pos=>17, :raw=>"+", :value=>"+"},
1449
- {:node=>:whitespace, :pos=>18, :raw=>" "},
1450
- {:node=>:number,
1451
- :pos=>19,
1452
- :raw=>"12",
1453
- :repr=>"12",
1454
- :type=>:integer,
1455
- :value=>12},
1456
- {:node=>:whitespace, :pos=>21, :raw=>" "},
1457
- {:node=>:unicode_range, :pos=>22, :raw=>"U+12", :start=>18, :end=>18},
1458
- {:node=>:whitespace, :pos=>26, :raw=>" "},
1459
- {:node=>:delim, :pos=>27, :raw=>"-", :value=>"-"},
1460
- {:node=>:whitespace, :pos=>28, :raw=>" "},
1461
- {:node=>:number,
1462
- :pos=>29,
1463
- :raw=>"20",
1464
- :repr=>"20",
1465
- :type=>:integer,
1466
- :value=>20},
1467
- {:node=>:whitespace, :pos=>31, :raw=>" "},
1468
- {:node=>:unicode_range, :pos=>32, :raw=>"U+1?", :start=>16, :end=>31},
1469
- {:node=>:number, :pos=>36, :raw=>"2", :repr=>"2", :type=>:integer, :value=>2},
1470
- {:node=>:whitespace, :pos=>37, :raw=>" "},
1471
- {:node=>:unicode_range, :pos=>38, :raw=>"U+1?", :start=>16, :end=>31},
1472
- {:node=>:number,
1473
- :pos=>42,
1474
- :raw=>"-50",
1475
- :repr=>"-50",
1476
- :type=>:integer,
1477
- :value=>-50}
1478
- ], tokens)
1479
- end
1480
-
1481
- it 'should tokenize match operators and columns' do
1482
- tokens = CT.tokenize("~=|=^=$=*=||<!------> |/**/| ~/**/=")
1483
-
1484
- assert_equal([
1485
- {:node=>:include_match, :pos=>0, :raw=>"~="},
1486
- {:node=>:dash_match, :pos=>2, :raw=>"|="},
1487
- {:node=>:prefix_match, :pos=>4, :raw=>"^="},
1488
- {:node=>:suffix_match, :pos=>6, :raw=>"$="},
1489
- {:node=>:substring_match, :pos=>8, :raw=>"*="},
1490
- {:node=>:column, :pos=>10, :raw=>"||"},
1491
- {:node=>:cdo, :pos=>12, :raw=>"<!--"},
1492
- {:node=>:delim, :pos=>16, :raw=>"-", :value=>"-"},
1493
- {:node=>:delim, :pos=>17, :raw=>"-", :value=>"-"},
1494
- {:node=>:cdc, :pos=>18, :raw=>"-->"},
1495
- {:node=>:whitespace, :pos=>21, :raw=>" "},
1496
- {:node=>:delim, :pos=>22, :raw=>"|", :value=>"|"},
1497
- {:node=>:delim, :pos=>27, :raw=>"|", :value=>"|"},
1498
- {:node=>:whitespace, :pos=>28, :raw=>" "},
1499
- {:node=>:delim, :pos=>29, :raw=>"~", :value=>"~"},
1500
- {:node=>:delim, :pos=>34, :raw=>"=", :value=>"="}
1501
- ], tokens)
1502
- end
1503
-
1504
- it 'should tokenize selector functions and rule blocks' do
1505
- tokens = CT.tokenize("a:not([href^=http\\:], [href ^=\t'https\\:'\n]) { color: rgba(0%, 100%, 50%); }")
1506
-
1507
- assert_equal([
1508
- {:node=>:ident, :pos=>0, :raw=>"a", :value=>"a"},
1509
- {:node=>:colon, :pos=>1, :raw=>":"},
1510
- {:node=>:function, :pos=>2, :raw=>"not(", :value=>"not"},
1511
- {:node=>:"[", :pos=>6, :raw=>"["},
1512
- {:node=>:ident, :pos=>7, :raw=>"href", :value=>"href"},
1513
- {:node=>:prefix_match, :pos=>11, :raw=>"^="},
1514
- {:node=>:ident, :pos=>13, :raw=>"http\\:", :value=>"http:"},
1515
- {:node=>:"]", :pos=>19, :raw=>"]"},
1516
- {:node=>:comma, :pos=>20, :raw=>","},
1517
- {:node=>:whitespace, :pos=>21, :raw=>" "},
1518
- {:node=>:"[", :pos=>23, :raw=>"["},
1519
- {:node=>:ident, :pos=>24, :raw=>"href", :value=>"href"},
1520
- {:node=>:whitespace, :pos=>28, :raw=>" "},
1521
- {:node=>:prefix_match, :pos=>29, :raw=>"^="},
1522
- {:node=>:whitespace, :pos=>31, :raw=>"\t"},
1523
- {:node=>:string, :pos=>32, :raw=>"'https\\:'", :value=>"https:"},
1524
- {:node=>:whitespace, :pos=>41, :raw=>"\n"},
1525
- {:node=>:"]", :pos=>42, :raw=>"]"},
1526
- {:node=>:")", :pos=>43, :raw=>")"},
1527
- {:node=>:whitespace, :pos=>44, :raw=>" "},
1528
- {:node=>:"{", :pos=>45, :raw=>"{"},
1529
- {:node=>:whitespace, :pos=>46, :raw=>" "},
1530
- {:node=>:ident, :pos=>47, :raw=>"color", :value=>"color"},
1531
- {:node=>:colon, :pos=>52, :raw=>":"},
1532
- {:node=>:whitespace, :pos=>53, :raw=>" "},
1533
- {:node=>:function, :pos=>54, :raw=>"rgba(", :value=>"rgba"},
1534
- {:node=>:percentage,
1535
- :pos=>59,
1536
- :raw=>"0%",
1537
- :repr=>"0",
1538
- :type=>:integer,
1539
- :value=>0},
1540
- {:node=>:comma, :pos=>61, :raw=>","},
1541
- {:node=>:whitespace, :pos=>62, :raw=>" "},
1542
- {:node=>:percentage,
1543
- :pos=>63,
1544
- :raw=>"100%",
1545
- :repr=>"100",
1546
- :type=>:integer,
1547
- :value=>100},
1548
- {:node=>:comma, :pos=>67, :raw=>","},
1549
- {:node=>:whitespace, :pos=>68, :raw=>" "},
1550
- {:node=>:percentage,
1551
- :pos=>69,
1552
- :raw=>"50%",
1553
- :repr=>"50",
1554
- :type=>:integer,
1555
- :value=>50},
1556
- {:node=>:")", :pos=>72, :raw=>")"},
1557
- {:node=>:semicolon, :pos=>73, :raw=>";"},
1558
- {:node=>:whitespace, :pos=>74, :raw=>" "},
1559
- {:node=>:"}", :pos=>75, :raw=>"}"}
1560
- ], tokens)
1561
- end
1562
- end