minicss 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.editorconfig +10 -0
- data/.rspec +3 -0
- data/.rubocop.yml +66 -0
- data/ACKNOWLEDGMENTS.md +47 -0
- data/CODE_OF_CONDUCT.md +132 -0
- data/LICENSE +21 -0
- data/README.md +178 -0
- data/Rakefile +12 -0
- data/lib/minicss/ast/at_rule.rb +17 -0
- data/lib/minicss/ast/bad_token.rb +14 -0
- data/lib/minicss/ast/block.rb +29 -0
- data/lib/minicss/ast/decl.rb +17 -0
- data/lib/minicss/ast/decl_list.rb +18 -0
- data/lib/minicss/ast/dimension.rb +14 -0
- data/lib/minicss/ast/function.rb +15 -0
- data/lib/minicss/ast/number.rb +14 -0
- data/lib/minicss/ast/percentage.rb +8 -0
- data/lib/minicss/ast/rule.rb +28 -0
- data/lib/minicss/ast/string_token.rb +14 -0
- data/lib/minicss/ast/syntax_error.rb +13 -0
- data/lib/minicss/ast/unicode_range.rb +13 -0
- data/lib/minicss/ast/url.rb +13 -0
- data/lib/minicss/ast.rb +72 -0
- data/lib/minicss/css/ast/at_rule.rb +19 -0
- data/lib/minicss/css/ast/declaration.rb +21 -0
- data/lib/minicss/css/ast/declaration_list.rb +11 -0
- data/lib/minicss/css/ast/function.rb +20 -0
- data/lib/minicss/css/ast/qualified_rule.rb +19 -0
- data/lib/minicss/css/ast/simple_block.rb +37 -0
- data/lib/minicss/css/ast/stylesheet.rb +17 -0
- data/lib/minicss/css/ast.rb +9 -0
- data/lib/minicss/css/errors.rb +8 -0
- data/lib/minicss/css/parser.rb +360 -0
- data/lib/minicss/css/position.rb +15 -0
- data/lib/minicss/css/refinements.rb +78 -0
- data/lib/minicss/css/token.rb +28 -0
- data/lib/minicss/css/token_stream.rb +56 -0
- data/lib/minicss/css/tokenizer.rb +572 -0
- data/lib/minicss/css.rb +10 -0
- data/lib/minicss/errors.rb +6 -0
- data/lib/minicss/sel.rb +382 -0
- data/lib/minicss/serializer.rb +59 -0
- data/lib/minicss/version.rb +5 -0
- data/lib/minicss.rb +53 -0
- metadata +87 -0
data/lib/minicss/sel.rb
ADDED
@@ -0,0 +1,382 @@
# frozen_string_literal: true

module MiniCSS
  module Sel
    module_function

    PLACEHOLDER_CHAR = "¶"
    ESCAPE_SENTINEL = "\uE000"
    STRING_SENTINEL = "\uE001"

    TOKENS = {
      "attribute" => /\[\s*(?:(?<namespace>\*|[-\w\p{^ASCII}]*)\|)?(?<name>[-\w\p{^ASCII}]+)\s*(?:(?<operator>\W?=)\s*(?<value>.+?)\s*(\s(?<caseSensitive>[iIsS]))?\s*)?\]/u,
      "id" => /#(?<name>[-\w\p{^ASCII}]+)/u,
      "class" => /\.(?<name>[-\w\p{^ASCII}]+)/u,
      "comma" => /\s*,\s*/u,
      "combinator" => /\s*[\s>+~]\s*/u,
      "pseudo-element" => /::(?<name>[-\w\p{^ASCII}]+)(?:\((?<argument>¶*)\))?/u,
      "pseudo-class" => /:(?<name>[-\w\p{^ASCII}]+)(?:\((?<argument>¶*)\))?/u,
      "universal" => /(?:(?<namespace>\*|[-\w\p{^ASCII}]*)\|)?\*/u,
      "type" => /(?:(?<namespace>\*|[-\w\p{^ASCII}]*)\|)?(?<name>[-\w\p{^ASCII}]+)/u
    }.freeze

    ARGUMENT_PATTERNS = {
      "pseudo-element" => Regexp.new(
        TOKENS["pseudo-element"].source.sub("(?<argument>¶*)", "(?<argument>.*)"),
        TOKENS["pseudo-element"].options
      ),
      "pseudo-class" => Regexp.new(
        TOKENS["pseudo-class"].source.sub("(?<argument>¶*)", "(?<argument>.*)"),
        TOKENS["pseudo-class"].options
      )
    }.freeze

    TRIM_TOKENS = Set.new(%w[combinator comma]).freeze

    RECURSIVE_PSEUDO_CLASSES = Set.new(
      %w[not is where has matches -moz-any -webkit-any nth-child nth-last-child]
    ).freeze

    NTH_CHILD_REGEXP = /(?<index>[\dn+-]+)\s+of\s+(?<subtree>.+)/u

    RECURSIVE_PSEUDO_CLASSES_ARGS = {
      "nth-child" => NTH_CHILD_REGEXP,
      "nth-last-child" => NTH_CHILD_REGEXP
    }.freeze

    STRING_PATTERN = /(['"])([^\\\n]*?)\1/u
    ESCAPE_PATTERN = /\\./u

    def get_argument_pattern_by_type(type)
      ARGUMENT_PATTERNS[type] || TOKENS[type]
    end

    def gobble_parens(text, offset)
      nesting = 0
      result = +""
      while offset < text.length
        char = text[offset]
        case char
        when "("
          nesting += 1
        when ")"
          nesting -= 1
        end
        result << char
        offset += 1
        return result if nesting.zero?
      end
      result
    end

    def tokenize_by(text, grammar = TOKENS)
      return [] if text.nil? || text.empty?

      tokens = [text.dup]

      grammar.each do |type, pattern|
        i = 0
        while i < tokens.length
          token = tokens[i]
          unless token.is_a?(String)
            i += 1
            next
          end

          match = pattern.match(token)
          if match.nil?
            i += 1
            next
          end

          content = match[0]
          start_index = match.begin(0)
          before = token[0...start_index]
          after = token[(start_index + content.length)..]
          parts = []

          parts << before if before && !before.empty?

          named = match.named_captures.transform_keys(&:to_sym)
          parts << named.merge(type: type, content: content)

          parts << after if after && !after.empty?

          tokens.slice!(i)
          tokens.insert(i, *parts)
        end
      end

      offset = 0
      tokens.each do |token|
        case token
        when String
          raise ArgumentError,
                "Unexpected sequence #{token} found at index #{offset}"
        when Hash
          offset += token[:content].length
          token[:pos] = [offset - token[:content].length, offset]
          if TRIM_TOKENS.include?(token[:type])
            trimmed = token[:content].strip
            token[:content] = trimmed.empty? ? " " : trimmed
          end
        end
      end

      tokens
    end

    def tokenize(selector, grammar = TOKENS)
      selector = selector.to_s.strip
      return [] if selector.empty?

      replacements = []

      selector = selector.gsub(ESCAPE_PATTERN) do |value|
        offset = Regexp.last_match.begin(0)
        replacements << { value: value, offset: offset }
        ESCAPE_SENTINEL * value.length
      end

      selector = selector.gsub(STRING_PATTERN) do |value|
        match = Regexp.last_match
        quote = match[1]
        content = match[2] || ""
        offset = match.begin(0)
        replacements << { value: value, offset: offset }
        "#{quote}#{STRING_SENTINEL * content.length}#{quote}"
      end

      pos = 0
      while (offset = selector.index("(", pos))
        value = gobble_parens(selector, offset)
        replacements << { value: value, offset: offset }
        placeholder = "(#{PLACEHOLDER_CHAR * (value.length - 2)})"
        selector = selector[0...offset] + placeholder + selector[(offset + value.length)..].to_s
        pos = offset + placeholder.length
      end

      tokens = tokenize_by(selector, grammar)
      changed_tokens = {}

      replacements.reverse_each do |replacement|
        tokens.each do |token|
          next unless token.is_a?(Hash)

          offset = replacement[:offset]
          value = replacement[:value]
          token_pos = token[:pos]
          next unless token_pos[0] <= offset && (offset + value.length) <= token_pos[1]

          content = token[:content]
          token_offset = offset - token_pos[0]
          token[:content] =
            content[0...token_offset].to_s +
            value +
            content[(token_offset + value.length)..].to_s
          changed_tokens[token.object_id] = token
        end
      end

      changed_tokens.each_value do |token|
        pattern = get_argument_pattern_by_type(token[:type])
        raise ArgumentError, "Unknown token type: #{token[:type]}" unless pattern

        match = pattern.match(token[:content])
        unless match
          raise ArgumentError,
                "Unable to parse content for #{token[:type]}: #{token[:content]}"
        end

        match.named_captures.each do |key, value|
          token[key.to_sym] = value
        end
      end

      tokens
    end

    def nest_tokens(tokens, list: true)
      if list && tokens.any? { |t| t[:type] == "comma" }
        selectors = []
        temp = []

        tokens.each_with_index do |token, index|
          if token[:type] == "comma"
            raise ArgumentError, "Incorrect comma at #{index}" if temp.empty?

            selectors << nest_tokens(temp, list: false)
            temp = []
          else
            temp << token
          end
        end

        raise ArgumentError, "Trailing comma" if temp.empty?

        selectors << nest_tokens(temp, list: false)
        return { type: "list", list: selectors }
      end

      (tokens.length - 1).downto(0) do |i|
        token = tokens[i]
        next unless token[:type] == "combinator"

        left = tokens[0...i]
        right = tokens[(i + 1)..] || []

        if left.empty?
          return {
            type: "relative",
            combinator: token[:content],
            right: nest_tokens(right)
          }
        end

        return {
          type: "complex",
          combinator: token[:content],
          left: nest_tokens(left),
          right: nest_tokens(right)
        }
      end

      case tokens.length
      when 0
        raise ArgumentError, "Could not build AST."
      when 1
        tokens.first
      else
        { type: "compound", list: tokens.dup }
      end
    end

    def flatten(node, parent = nil, &block)
      return enum_for(:flatten, node, parent) unless block_given?

      case node[:type]
      when "list"
        node[:list].each { |child| flatten(child, node, &block) }
      when "complex"
        flatten(node[:left], node, &block)
        flatten(node[:right], node, &block)
      when "relative"
        flatten(node[:right], node, &block)
      when "compound"
        node[:list].each { |token| block.call(token, node) }
      else
        block.call(node, parent)
      end
    end

    def walk(node, visit = nil, parent = nil, &block)
      visitor = visit || block
      raise ArgumentError, "No visitor provided" unless visitor
      return if node.nil?

      flatten(node, parent).each do |token, ast|
        visitor.call(token, ast)
      end
    end

    def parse(selector, recursive: true, list: true)
      tokens = tokenize(selector)
      return nil if tokens.empty?

      ast = nest_tokens(tokens, list: list)
      return ast unless recursive

      flatten(ast).each do |token, _|
        next unless token[:type] == "pseudo-class"

        argument = token[:argument]
        next unless argument
        next unless RECURSIVE_PSEUDO_CLASSES.include?(token[:name])

        child_arg = RECURSIVE_PSEUDO_CLASSES_ARGS[token[:name]]
        if child_arg
          match = child_arg.match(argument)
          next unless match

          match.named_captures.each do |key, value|
            token[key.to_sym] = value
          end
          argument = match[:subtree]
        end

        next if argument.nil? || argument.empty?

        token[:subtree] = parse(argument, recursive: true, list: true)
      end

      ast
    end

    def stringify(list_or_node)
      return list_or_node.map { |token| token[:content] }.join if list_or_node.is_a?(Array)

      case list_or_node[:type]
      when "list"
        list_or_node[:list].map { |node| stringify(node) }.join(",")
      when "relative"
        list_or_node[:combinator] + stringify(list_or_node[:right])
      when "complex"
        stringify(list_or_node[:left]) +
          list_or_node[:combinator] +
          stringify(list_or_node[:right])
      when "compound"
        list_or_node[:list].map { |node| stringify(node) }.join
      else
        list_or_node[:content]
      end
    end

    def specificity_to_number(specificity, base = nil)
      base ||= specificity.max.to_i + 1
      (specificity[0] * (base << 1)) +
        (specificity[1] * base) +
        specificity[2]
    end

    def specificity(selector)
      ast = selector
      ast = parse(selector, recursive: true) if selector.is_a?(String)
      return [] unless ast

      if ast.is_a?(Hash) && ast[:type] == "list"
        base = 10
        specificities = ast[:list].map do |entry|
          sp = specificity(entry)
          base = [base, *sp].max
          sp
        end
        numbers = specificities.map { |sp| specificity_to_number(sp, base) }
        return specificities[numbers.index(numbers.max)]
      end

      ret = [0, 0, 0]
      flatten(ast).each do |token, _|
        case token[:type]
        when "id"
          ret[0] += 1
        when "class", "attribute"
          ret[1] += 1
        when "pseudo-element", "type"
          ret[2] += 1
        when "pseudo-class"
          next if token[:name] == "where"

          unless RECURSIVE_PSEUDO_CLASSES.include?(token[:name]) && token[:subtree]
            ret[1] += 1
            next
          end
          sub = specificity(token[:subtree])
          sub.each_with_index { |value, index| ret[index] += value }
          ret[1] += 1 if %w[nth-child nth-last-child].include?(token[:name])
        end
      end

      ret
    end
  end
end
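For orientation only, and not part of the released files: a minimal sketch of how the Sel helpers above could be exercised. The selectors are made up, and the annotated results are inferred from the tokenize/nest/specificity logic in sel.rb rather than quoted from the gem's documentation.

require "minicss"

# parse builds a nested Hash AST ({ type: "list" | "complex" | "compound" | ... }).
ast = MiniCSS::Sel.parse("nav a.active, #main > p::first-line")

# stringify re-joins token contents; combinator whitespace is trimmed via
# TRIM_TOKENS, so the round trip should come back compact, not byte-identical:
MiniCSS::Sel.stringify(ast) # => "nav a.active,#main>p::first-line"

# specificity returns the [id, class, type] triple; two type selectors plus one
# class and one non-recursive pseudo-class should give:
MiniCSS::Sel.specificity("ul > li.item:hover") # => [0, 2, 2]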
data/lib/minicss/serializer.rb
ADDED
@@ -0,0 +1,59 @@
# frozen_string_literal: true

module MiniCSS
  module Serializer
    module_function

    def serialize(value)
      case value
      when Array
        value.map { serialize(it) }.flatten.compact.join
      when String
        value
      when AST::Rule
        [
          Sel.stringify(value.selector), "{",
          value.decls.map { serialize(it) },
          value.child_rules.map { serialize(it) },
          "}"
        ].flatten.compact.join
      when AST::Decl
        [
          "#{value.name}:",
          value.value.map { serialize(it) },
          value.important? ? "!important" : nil,
          ";"
        ].flatten.compact.join
      when AST::Number
        [
          value.sign,
          (value.type == :integer ? value.value.to_i : value.value.to_f).to_s
        ].compact.join
      when AST::AtRule
        [
          "@",
          value.name,
          value.prelude && !value.prelude.nil? && !value.prelude.empty? ? " #{value.prelude.map { serialize(it) }.flatten.compact.join}" : nil,
          "{",
          value.child_rules.map { serialize(it) }
        ].flatten.compact.join
      when AST::URL
        "url(#{value.value})"
      when AST::Function
        [value.name, "(",
         value.value.map { serialize(it) },
         ")"].join
      when AST::Block
        [
          value.left_token,
          value.value.map { serialize(it) },
          value.right_token
        ].flatten.compact.join
      when AST::StringToken
        [value.quoting, value.value, value.quoting].flatten.compact.join
      else
        raise "Unexpected element in serialization pipeline: #{value.class} #{value.inspect}"
      end
    end
  end
end
data/lib/minicss.rb
ADDED
@@ -0,0 +1,53 @@
# frozen_string_literal: true

require_relative "minicss/version"
require_relative "minicss/errors"
require_relative "minicss/css"
require_relative "minicss/sel"
require_relative "minicss/ast"
require_relative "minicss/serializer"

# MiniCSS exposes the library’s public API. It bundles high-level helpers for
# tokenizing raw CSS, parsing it into MiniCSS::AST nodes, and serializing AST
# structures back to CSS.
module MiniCSS
  module_function

  # Tokenize a CSS source string or IO.
  #
  # @param input [String, #read] Raw CSS, or an IO-like object that responds to
  #   `#read`. Invalid UTF-8 bytes will be replaced with U+FFFD, and all line
  #   endings are normalized before tokenization.
  # @param allow_unicode_ranges [Boolean] When true, Unicode range tokens are
  #   retained rather than downgraded to generic identifiers.
  # @return [Array<MiniCSS::CSS::Token>] Tokens annotated with positional
  #   metadata (`pos_start`, `pos_end`, and `literal`).
  def tokenize(input, allow_unicode_ranges: false)
    tok = CSS::Tokenizer.new(input, allow_unicode_ranges:)
    tok.tokenize
    tok.tokens
  end

  # Parse CSS into MiniCSS AST nodes.
  #
  # @param input [String, #read] The stylesheet text or an IO-like object.
  # @param allow_unicode_ranges [Boolean] Passed through to {#tokenize}.
  # @return [Array<MiniCSS::AST::Rule, MiniCSS::AST::AtRule,
  #   MiniCSS::AST::SyntaxError, Object>] A stylesheet represented as MiniCSS
  #   AST nodes; syntax issues surface as {MiniCSS::AST::SyntaxError} entries.
  def parse(input, allow_unicode_ranges: false)
    toks = tokenize(input, allow_unicode_ranges:)
    pars = CSS::Parser.new(toks)
    sheet = pars.parse_stylesheet
    AST.convert(sheet)
  end

  # Serialize MiniCSS AST nodes back into CSS.
  #
  # @param ast [MiniCSS::AST::Rule, MiniCSS::AST::Decl, Array<Object>] Any AST
  #   node—or array of nodes—produced by {#parse} or the lower-level
  #   MiniCSS::AST conversion helpers.
  # @return [String] Normalized CSS suitable for writing back to disk or the
  #   network.
  def serialize(ast) = Serializer.serialize(ast)
end
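Again for orientation only, not part of the released files: a small round trip through the top-level API documented in the comments above. The CSS string is illustrative; the return types follow the @return tags, and the serialized output is compacted by the Serializer shown earlier, so it is not byte-identical to the input.

require "minicss"

css = ".card { color: red; margin: 0 auto }"

tokens = MiniCSS.tokenize(css)  # Array of MiniCSS::CSS::Token with positional metadata
sheet  = MiniCSS.parse(css)     # Array of MiniCSS::AST nodes (Rule, AtRule, ...)
MiniCSS.serialize(sheet)        # => a String of normalized CSS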
metadata
ADDED
@@ -0,0 +1,87 @@
--- !ruby/object:Gem::Specification
name: minicss
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Vito Sartori
bindir: exe
cert_chain: []
date: 2025-10-01 00:00:00.000000000 Z
dependencies: []
description: MiniCSS is a pure-Ruby CSS parsing library
email:
- hey@vito.io
executables: []
extensions: []
extra_rdoc_files: []
files:
- ".editorconfig"
- ".rspec"
- ".rubocop.yml"
- ACKNOWLEDGMENTS.md
- CODE_OF_CONDUCT.md
- LICENSE
- README.md
- Rakefile
- lib/minicss.rb
- lib/minicss/ast.rb
- lib/minicss/ast/at_rule.rb
- lib/minicss/ast/bad_token.rb
- lib/minicss/ast/block.rb
- lib/minicss/ast/decl.rb
- lib/minicss/ast/decl_list.rb
- lib/minicss/ast/dimension.rb
- lib/minicss/ast/function.rb
- lib/minicss/ast/number.rb
- lib/minicss/ast/percentage.rb
- lib/minicss/ast/rule.rb
- lib/minicss/ast/string_token.rb
- lib/minicss/ast/syntax_error.rb
- lib/minicss/ast/unicode_range.rb
- lib/minicss/ast/url.rb
- lib/minicss/css.rb
- lib/minicss/css/ast.rb
- lib/minicss/css/ast/at_rule.rb
- lib/minicss/css/ast/declaration.rb
- lib/minicss/css/ast/declaration_list.rb
- lib/minicss/css/ast/function.rb
- lib/minicss/css/ast/qualified_rule.rb
- lib/minicss/css/ast/simple_block.rb
- lib/minicss/css/ast/stylesheet.rb
- lib/minicss/css/errors.rb
- lib/minicss/css/parser.rb
- lib/minicss/css/position.rb
- lib/minicss/css/refinements.rb
- lib/minicss/css/token.rb
- lib/minicss/css/token_stream.rb
- lib/minicss/css/tokenizer.rb
- lib/minicss/errors.rb
- lib/minicss/sel.rb
- lib/minicss/serializer.rb
- lib/minicss/version.rb
homepage: https://github.com/heyvito/minicss
licenses:
- MIT
metadata:
  allowed_push_host: https://rubygems.org
  homepage_uri: https://github.com/heyvito/minicss
  rubygems_mfa_required: 'true'
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '3.4'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.6.2
specification_version: 4
summary: MiniCSS is a pure-Ruby CSS parsing library
test_files: []