ebnf 0.1.0 → 0.2.0

Sign up to get free protection for your applications and to get access to all the features.
data/lib/ebnf/base.rb CHANGED
@@ -165,7 +165,7 @@ module EBNF
165
165
  require 'sxp'
166
166
  SXP::Generator.string(ast.sort)
167
167
  rescue LoadError
168
- ast.to_sxp
168
+ ast.sort_by{|r| r.num.to_f}.to_sxp
169
169
  end
170
170
  end
171
171
  def to_s; to_sxp; end
@@ -219,7 +219,7 @@ module EBNF
219
219
 
220
220
  # Progress output, less than debugging
221
221
  def progress(*args)
222
- return unless @options[:progress]
222
+ return unless @options[:progress] || @options[:debug]
223
223
  options = args.last.is_a?(Hash) ? args.pop : {}
224
224
  depth = options[:depth] || @depth
225
225
  args << yield if block_given?
data/lib/ebnf/ll1.rb CHANGED
@@ -50,126 +50,148 @@ module EBNF
50
50
 
51
51
  # Comprehension rule, create shorter versions of all non-terminal sequences
52
52
  comprehensions = []
53
- begin
54
- comprehensions = []
55
- ast.select {|r| r.seq? && r.kind == :rule && r.expr.length > 2}.each do |rule|
56
- new_expr = rule.expr[2..-1].unshift(:seq)
57
- unless ast.any? {|r| r.expr == new_expr}
58
- debug("first_follow") {"add comprehension rule for #{rule.sym} => #{new_expr.inspect}"}
59
- new_rule = rule.build(new_expr)
60
- rule.comp = new_rule
61
- comprehensions << new_rule
53
+ ittr = 0
54
+ depth do
55
+ begin
56
+ comprehensions = []
57
+ ast.select {|r| r.seq? && r.kind == :rule && r.expr.length > 2}.each do |rule|
58
+ new_expr = rule.expr[2..-1].unshift(:seq)
59
+ unless ast.any? {|r| r.expr == new_expr}
60
+ debug("FF.c") {"(#{ittr}) add comprehension rule for #{rule.sym} => #{new_expr.inspect}"}
61
+ new_rule = rule.build(new_expr)
62
+ rule.comp = new_rule
63
+ comprehensions << new_rule
64
+ end
62
65
  end
63
- end
64
66
 
65
- @ast += comprehensions
66
- progress("first_follow") {"comprehensions #{comprehensions.length}"}
67
- end while !comprehensions.empty?
68
-
69
- # Fi(a w' ) = { a } for every terminal a
70
- # For each rule who's expr's first element of a seq a terminal, or having any element of alt a terminal, add that terminal to the first set for this rule
71
- each(:rule) do |rule|
72
- each(:terminal) do |terminal|
73
- rule.add_first([terminal.sym]) if rule.starts_with(terminal.sym)
74
- end
67
+ @ast += comprehensions
68
+ progress("FF.c") {"(#{ittr}) comprehensions #{comprehensions.length}"}
69
+ ittr += 1
70
+ end while !comprehensions.empty?
75
71
 
76
- # Add strings to first for strings which are start elements
77
- start_strs = rule.starts_with(String)
78
- rule.add_first(start_strs) if start_strs
79
- end
80
-
81
- # # Fi(ε) = { ε }
82
- # Add _eps as a first of _empty
83
- empty = ast.detect {|r| r.sym == :_empty}
84
- empty.add_first([:_eps])
85
-
86
- # Loop until no more first elements are added
87
- firsts, follows = 0, 0
88
- begin
89
- firsts, follows = 0, 0
72
+ # Fi(a w' ) = { a } for every terminal a
73
+ # For each rule whose expr's first element of a seq a terminal, or having any element of alt a terminal, add that terminal to the first set for this rule
90
74
  each(:rule) do |rule|
91
- each(:rule) do |first_rule|
92
- next if first_rule == rule || first_rule.first.nil?
93
-
94
- # Fi(A w' ) = Fi(A) for every nonterminal A with ε not in Fi(A)
95
- # For each rule that starts with another rule having firsts, add the firsts of that rule to this rule, unless it already has those terminals in its first
96
- if rule.starts_with(first_rule.sym)
97
- depth {debug("FF.1") {"add first #{first_rule.first.inspect} to #{rule.sym}"}}
98
- firsts += rule.add_first(first_rule.first)
75
+ each(:terminal) do |terminal|
76
+ if rule.starts_with?(terminal.sym)
77
+ debug("FF.t") {"(0) add first #{terminal.sym} to #{rule.sym}"}
78
+ rule.add_first([terminal.sym])
99
79
  end
80
+ end
100
81
 
101
- # Fi(A w' ) = Fi(A) \ { ε } ∪ Fi(w' ) for every nonterminal A with ε in Fi(A)
102
- # For each rule starting with eps, add the terminals for the comprehension of this rule
103
- if rule.seq? &&
104
- rule.expr.fetch(1, nil) == first_rule &&
105
- first_rule.first.include?(:_eps) &&
106
- (comp = rule.comp)
82
+ # Add strings to first for strings which are start elements
83
+ start_strs = rule.starts_with?(String)
84
+ if start_strs
85
+ debug("FF.t") {"(0) add firsts #{start_strs.join(", ")} to #{rule.sym}"}
86
+ rule.add_first(start_strs)
87
+ end
88
+ end
89
+
90
+ # # Fi(ε) = { ε }
91
+ # Add _eps as a first of _empty
92
+ find_rule(:_empty).add_first([:_eps])
93
+
94
+ # Loop until no more first elements are added
95
+ firsts, follows, ittr = 0, 0, 0
96
+ begin
97
+ firsts, follows = 0, 0
98
+ each(:rule) do |rule|
99
+ each(:rule) do |first_rule|
100
+ next if first_rule == rule || first_rule.first.nil?
101
+
102
+ # Fi(A w' ) = Fi(A) for every nonterminal A with ε not in Fi(A)
103
+ # For each rule that starts with another rule having firsts which don't include _eps, add the firsts of that rule to this rule, unless it already has those terminals in its first.
104
+ # Note that it's simpler to promote all fi(A) to fi(A w') and exclude _eps, as this covers corner cases of the following rule.
105
+ if rule.starts_with?(first_rule.sym) && first_rule.first != [:_eps]
106
+ debug("FF.1") {"(#{ittr}) add first #{first_rule.first.inspect} from #{first_rule.sym} to #{rule.sym}"}
107
+ firsts += rule.add_first(first_rule.first - [:_eps])
108
+ end
107
109
 
108
- depth {debug("FF.2") {"add first #{first_rule.first.inspect} to #{comp.sym}"}}
109
- firsts += comp.add_first(first_rule.first)
110
+ # Fi(A w' ) = Fi(A) \ { ε } Fi(w' ) for every nonterminal A with ε in Fi(A)
111
+ # For each rule starting with eps, add the terminals for the comprehension of this rule
112
+ if rule.seq? &&
113
+ rule.expr.fetch(1, nil) == first_rule.sym &&
114
+ first_rule.first_includes_eps? &&
115
+ (comp = rule.comp) &&
116
+ comp.first &&
117
+ !(comp.first - [:_eps]).empty?
118
+
119
+ to_add = comp.first - [:_eps]
120
+ debug("FF.2") {"(#{ittr}) add first #{to_add.inspect} from #{comp.sym} to #{rule.sym}"}
121
+ firsts += rule.add_first(to_add)
122
+ end
110
123
  end
111
- end
112
124
 
113
- # Only run these rules if the rule is a sequence having two or more elements, whose first element is also a sequence and first_rule is the comprehension of rule
114
- if rule.seq? && (comp = rule.comp)
115
- #if there is a rule of the form Aj → wAiw' , then
116
- #
117
- if (ai = find_rule(rule.expr[1])) && ai.kind == :rule && comp.first
118
- # * if the terminal a is in Fi(w' ), then add a to Fo(Ai)
119
- #
120
- # Add follow terminals based on the first terminals
121
- # of a comprehension of this rule (having the same
122
- # sequence other than the first rule in the sequence)
125
+ # Only run these rules if the rule is a sequence having two or more elements, whose first element is also a sequence and first_rule is the comprehension of rule
126
+ if rule.seq? && (comp = rule.comp)
127
+ #if there is a rule of the form Aj → wAiw' , then
128
+ #
129
+ if (ai = find_rule(rule.expr[1])) && ai.kind == :rule && comp.first
130
+ # * if the terminal a is in Fi(w' ), then add a to Fo(Ai)
131
+ #
132
+ # Add follow terminals based on the first terminals
133
+ # of a comprehension of this rule (having the same
134
+ # sequence other than the first rule in the sequence)
135
+ #
136
+ # @example
137
+ # rule: (seq a b c)
138
+ # first_rule: (seq b c)
139
+ # if first_rule.first == [T]
140
+ # => a.follow += [T]
141
+ debug("FF.3") {"(#{ittr}) add follow #{comp.first.inspect} from #{comp.sym} to #{ai.sym}"}
142
+ follows += ai.add_follow(comp.first)
143
+ end
144
+
145
+ # Follows of a rule are also follows of the comprehension of the rule.
146
+ if rule.follow
147
+ debug("FF.4") {"(#{ittr}) add follow #{rule.follow.inspect} from #{rule.sym} to #{comp.sym}"}
148
+ follows += comp.add_follow(rule.follow)
149
+ end
150
+
151
+ # * if ε is in Fi(w' ), then add Fo(Aj) to Fo(Ai)
123
152
  #
124
- # @example
125
- # rule: (seq a b c)
126
- # first_rule: (seq b c)
127
- # if first_rule.first == [T]
128
- # => a.follow += [T]
129
- depth {debug("FF.3") {"add follow #{comp.first.inspect} to #{ai.sym}"}}
130
- follows += ai.add_follow(comp.first)
153
+ # If the comprehension of a sequence has an _eps first, then the follows of the rule also become the follows of the first member of the rule
154
+ if comp.first && comp.first.include?(:_eps) && rule.first &&
155
+ (member = find_rule(rule.expr.fetch(1, nil))) &&
156
+ member.kind == :rule
157
+
158
+ debug("FF.5") {"(#{ittr}) add follow #{rule.follow.inspect} from #{rule.sym} to #{member.sym}"}
159
+ follows += member.add_follow(rule.first)
160
+ end
131
161
  end
132
162
 
133
- # Follows of a rule are also follows of the comprehension of the rule.
134
- if rule.follow
135
- depth {debug("FF.4") {"add follow #{rule.follow.inspect} to #{comp.sym}"}}
136
- follows += comp.add_follow(rule.follow)
163
+ # Firsts of elements of an alt are firsts of the alt
164
+ if rule.alt?
165
+ rule.expr[1..-1].map {|s| find_rule(s)}.compact.select(&:first).each do |mem|
166
+ debug("FF.6") {"(#{ittr}) add first #{mem.first.inspect} from #{mem.sym} to #{rule.sym}"}
167
+ rule.add_first(mem.first)
168
+ end
137
169
  end
138
170
 
139
- # * if ε is in Fi(w' ), then add Fo(Aj) to Fo(Ai)
140
- #
141
- # If the comprehension of a sequence has an _eps first, then the follows of the rule also become the follows of the first member of the rule
142
- if comp.first && comp.first.include?(:_eps) && rule.first &&
143
- (member = find_rule(rule.expr.fetch(1, nil))) &&
171
+ # Follows of a rule are also follows of the last production in the rule
172
+ if rule.seq? && rule.follow &&
173
+ (member = find_rule(rule.expr.last)) &&
144
174
  member.kind == :rule
145
175
 
146
- depth {debug("FF.5") {"add follow #{rule.follow.inspect} to #{member.sym}"}}
147
- follows += member.add_follow(rule.first)
176
+ debug("FF.7") {"(#{ittr}) add follow #{rule.follow.inspect} to #{member.sym}"}
177
+ follows += member.add_follow(rule.follow)
148
178
  end
149
- end
150
-
151
- # Follows of a rule are also follows of the last production in the rule
152
- if rule.seq? && rule.follow &&
153
- (member = find_rule(rule.expr.last)) &&
154
- member.kind == :rule
155
-
156
- depth {debug("FF.6") {"add follow #{rule.follow.inspect} to #{member.sym}"}}
157
- follows += member.add_follow(rule.follow)
158
- end
159
179
 
160
- # For alts, anything that follows the rule follows each member of the rule
161
- if rule.alt? && rule.follow
162
- rule.expr[1..-1].map {|s| find_rule(s)}.each do |mem|
163
- if mem && mem.kind == :rule
164
- depth {debug("FF.7") {"add follow #{rule.first.inspect} to #{mem.sym}"}}
165
- follows += mem.add_follow(rule.follow)
180
+ # For alts, anything that follows the rule follows each member of the rule
181
+ if rule.alt? && rule.follow
182
+ rule.expr[1..-1].map {|s| find_rule(s)}.each do |mem|
183
+ if mem && mem.kind == :rule
184
+ debug("FF.8") {"(#{ittr}) add follow #{rule.first.inspect} to #{mem.sym}"}
185
+ follows += mem.add_follow(rule.follow)
186
+ end
166
187
  end
167
188
  end
168
189
  end
169
- end
170
190
 
171
- progress("first_follow") {"firsts #{firsts}, follows #{follows}"}
172
- end while (firsts + follows) > 0
191
+ progress("first_follow") {"(#{ittr}) firsts #{firsts}, follows #{follows}"}
192
+ ittr += 1
193
+ end while (firsts + follows) > 0
194
+ end
173
195
  end
174
196
 
175
197
  ##
@@ -183,19 +205,19 @@ module EBNF
183
205
  @first = ast.
184
206
  select(&:first).
185
207
  inject({}) {|memo, r|
186
- memo[r.sym] = r.first.reject {|t| t == :_eps};
208
+ memo[r.sym] = r.first if r.first
187
209
  memo
188
210
  }
189
211
  @follow = ast.
190
212
  select(&:follow).
191
213
  inject({}) {|memo, r|
192
- memo[r.sym] = r.first.reject {|t| t == :_eps};
214
+ memo[r.sym] = r.first if r.first
193
215
  memo
194
216
  }
195
217
  @terminals = ast.map do |r|
196
218
  (r.first || []) + (r.follow || [])
197
219
  end.flatten.uniq
198
- @terminals = (@terminals - [:_eps, :_eof, :_empty]).sort_by(&:to_s)
220
+ @terminals = (@terminals - [:_eps, :_eof, :_empty]).sort_by(&:inspect)
199
221
 
200
222
  @branch = {}
201
223
  @already = []
@@ -228,14 +250,14 @@ module EBNF
228
250
 
229
251
  if table.is_a?(Hash)
230
252
  io.puts "#{ind0}#{name} = {"
231
- table.keys.sort_by(&:to_s).each do |prod|
253
+ table.keys.sort_by(&:inspect).each do |prod|
232
254
  case table[prod]
233
255
  when Array
234
256
  list = table[prod].map(&:inspect).join(",\n#{ind2}")
235
257
  io.puts "#{ind1}#{prod.inspect} => [\n#{ind2}#{list}],"
236
258
  when Hash
237
259
  io.puts "#{ind1}#{prod.inspect} => {"
238
- table[prod].keys.sort_by(&:to_s).each do |term|
260
+ table[prod].keys.sort_by(&:inspect).each do |term|
239
261
  list = table[prod][term].map(&:inspect).join(", ")
240
262
  io.puts "#{ind2}#{term.inspect} => [#{list}],"
241
263
  end
@@ -247,7 +269,7 @@ module EBNF
247
269
  io.puts "#{ind0}}.freeze\n"
248
270
  else
249
271
  io.puts "#{ind0}#{name} = [\n#{ind1}" +
250
- table.sort_by(&:to_s).map(&:inspect).join(",\n#{ind1}") +
272
+ table.sort_by(&:inspect).map(&:inspect).join(",\n#{ind1}") +
251
273
  "\n#{ind0}].freeze\n"
252
274
  end
253
275
  end
@@ -71,13 +71,16 @@ module EBNF::LL1
71
71
  # @return [String]
72
72
  # @see http://www.w3.org/TR/rdf-sparql-query/#codepointEscape
73
73
  def self.unescape_codepoints(string)
74
+ string = string.dup
75
+ string.force_encoding(Encoding::ASCII_8BIT) if string.respond_to?(:force_encoding)
76
+
74
77
  # Decode \uXXXX and \UXXXXXXXX code points:
75
78
  string = string.gsub(UCHAR) do |c|
76
79
  s = [(c[2..-1]).hex].pack('U*')
77
80
  s.respond_to?(:force_encoding) ? s.force_encoding(Encoding::ASCII_8BIT) : s
78
81
  end
79
82
 
80
- string.force_encoding(Encoding::UTF_8) if string.respond_to?(:force_encoding) # Ruby 1.9+
83
+ string.force_encoding(Encoding::UTF_8) if string.respond_to?(:force_encoding)
81
84
  string
82
85
  end
83
86
 
@@ -114,26 +117,26 @@ module EBNF::LL1
114
117
  # Initializes a new lexer instance.
115
118
  #
116
119
  # @param [String, #to_s] input
117
- # @param [Array<Array<Symbol, Regexp>>] terminals
120
+ # @param [Array<Array<Symbol, Regexp>, Terminal>] terminals
118
121
  # Array of symbol, regexp pairs used to match terminals.
119
122
  # If the symbol is nil, it defines a Regexp to match string terminals.
120
123
  # @param [Hash{Symbol => Object}] options
121
124
  # @option options [Regexp] :whitespace (WS)
122
- # @option options [Regexp] :comment (COMMENT)
123
- # @option options [Array<Symbol>] :unescape_terms ([])
124
125
  # Regular expression matching the beginning of terminals that may cross newlines
126
+ # @option options [Regexp] :comment (COMMENT)
125
127
  def initialize(input = nil, terminals = nil, options = {})
126
128
  @options = options.dup
127
129
  @whitespace = @options[:whitespace] || WS
128
130
  @comment = @options[:comment] || COMMENT
129
- @unescape_terms = @options[:unescape_terms] || []
130
- @terminals = terminals
131
+ @terminals = terminals.map do |term|
132
+ term.is_a?(Array) ? Terminal.new(*term) : term
133
+ end
131
134
 
132
135
  raise Error, "Terminal patterns not defined" unless @terminals && @terminals.length > 0
133
136
 
134
137
  @lineno = 1
135
138
  @scanner = Scanner.new(input) do |string|
136
- string.force_encoding(Encoding::UTF_8) if string.respond_to?(:force_encoding) # Ruby 1.9+
139
+ string.force_encoding(Encoding::UTF_8) if string.respond_to?(:force_encoding)
137
140
  string
138
141
  end
139
142
  end
@@ -209,7 +212,7 @@ module EBNF::LL1
209
212
  token
210
213
  end
211
214
  rescue ArgumentError, Encoding::CompatibilityError => e
212
- raise Error.new("#{e.message} on line #{lineno + 1}",
215
+ raise Error.new(e.message,
213
216
  :input => (scanner.rest[0..100] rescue '??'), :token => lexme, :lineno => lineno)
214
217
  rescue Error
215
218
  raise
@@ -248,13 +251,6 @@ module EBNF::LL1
248
251
  # @return [StringScanner]
249
252
  attr_reader :scanner
250
253
 
251
- # Perform string and codepoint unescaping
252
- # @param [String] string
253
- # @return [String]
254
- def unescape(string)
255
- self.class.unescape_string(self.class.unescape_codepoints(string))
256
- end
257
-
258
254
  ##
259
255
  # Skip whitespace or comments, as defined through input options or defaults
260
256
  def skip_whitespace
@@ -270,22 +266,80 @@ module EBNF::LL1
270
266
  end
271
267
 
272
268
  ##
273
- # Return the matched token
269
+ # Return the matched token.
270
+ #
271
+ # If the token was matched with a case-insensitive regexp,
272
+ # track this with the resulting {Token}, so that comparisons
273
+ # with that token are also case insensitive
274
274
  #
275
275
  # @return [Token]
276
276
  def match_token
277
- @terminals.each do |(term, regexp)|
278
- #STDERR.puts "match[#{term}] #{scanner.rest[0..100].inspect} against #{regexp.inspect}" #if term == :STRING_LITERAL_SINGLE_QUOTE
279
- if matched = scanner.scan(regexp)
280
- matched = unescape(matched) if @unescape_terms.include?(term)
281
- #STDERR.puts " unescape? #{@unescape_terms.include?(term).inspect}"
282
- #STDERR.puts " matched #{term.inspect}: #{matched.inspect}"
283
- return token(term, matched)
277
+ @terminals.each do |term|
278
+ #STDERR.puts "match[#{term.type}] #{scanner.rest[0..100].inspect} against #{term.regexp.inspect}" #if term.type == :STRING_LITERAL_SINGLE_QUOTE
279
+ if matched = scanner.scan(term.regexp)
280
+ #STDERR.puts " matched #{term.type.inspect}: #{matched.inspect}"
281
+ return token(term.type, term.canonicalize(matched))
284
282
  end
285
283
  end
286
284
  nil
287
285
  end
288
286
 
287
+ # Terminal class, representing the terminal identifier and
288
+ # matching regular expression. Optionally, a Terminal may include
289
+ # a map to turn case-insensitively matched terminals into their
290
+ # canonical form
291
+ class Terminal
292
+ attr_reader :type
293
+ attr_reader :regexp
294
+
295
+ # @param [Symbol, nil] type
296
+ # @param [Regexp] regexp
297
+ # @param [Hash{Symbol => Object}] options
298
+ # @option options [Hash{String => String}] :map ({})
299
+ # A mapping from terminals, in lower-case form, to
300
+ # their canonical value
301
+ # @option options [Boolean] :unescape
302
+ # Cause strings and codepoints to be unescaped.
303
+ def initialize(type, regexp, options = {})
304
+ @type, @regexp, @options = type, regexp, options
305
+ @map = options.fetch(:map, {})
306
+ end
307
+
308
+ # Map a terminal to its canonical form. If there is no
309
+ # map, `value` is returned. `value` is unescaped if there
310
+ # is no canonical mapping, and the `:unescape` option is set.
311
+ #
312
+ # @param [String] value
313
+ # value to canonicalize
314
+ # @return [String]
315
+ def canonicalize(value)
316
+ @map.fetch(value.downcase, unescape(value))
317
+ end
318
+
319
+ def ==(other)
320
+ case other
321
+ when Array
322
+ @type == other.first && @regexp == other.last
323
+ when Terminal
324
+ @type == other.type && @regexp == other.regexp
325
+ end
326
+ end
327
+
328
+ protected
329
+
330
+ # Perform string and codepoint unescaping if defined for this terminal
331
+ # @param [String] string
332
+ # @return [String]
333
+ def unescape(string)
334
+ if @options[:unescape]
335
+ Lexer.unescape_string(Lexer.unescape_codepoints(string))
336
+ else
337
+ string
338
+ end
339
+ end
340
+
341
+ end
342
+
289
343
  protected
290
344
 
291
345
  ##
@@ -298,9 +352,10 @@ module EBNF::LL1
298
352
  # @param [Symbol] type
299
353
  # @param [String] value
300
354
  # Scanner instance with access to matched groups
355
+ # @param [Hash{Symbol => Object}] options
301
356
  # @return [Token]
302
- def token(type, value)
303
- Token.new(type, value, :lineno => lineno)
357
+ def token(type, value, options = {})
358
+ Token.new(type, value, options.merge(:lineno => lineno))
304
359
  end
305
360
 
306
361
  ##
@@ -313,19 +368,6 @@ module EBNF::LL1
313
368
  #
314
369
  # @see http://en.wikipedia.org/wiki/Lexical_analysis#Token
315
370
  class Token
316
- ##
317
- # Initializes a new token instance.
318
- #
319
- # @param [Symbol] type
320
- # @param [String] value
321
- # @param [Hash{Symbol => Object}] options
322
- # @option options [Integer] :lineno (nil)
323
- def initialize(type, value, options = {})
324
- @type, @value = (type ? type.to_s.to_sym : nil), value
325
- @options = options.dup
326
- @lineno = @options.delete(:lineno)
327
- end
328
-
329
371
  ##
330
372
  # The token's symbol type.
331
373
  #
@@ -350,6 +392,20 @@ module EBNF::LL1
350
392
  # @return [Hash]
351
393
  attr_reader :options
352
394
 
395
+ ##
396
+ # Initializes a new token instance.
397
+ #
398
+ # @param [Symbol] type
399
+ # @param [String] value
400
+ # @param [Hash{Symbol => Object}] options
401
+ # @option options [Integer] :lineno (nil)
402
+ def initialize(type, value, options = {})
403
+ @type = type.to_s.to_sym if type
404
+ @value = value.to_s
405
+ @options = options.dup
406
+ @lineno = @options.delete(:lineno)
407
+ end
408
+
353
409
  ##
354
410
  # Returns the attribute named by `key`.
355
411
  #
@@ -378,8 +434,10 @@ module EBNF::LL1
378
434
  # @return [Boolean]
379
435
  def ===(value)
380
436
  case value
381
- when Symbol then value == @type
382
- when ::String then value.to_s == @value.to_s
437
+ when Symbol
438
+ value == @type
439
+ when ::String
440
+ @value == (@options[:case_insensitive] ? value.to_s.downcase : value.to_s)
383
441
  else value == @value
384
442
  end
385
443
  end