dentaku 1.2.6 → 2.0.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (56)
  1. checksums.yaml +4 -4
  2. data/README.md +52 -57
  3. data/Rakefile +1 -1
  4. data/lib/dentaku.rb +8 -0
  5. data/lib/dentaku/ast.rb +22 -0
  6. data/lib/dentaku/ast/addition.rb +15 -0
  7. data/lib/dentaku/ast/combinators.rb +15 -0
  8. data/lib/dentaku/ast/comparators.rb +47 -0
  9. data/lib/dentaku/ast/division.rb +15 -0
  10. data/lib/dentaku/ast/exponentiation.rb +15 -0
  11. data/lib/dentaku/ast/function.rb +54 -0
  12. data/lib/dentaku/ast/functions/if.rb +26 -0
  13. data/lib/dentaku/ast/functions/max.rb +5 -0
  14. data/lib/dentaku/ast/functions/min.rb +5 -0
  15. data/lib/dentaku/ast/functions/not.rb +5 -0
  16. data/lib/dentaku/ast/functions/round.rb +5 -0
  17. data/lib/dentaku/ast/functions/rounddown.rb +5 -0
  18. data/lib/dentaku/ast/functions/roundup.rb +5 -0
  19. data/lib/dentaku/ast/functions/ruby_math.rb +8 -0
  20. data/lib/dentaku/ast/grouping.rb +13 -0
  21. data/lib/dentaku/ast/identifier.rb +29 -0
  22. data/lib/dentaku/ast/multiplication.rb +15 -0
  23. data/lib/dentaku/ast/negation.rb +25 -0
  24. data/lib/dentaku/ast/nil.rb +9 -0
  25. data/lib/dentaku/ast/node.rb +13 -0
  26. data/lib/dentaku/ast/numeric.rb +17 -0
  27. data/lib/dentaku/ast/operation.rb +20 -0
  28. data/lib/dentaku/ast/string.rb +17 -0
  29. data/lib/dentaku/ast/subtraction.rb +15 -0
  30. data/lib/dentaku/bulk_expression_solver.rb +6 -11
  31. data/lib/dentaku/calculator.rb +26 -20
  32. data/lib/dentaku/parser.rb +131 -0
  33. data/lib/dentaku/token.rb +4 -0
  34. data/lib/dentaku/token_matchers.rb +29 -0
  35. data/lib/dentaku/token_scanner.rb +18 -3
  36. data/lib/dentaku/tokenizer.rb +10 -2
  37. data/lib/dentaku/version.rb +1 -1
  38. data/spec/ast/function_spec.rb +19 -0
  39. data/spec/ast/node_spec.rb +37 -0
  40. data/spec/bulk_expression_solver_spec.rb +12 -5
  41. data/spec/calculator_spec.rb +14 -1
  42. data/spec/external_function_spec.rb +12 -28
  43. data/spec/parser_spec.rb +88 -0
  44. data/spec/spec_helper.rb +2 -1
  45. data/spec/token_scanner_spec.rb +4 -3
  46. data/spec/tokenizer_spec.rb +32 -6
  47. metadata +36 -16
  48. data/lib/dentaku/binary_operation.rb +0 -35
  49. data/lib/dentaku/evaluator.rb +0 -166
  50. data/lib/dentaku/expression.rb +0 -56
  51. data/lib/dentaku/external_function.rb +0 -10
  52. data/lib/dentaku/rule_set.rb +0 -153
  53. data/spec/binary_operation_spec.rb +0 -45
  54. data/spec/evaluator_spec.rb +0 -145
  55. data/spec/expression_spec.rb +0 -25
  56. data/spec/rule_set_spec.rb +0 -43
@@ -12,7 +12,33 @@ describe Dentaku::Tokenizer do
12
12
  expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
13
13
  expect(tokens.map(&:value)).to eq([1, :add, 1])
14
14
  end
15
-
15
+
16
+ it 'tokenizes unary minus' do
17
+ tokens = tokenizer.tokenize('-5')
18
+ expect(tokens.map(&:category)).to eq([:operator, :numeric])
19
+ expect(tokens.map(&:value)).to eq([:negate, 5])
20
+
21
+ tokens = tokenizer.tokenize('(-5)')
22
+ expect(tokens.map(&:category)).to eq([:grouping, :operator, :numeric, :grouping])
23
+ expect(tokens.map(&:value)).to eq([:open, :negate, 5, :close])
24
+
25
+ tokens = tokenizer.tokenize('if(-5 > x, -7, -8) - 9')
26
+ expect(tokens.map(&:category)).to eq([
27
+ :function, :grouping, # if(
28
+ :operator, :numeric, :comparator, :identifier, :grouping, # -5 > x,
29
+ :operator, :numeric, :grouping, # -7,
30
+ :operator, :numeric, :grouping, # -8)
31
+ :operator, :numeric # - 9
32
+ ])
33
+ expect(tokens.map(&:value)).to eq([
34
+ :if, :fopen, # if(
35
+ :negate, 5, :gt, 'x', :comma, # -5 > x,
36
+ :negate, 7, :comma, # -7,
37
+ :negate, 8, :close, # -8)
38
+ :subtract, 9 # - 9
39
+ ])
40
+ end
41
+
16
42
  it 'tokenizes comparison with =' do
17
43
  tokens = tokenizer.tokenize('number = 5')
18
44
  expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
@@ -46,7 +72,7 @@ describe Dentaku::Tokenizer do
46
72
  it 'tokenizes power operations' do
47
73
  tokens = tokenizer.tokenize('0 * 10 ^ -5')
48
74
  expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric, :operator, :operator, :numeric])
49
- expect(tokens.map(&:value)).to eq([0, :multiply, 10, :pow, :subtract, 5])
75
+ expect(tokens.map(&:value)).to eq([0, :multiply, 10, :pow, :negate, 5])
50
76
  end
51
77
 
52
78
  it 'handles floating point' do
@@ -62,9 +88,9 @@ describe Dentaku::Tokenizer do
62
88
  end
63
89
 
64
90
  it 'accepts arbitrary identifiers' do
65
- tokens = tokenizer.tokenize('monkeys > 1500')
91
+ tokens = tokenizer.tokenize('sea_monkeys > 1500')
66
92
  expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
67
- expect(tokens.map(&:value)).to eq(['monkeys', :gt, 1500])
93
+ expect(tokens.map(&:value)).to eq(['sea_monkeys', :gt, 1500])
68
94
  end
69
95
 
70
96
  it 'recognizes double-quoted strings' do
@@ -88,13 +114,13 @@ describe Dentaku::Tokenizer do
88
114
  it 'recognizes unary minus operator' do
89
115
  tokens = tokenizer.tokenize('-2 + 3')
90
116
  expect(tokens.map(&:category)).to eq([:operator, :numeric, :operator, :numeric])
91
- expect(tokens.map(&:value)).to eq([:subtract, 2, :add, 3])
117
+ expect(tokens.map(&:value)).to eq([:negate, 2, :add, 3])
92
118
  end
93
119
 
94
120
  it 'recognizes unary minus operator' do
95
121
  tokens = tokenizer.tokenize('2 - -3')
96
122
  expect(tokens.map(&:category)).to eq([:numeric, :operator, :operator, :numeric])
97
- expect(tokens.map(&:value)).to eq([2, :subtract, :subtract, 3])
123
+ expect(tokens.map(&:value)).to eq([2, :subtract, :negate, 3])
98
124
  end
99
125
 
100
126
  it 'matches "<=" before "<"' do
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: dentaku
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.2.6
4
+ version: 2.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Solomon White
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2015-05-30 00:00:00.000000000 Z
11
+ date: 2015-08-07 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: rake
@@ -68,29 +68,50 @@ files:
68
68
  - Rakefile
69
69
  - dentaku.gemspec
70
70
  - lib/dentaku.rb
71
- - lib/dentaku/binary_operation.rb
71
+ - lib/dentaku/ast.rb
72
+ - lib/dentaku/ast/addition.rb
73
+ - lib/dentaku/ast/combinators.rb
74
+ - lib/dentaku/ast/comparators.rb
75
+ - lib/dentaku/ast/division.rb
76
+ - lib/dentaku/ast/exponentiation.rb
77
+ - lib/dentaku/ast/function.rb
78
+ - lib/dentaku/ast/functions/if.rb
79
+ - lib/dentaku/ast/functions/max.rb
80
+ - lib/dentaku/ast/functions/min.rb
81
+ - lib/dentaku/ast/functions/not.rb
82
+ - lib/dentaku/ast/functions/round.rb
83
+ - lib/dentaku/ast/functions/rounddown.rb
84
+ - lib/dentaku/ast/functions/roundup.rb
85
+ - lib/dentaku/ast/functions/ruby_math.rb
86
+ - lib/dentaku/ast/grouping.rb
87
+ - lib/dentaku/ast/identifier.rb
88
+ - lib/dentaku/ast/multiplication.rb
89
+ - lib/dentaku/ast/negation.rb
90
+ - lib/dentaku/ast/nil.rb
91
+ - lib/dentaku/ast/node.rb
92
+ - lib/dentaku/ast/numeric.rb
93
+ - lib/dentaku/ast/operation.rb
94
+ - lib/dentaku/ast/string.rb
95
+ - lib/dentaku/ast/subtraction.rb
72
96
  - lib/dentaku/bulk_expression_solver.rb
73
97
  - lib/dentaku/calculator.rb
74
98
  - lib/dentaku/dependency_resolver.rb
75
- - lib/dentaku/evaluator.rb
76
99
  - lib/dentaku/exceptions.rb
77
- - lib/dentaku/expression.rb
78
- - lib/dentaku/external_function.rb
79
- - lib/dentaku/rule_set.rb
100
+ - lib/dentaku/parser.rb
80
101
  - lib/dentaku/token.rb
81
102
  - lib/dentaku/token_matcher.rb
103
+ - lib/dentaku/token_matchers.rb
82
104
  - lib/dentaku/token_scanner.rb
83
105
  - lib/dentaku/tokenizer.rb
84
106
  - lib/dentaku/version.rb
107
+ - spec/ast/function_spec.rb
108
+ - spec/ast/node_spec.rb
85
109
  - spec/benchmark.rb
86
- - spec/binary_operation_spec.rb
87
110
  - spec/bulk_expression_solver_spec.rb
88
111
  - spec/calculator_spec.rb
89
112
  - spec/dentaku_spec.rb
90
- - spec/evaluator_spec.rb
91
- - spec/expression_spec.rb
92
113
  - spec/external_function_spec.rb
93
- - spec/rule_set_spec.rb
114
+ - spec/parser_spec.rb
94
115
  - spec/spec_helper.rb
95
116
  - spec/token_matcher_spec.rb
96
117
  - spec/token_scanner_spec.rb
@@ -116,20 +137,19 @@ required_rubygems_version: !ruby/object:Gem::Requirement
116
137
  version: '0'
117
138
  requirements: []
118
139
  rubyforge_project: dentaku
119
- rubygems_version: 2.4.5
140
+ rubygems_version: 2.4.8
120
141
  signing_key:
121
142
  specification_version: 4
122
143
  summary: A formula language parser and evaluator
123
144
  test_files:
145
+ - spec/ast/function_spec.rb
146
+ - spec/ast/node_spec.rb
124
147
  - spec/benchmark.rb
125
- - spec/binary_operation_spec.rb
126
148
  - spec/bulk_expression_solver_spec.rb
127
149
  - spec/calculator_spec.rb
128
150
  - spec/dentaku_spec.rb
129
- - spec/evaluator_spec.rb
130
- - spec/expression_spec.rb
131
151
  - spec/external_function_spec.rb
132
- - spec/rule_set_spec.rb
152
+ - spec/parser_spec.rb
133
153
  - spec/spec_helper.rb
134
154
  - spec/token_matcher_spec.rb
135
155
  - spec/token_scanner_spec.rb
@@ -1,35 +0,0 @@
1
- require 'bigdecimal'
2
-
3
- module Dentaku
4
- class BinaryOperation
5
- attr_reader :left, :right
6
-
7
- def initialize(left, right)
8
- @left = left
9
- @right = right
10
- end
11
-
12
- def pow; [:numeric, left ** right]; end
13
- def add; [:numeric, left + right]; end
14
- def subtract; [:numeric, left - right]; end
15
- def multiply; [:numeric, left * right]; end
16
-
17
- def divide
18
- quotient, remainder = left.divmod(right)
19
- return [:numeric, quotient] if remainder == 0
20
- [:numeric, BigDecimal.new(left.to_s) / BigDecimal.new(right.to_s)]
21
- end
22
-
23
- def mod; [:numeric, left % right]; end
24
-
25
- def le; [:logical, left <= right]; end
26
- def ge; [:logical, left >= right]; end
27
- def lt; [:logical, left < right]; end
28
- def gt; [:logical, left > right]; end
29
- def ne; [:logical, left != right]; end
30
- def eq; [:logical, left == right]; end
31
-
32
- def and; [:logical, left && right]; end
33
- def or; [:logical, left || right]; end
34
- end
35
- end
@@ -1,166 +0,0 @@
1
- require 'dentaku/rule_set'
2
- require 'dentaku/binary_operation'
3
-
4
- module Dentaku
5
- class Evaluator
6
- attr_reader :rule_set
7
-
8
- def initialize(rule_set)
9
- @rule_set = rule_set
10
- end
11
-
12
- def evaluate(tokens)
13
- evaluate_token_stream(tokens).value
14
- end
15
-
16
- def evaluate_token_stream(tokens)
17
- while tokens.length > 1
18
- matched, tokens = match_rule_pattern(tokens)
19
- raise "no rule matched {{#{ inspect_tokens(tokens) }}}" unless matched
20
- end
21
-
22
- tokens << Token.new(:numeric, 0) if tokens.empty?
23
-
24
- tokens.first
25
- end
26
-
27
- def inspect_tokens(tokens)
28
- tokens.map { |t| t.to_s }.join(' ')
29
- end
30
-
31
- def match_rule_pattern(tokens)
32
- matched = false
33
-
34
- rule_set.filter(tokens).each do |pattern, evaluator|
35
- pos, match = find_rule_match(pattern, tokens)
36
-
37
- if pos
38
- tokens = evaluate_step(tokens, pos, match.length, evaluator)
39
- matched = true
40
- break
41
- end
42
- end
43
-
44
- [matched, tokens]
45
- end
46
-
47
- def find_rule_match(pattern, token_stream)
48
- position = 0
49
-
50
- while position <= token_stream.length
51
- matches = []
52
- matched = true
53
-
54
- pattern.each do |matcher|
55
- _matched, match = matcher.match(token_stream, position + matches.length)
56
- matched &&= _matched
57
- break unless matched
58
- matches += match
59
- end
60
-
61
- return position, matches if matched
62
- return if pattern.first.caret?
63
- position += 1
64
- end
65
-
66
- nil
67
- end
68
-
69
- def evaluate_step(token_stream, start, length, evaluator)
70
- substream = token_stream.slice!(start, length)
71
-
72
- if self.respond_to?(evaluator)
73
- token_stream.insert start, *self.send(evaluator, *substream)
74
- else
75
- result = user_defined_function(evaluator, substream)
76
- token_stream.insert start, result
77
- end
78
- end
79
-
80
- def user_defined_function(evaluator, tokens)
81
- function = rule_set.function(evaluator)
82
- raise "unknown function '#{ evaluator }'" unless function
83
-
84
- arguments = extract_arguments_from_function_call(tokens).map { |t| t.value }
85
- return_value = function.body.call(*arguments)
86
- Token.new(function.type, return_value)
87
- end
88
-
89
- def extract_arguments_from_function_call(tokens)
90
- _function_name, _open, *args_and_commas, _close = tokens
91
- args_and_commas.reject { |token| token.is?(:grouping) }
92
- end
93
-
94
- def evaluate_group(*args)
95
- evaluate_token_stream(args[1..-2])
96
- end
97
-
98
- def apply(lvalue, operator, rvalue)
99
- operation = BinaryOperation.new(lvalue.value, rvalue.value)
100
- raise "unknown operation #{ operator.value }" unless operation.respond_to?(operator.value)
101
- Token.new(*operation.send(operator.value))
102
- end
103
-
104
- def negate(_, token)
105
- Token.new(token.category, token.value * -1)
106
- end
107
-
108
- def pow_negate(base, _, _, exp)
109
- Token.new(base.category, base.value ** (exp.value * -1))
110
- end
111
-
112
- def mul_negate(val1, _, _, val2)
113
- Token.new(val1.category, val1.value * val2.value * -1)
114
- end
115
-
116
- def percentage(token, _)
117
- Token.new(token.category, token.value / 100.0)
118
- end
119
-
120
- def expand_range(left, oper1, middle, oper2, right)
121
- [left, oper1, middle, Token.new(:combinator, :and), middle, oper2, right]
122
- end
123
-
124
- def if(*args)
125
- _if, _open, condition, _, true_value, _, false_value, _close = args
126
-
127
- if condition.value
128
- true_value
129
- else
130
- false_value
131
- end
132
- end
133
-
134
- def round(*args)
135
- _, _, *tokens, _ = args
136
-
137
- input_tokens, places_tokens = tokens.chunk { |t| t.category == :grouping }.
138
- reject { |flag, tokens| flag }.
139
- map { |flag, tokens| tokens }
140
-
141
- input_value = evaluate_token_stream(input_tokens).value
142
- places = places_tokens ? evaluate_token_stream(places_tokens).value : 0
143
-
144
- value = input_value.round(places)
145
-
146
- Token.new(:numeric, value)
147
- end
148
-
149
- def round_int(*args)
150
- function, _, *tokens, _ = args
151
-
152
- value = evaluate_token_stream(tokens).value
153
- rounded = if function.value == :roundup
154
- value.ceil
155
- else
156
- value.floor
157
- end
158
-
159
- Token.new(:numeric, rounded)
160
- end
161
-
162
- def not(*args)
163
- Token.new(:logical, ! evaluate_token_stream(args[2..-2]).value)
164
- end
165
- end
166
- end
@@ -1,56 +0,0 @@
1
- require 'dentaku/tokenizer'
2
-
3
- module Dentaku
4
- class Expression
5
- attr_reader :tokens, :variables
6
-
7
- def initialize(string, variables={})
8
- @raw = string
9
- @tokenizer ||= Tokenizer.new
10
- @tokens = @tokenizer.tokenize(@raw)
11
- @variables = Hash[variables.map { |k,v| [k.to_s, v] }]
12
- replace_identifiers_with_values
13
- end
14
-
15
- def identifiers
16
- @tokens.select { |t| t.category == :identifier }.map { |t| t.value }
17
- end
18
-
19
- def unbound?
20
- identifiers.any?
21
- end
22
-
23
- private
24
-
25
- def replace_identifiers_with_values
26
- @tokens.map! do |token|
27
- if token.is?(:identifier)
28
- replace_identifier_with_value(token)
29
- else
30
- token
31
- end
32
- end
33
- end
34
-
35
- def replace_identifier_with_value(token)
36
- key = token.value.to_s
37
-
38
- if variables.key? key
39
- value = variables[key]
40
- type = type_for_value(value)
41
-
42
- Token.new(type, value)
43
- else
44
- token
45
- end
46
- end
47
-
48
- def type_for_value(value)
49
- case value
50
- when String then :string
51
- when TrueClass, FalseClass then :logical
52
- else :numeric
53
- end
54
- end
55
- end
56
- end
@@ -1,10 +0,0 @@
1
- class ExternalFunction < Struct.new(:name, :type, :signature, :body)
2
- def initialize(*)
3
- super
4
- self.name = self.name.to_s
5
- end
6
-
7
- def tokens
8
- signature.flat_map { |t| [t, :comma] }[0...-1]
9
- end
10
- end
@@ -1,153 +0,0 @@
1
- require 'dentaku/external_function'
2
-
3
- module Dentaku
4
- class RuleSet
5
- def initialize
6
- self.custom_rules = []
7
- self.custom_functions = {}
8
- end
9
-
10
- def rules
11
- custom_rules + core_rules
12
- end
13
-
14
- def each
15
- rules.each { |r| yield r }
16
- end
17
-
18
- def add_function(function)
19
- fn = ExternalFunction.new(function[:name], function[:type], function[:signature], function[:body])
20
-
21
- custom_rules.push [
22
- function_token_matchers(fn.name, *fn.tokens),
23
- fn.name
24
- ]
25
-
26
- custom_functions[fn.name] = fn
27
- clear_cache
28
- end
29
-
30
- def filter(tokens)
31
- categories = tokens.map(&:category).uniq
32
- values = tokens.map { |token| token.value.is_a?(Numeric) ? 0 : token.value }
33
- .reject { |token| [:fopen, :close].include?(token) }
34
- select(categories, values)
35
- end
36
-
37
- def select(categories, values)
38
- @cache ||= {}
39
- return @cache[categories + values] if @cache.has_key?(categories + values)
40
-
41
- @cache[categories + values] = rules.select do |pattern, _|
42
- categories_intersection = matcher_categories[pattern] & categories
43
- values_intersection = matcher_values[pattern] & values
44
- categories_intersection.length > 0 && (values_intersection.length > 0 || matcher_values[pattern].empty?)
45
- end
46
- end
47
-
48
- def function(name)
49
- custom_functions.fetch(name)
50
- end
51
-
52
- private
53
- attr_accessor :custom_rules, :custom_functions
54
-
55
- def matcher_categories
56
- @matcher_categories ||= rules.each_with_object({}) do |(pattern, _), h|
57
- h[pattern] = pattern.map(&:categories).reduce { |a,b| a.merge(b) }.keys
58
- end
59
- end
60
-
61
- def matcher_values
62
- @matcher_values ||= rules.each_with_object({}) do |(pattern, _), h|
63
- h[pattern] = pattern.map(&:values).reduce { |a,b| a.merge(b) }.keys
64
- end
65
- end
66
-
67
- def clear_cache
68
- @cache = nil
69
- @matcher_categories = nil
70
- @matcher_values = nil
71
- end
72
-
73
- def core_rules
74
- @core_rules ||= [
75
- [ pattern(:if), :if ],
76
- [ pattern(:round), :round ],
77
- [ pattern(:roundup), :round_int ],
78
- [ pattern(:rounddown), :round_int ],
79
- [ pattern(:not), :not ],
80
-
81
- [ pattern(:group), :evaluate_group ],
82
- [ pattern(:start_neg), :negate ],
83
- [ pattern(:math_pow), :apply ],
84
- [ pattern(:math_neg_pow), :pow_negate ],
85
- [ pattern(:math_mod), :apply ],
86
- [ pattern(:math_mul), :apply ],
87
- [ pattern(:math_neg_mul), :mul_negate ],
88
- [ pattern(:math_add), :apply ],
89
- [ pattern(:percentage), :percentage ],
90
- [ pattern(:negation), :negate ],
91
- [ pattern(:range_asc), :expand_range ],
92
- [ pattern(:range_desc), :expand_range ],
93
- [ pattern(:num_comp), :apply ],
94
- [ pattern(:str_comp), :apply ],
95
- [ pattern(:combine), :apply ]
96
- ]
97
- end
98
-
99
- def pattern(name)
100
- @patterns ||= {
101
- group: token_matchers(:open, :non_group_star, :close),
102
- math_add: token_matchers(:numeric, :addsub, :numeric),
103
- math_mul: token_matchers(:numeric, :muldiv, :numeric),
104
- math_neg_mul: token_matchers(:numeric, :muldiv, :subtract, :numeric),
105
- math_pow: token_matchers(:numeric, :pow, :numeric),
106
- math_neg_pow: token_matchers(:numeric, :pow, :subtract, :numeric),
107
- math_mod: token_matchers(:numeric, :mod, :numeric),
108
- negation: token_matchers(:subtract, :numeric),
109
- start_neg: token_matchers(:anchored_minus, :numeric),
110
- percentage: token_matchers(:numeric, :mod),
111
- range_asc: token_matchers(:numeric, :comp_lt, :numeric, :comp_lt, :numeric),
112
- range_desc: token_matchers(:numeric, :comp_gt, :numeric, :comp_gt, :numeric),
113
- num_comp: token_matchers(:numeric, :comparator, :numeric),
114
- str_comp: token_matchers(:string, :comparator, :string),
115
- combine: token_matchers(:logical, :combinator, :logical),
116
-
117
- if: function_token_matchers(:if, :non_group, :comma, :non_group, :comma, :non_group),
118
- round: function_token_matchers(:round, :arguments),
119
- roundup: function_token_matchers(:roundup, :arguments),
120
- rounddown: function_token_matchers(:rounddown, :arguments),
121
- not: function_token_matchers(:not, :arguments)
122
- }
123
-
124
- @patterns[name]
125
- end
126
-
127
- def token_matchers(*symbols)
128
- symbols.map { |s| matcher(s) }
129
- end
130
-
131
- def function_token_matchers(function_name, *symbols)
132
- token_matchers(:fopen, *symbols, :close).unshift(
133
- TokenMatcher.send(function_name)
134
- )
135
- end
136
-
137
- def matcher(symbol)
138
- @matchers ||= [
139
- :numeric, :string, :addsub, :subtract, :muldiv, :pow, :mod,
140
- :comparator, :comp_gt, :comp_lt, :fopen, :open, :close, :comma,
141
- :non_close_plus, :non_group, :non_group_star, :arguments,
142
- :logical, :combinator, :if, :round, :roundup, :rounddown, :not,
143
- :anchored_minus, :math_neg_pow, :math_neg_mul
144
- ].each_with_object({}) do |name, matchers|
145
- matchers[name] = TokenMatcher.send(name)
146
- end
147
-
148
- @matchers.fetch(symbol) do
149
- raise "Unknown token symbol #{ symbol }"
150
- end
151
- end
152
- end
153
- end