dentaku 1.2.0 → 1.2.1

data/README.md CHANGED
@@ -199,9 +199,9 @@ As an example, the exponentiation function takes two parameters, the mantissa
  and the exponent, so the token list could be defined as: `[:numeric,
  :numeric]`. Other functions might be variadic -- consider `max`, a function
  that takes any number of numeric inputs and returns the largest one. Its token
- list could be defined as: `[:non_close_plus]` (one or more tokens that are not
- closing parentheses). See the
- [rules definitions](https://github.com/rubysolo/dentaku/blob/master/lib/dentaku/token_matcher.rb#L61)
+ list could be defined as: `[:arguments]` (one or more numeric, string, or logical
+ values, separated by commas). See the
+ [rules definitions](https://github.com/rubysolo/dentaku/blob/master/lib/dentaku/token_matcher.rb#L87)
  for the names of token patterns you can use.

  Functions can be added individually using Calculator#add_function, or en masse using
@@ -230,10 +230,10 @@ Here's an example of adding the `max` function:
  > c.add_function(
  name: :max,
  type: :numeric,
- signature: [:non_close_plus],
+ signature: [:arguments],
  body: ->(*args) { args.max }
  )
- > c.evaluate 'MAX(5,3,9,6,2)'
+ > c.evaluate 'MAX(8,6,7,5,3,0,9)'
  #=> 9
  ```

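As a usage sketch of the `[:arguments]` signature described above, here is a hypothetical variadic `sum` function registered the same way as `max` (the function name and body are illustrative, not part of dentaku):

```ruby
require 'dentaku'

c = Dentaku::Calculator.new

# Hypothetical variadic function: the :arguments signature accepts one or
# more numeric, string, or logical values separated by commas.
c.add_function(
  name:      :sum,
  type:      :numeric,
  signature: [:arguments],
  body:      ->(*args) { args.reduce(0, :+) }
)

c.evaluate('SUM(1, 2, 3, 4)')  #=> 10
```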
@@ -245,14 +245,17 @@ Big thanks to [ElkStone Basements](http://www.elkstonebasements.com/) for
  allowing me to extract and open source this code. Thanks also to all the
  contributors:

+ * [0xCCD](https://github.com/0xCCD)
+ * [AlexeyMK](https://github.com/AlexeyMK)
  * [CraigCottingham](https://github.com/CraigCottingham)
- * [arnaudl](https://github.com/arnaudl)
- * [thbar](https://github.com/thbar) / [BoxCar](https://www.boxcar.io)
  * [antonversal](https://github.com/antonversal)
- * [mvbrocato](https://github.com/mvbrocato)
+ * [arnaudl](https://github.com/arnaudl)
+ * [bernardofire](https://github.com/bernardofire)
  * [brixen](https://github.com/brixen)
- * [0xCCD](https://github.com/0xCCD)
- * [AlexeyMK](https://github.com/AlexeyMK)
+ * [jasonhutchens](https://github.com/jasonhutchens)
+ * [jmangs](https://github.com/jmangs)
+ * [mvbrocato](https://github.com/mvbrocato)
+ * [thbar](https://github.com/thbar) / [BoxCar](https://www.boxcar.io)


  LICENSE
data/lib/dentaku.rb CHANGED
@@ -7,14 +7,6 @@ module Dentaku
  calculator.evaluate(expression, data)
  end

- class UnboundVariableError < StandardError
- attr_reader :unbound_variables
-
- def initialize(unbound_variables)
- @unbound_variables = unbound_variables
- end
- end
-
  private

  def self.calculator
data/lib/dentaku/binary_operation.rb CHANGED
@@ -1,3 +1,5 @@
+ require 'bigdecimal'
+
  module Dentaku
  class BinaryOperation
  attr_reader :left, :right
data/lib/dentaku/calculator.rb CHANGED
@@ -1,4 +1,5 @@
  require 'dentaku/evaluator'
+ require 'dentaku/exceptions'
  require 'dentaku/expression'
  require 'dentaku/rules'
  require 'dentaku/token'
data/lib/dentaku/exceptions.rb ADDED
@@ -0,0 +1,9 @@
+ module Dentaku
+ class UnboundVariableError < StandardError
+ attr_reader :unbound_variables
+
+ def initialize(unbound_variables)
+ @unbound_variables = unbound_variables
+ end
+ end
+ end
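`UnboundVariableError` now lives in its own `dentaku/exceptions` file, required by the calculator above. A minimal sketch of handling it, assuming evaluation raises this error when an expression references variables that were never supplied (the expression and variable names are illustrative):

```ruby
require 'dentaku'

calc = Dentaku::Calculator.new

begin
  # 'apples' is never bound, so evaluation cannot complete.
  calc.evaluate('apples * 2')
rescue Dentaku::UnboundVariableError => e
  # The exception carries the names that could not be resolved.
  puts "unbound variables: #{e.unbound_variables.join(', ')}"
end
```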
data/lib/dentaku/rules.rb CHANGED
@@ -42,7 +42,7 @@ module Dentaku
  @rules.unshift [
  [
  TokenMatcher.send(ext.name),
- t(:open),
+ t(:fopen),
  *pattern(*ext.tokens),
  t(:close)
  ],
@@ -53,22 +53,20 @@ module Dentaku

  def self.func(name)
  @funcs ||= {}
- @funcs[name]
+ @funcs.fetch(name)
  end

  def self.t(name)
  @matchers ||= generate_matchers
- @matchers[name]
+ @matchers.fetch(name)
  end

  def self.generate_matchers
  [
  :numeric, :string, :addsub, :subtract, :muldiv, :pow, :mod,
- :comparator, :comp_gt, :comp_lt,
- :open, :close, :comma,
- :non_close_plus, :non_group, :non_group_star,
- :logical, :combinator,
- :if, :round, :roundup, :rounddown, :not
+ :comparator, :comp_gt, :comp_lt, :fopen, :open, :close, :comma,
+ :non_close_plus, :non_group, :non_group_star, :arguments,
+ :logical, :combinator, :if, :round, :roundup, :rounddown, :not
  ].each_with_object({}) do |name, matchers|
  matchers[name] = TokenMatcher.send(name)
  end
@@ -90,10 +88,10 @@ module Dentaku
  combine: pattern(:logical, :combinator, :logical),

  if: func_pattern(:if, :non_group, :comma, :non_group, :comma, :non_group),
- round: func_pattern(:round, :non_close_plus),
- roundup: func_pattern(:roundup, :non_close_plus),
- rounddown: func_pattern(:rounddown, :non_close_plus),
- not: func_pattern(:not, :non_close_plus)
+ round: func_pattern(:round, :arguments),
+ roundup: func_pattern(:roundup, :arguments),
+ rounddown: func_pattern(:rounddown, :arguments),
+ not: func_pattern(:not, :arguments)
  }

  @patterns[name]
@@ -104,7 +102,7 @@ module Dentaku
  end

  def self.func_pattern(func, *tokens)
- pattern(func, :open, *tokens, :close)
+ pattern(func, :fopen, *tokens, :close)
  end
  end
  end
data/lib/dentaku/token_matcher.rb CHANGED
@@ -2,27 +2,31 @@ require 'dentaku/token'

  module Dentaku
  class TokenMatcher
- def initialize(categories=nil, values=nil)
- @categories = [categories].compact.flatten
- @values = [values].compact.flatten
- @invert = false
+ attr_reader :children

- @categories_hash = Hash[@categories.map { |cat| [cat, 1] }]
- @values_hash = Hash[@values.map { |value| [value, 1] }]
+ def initialize(categories=nil, values=nil, children=[])
+ # store categories and values as hash to optimize key lookup, h/t @jan-mangs
+ @categories = [categories].compact.flatten.each_with_object({}) { |c,h| h[c] = 1 }
+ @values = [values].compact.flatten.each_with_object({}) { |v,h| h[v] = 1 }
+ @children = children.compact
+ @invert = false

  @min = 1
  @max = 1
  @range = (@min..@max)
  end

+ def | (other_matcher)
+ self.class.new(:nomatch, :nomatch, leaf_matchers + other_matcher.leaf_matchers)
+ end
+
  def invert
  @invert = ! @invert
  self
  end

  def ==(token)
- return false if token.nil?
- (category_match(token.category) && value_match(token.value)) ^ @invert
+ leaf_matcher? ? matches_token?(token) : any_child_matches_token?(token)
  end

  def match(token_stream, offset=0)
@@ -53,40 +57,65 @@ module Dentaku
  self
  end

+ def leaf_matcher?
+ children.empty?
+ end
+
+ def leaf_matchers
+ leaf_matcher? ? [self] : children
+ end
+
  private

+ def any_child_matches_token?(token)
+ children.any? { |child| child == token }
+ end
+
+ def matches_token?(token)
+ return false if token.nil?
+ (category_match(token.category) && value_match(token.value)) ^ @invert
+ end
+
  def category_match(category)
- @categories_hash.empty? || @categories_hash.key?(category)
+ @categories.empty? || @categories.key?(category)
  end

  def value_match(value)
- @values.empty? || @values_hash.key?(value)
+ @values.empty? || @values.key?(value)
  end

  def self.numeric; new(:numeric); end
  def self.string; new(:string); end
+ def self.logical; new(:logical); end
+ def self.value
+ new(:numeric) | new(:string) | new(:logical)
+ end
+
  def self.addsub; new(:operator, [:add, :subtract]); end
  def self.subtract; new(:operator, :subtract); end
  def self.muldiv; new(:operator, [:multiply, :divide]); end
  def self.pow; new(:operator, :pow); end
  def self.mod; new(:operator, :mod); end
+ def self.combinator; new(:combinator); end
+
  def self.comparator; new(:comparator); end
  def self.comp_gt; new(:comparator, [:gt, :ge]); end
  def self.comp_lt; new(:comparator, [:lt, :le]); end
+
+ def self.fopen; new(:grouping, :fopen); end
  def self.open; new(:grouping, :open); end
  def self.close; new(:grouping, :close); end
  def self.comma; new(:grouping, :comma); end
- def self.logical; new(:logical); end
- def self.combinator; new(:combinator); end
+ def self.non_group; new(:grouping).invert; end
+ def self.non_group_star; new(:grouping).invert.star; end
+ def self.non_close_plus; new(:grouping, :close).invert.plus; end
+ def self.arguments; (value | comma).plus; end
+
  def self.if; new(:function, :if); end
  def self.round; new(:function, :round); end
  def self.roundup; new(:function, :roundup); end
  def self.rounddown; new(:function, :rounddown); end
  def self.not; new(:function, :not); end
- def self.non_close_plus; new(:grouping, :close).invert.plus; end
- def self.non_group; new(:grouping).invert; end
- def self.non_group_star; new(:grouping).invert.star; end
-

  def self.method_missing(name, *args, &block)
  new(:function, name)
@@ -95,6 +124,5 @@ module Dentaku
  def self.respond_to_missing?(name, include_priv)
  true
  end
-
  end
- end
+ end
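A small sketch of what the new matcher composition enables, mirroring the token matcher specs further down (the token values are illustrative):

```ruby
require 'dentaku/token'
require 'dentaku/token_matcher'

# value is numeric | string | logical, so it matches any literal token.
value = Dentaku::TokenMatcher.value
value == Dentaku::Token.new(:numeric, 8)         #=> true
value == Dentaku::Token.new(:string, 'apricot')  #=> true
value == Dentaku::Token.new(:function, :round)   #=> false

# arguments is (value | comma).plus: a run of comma-separated values.
stream = [
  Dentaku::Token.new(:numeric, 1),
  Dentaku::Token.new(:grouping, :comma),
  Dentaku::Token.new(:numeric, 2)
]
matched, substream = Dentaku::TokenMatcher.arguments.match(stream)
matched          # truthy
substream.length #=> 3
```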
data/lib/dentaku/token_scanner.rb CHANGED
@@ -13,7 +13,9 @@ module Dentaku
  value = raw = m.to_s
  value = @converter.call(raw) if @converter

- return Token.new(@category, value, raw)
+ return Array(value).map do |v|
+ Token === v ? v : Token.new(@category, v, raw)
+ end
  end

  false
@@ -68,15 +70,18 @@ module Dentaku
  end

  def combinator
- new(:combinator, '(and|or)\b', lambda {|raw| raw.strip.downcase.to_sym })
+ new(:combinator, '(and|or)\b', lambda { |raw| raw.strip.downcase.to_sym })
  end

  def function
- new(:function, '(\w+\s*(?=\())', lambda {|raw| raw.strip.downcase.to_sym })
+ new(:function, '\w+\s*\(', lambda do |raw|
+ function_name = raw.gsub('(', '')
+ [Token.new(:function, function_name.strip.downcase.to_sym, function_name), Token.new(:grouping, :fopen, '(')]
+ end)
  end

  def identifier
- new(:identifier, '\w+\b', lambda {|raw| raw.strip.downcase.to_sym })
+ new(:identifier, '\w+\b', lambda { |raw| raw.strip.downcase.to_sym })
  end
  end
  end
data/lib/dentaku/tokenizer.rb CHANGED
@@ -4,7 +4,7 @@ require 'dentaku/token_scanner'

  module Dentaku
  class Tokenizer
- LPAREN = TokenMatcher.new(:grouping, :open)
+ LPAREN = TokenMatcher.new(:grouping, [:open, :fopen])
  RPAREN = TokenMatcher.new(:grouping, :close)

  def tokenize(string)
@@ -25,16 +25,19 @@ module Dentaku
  end

  def scan(string, scanner)
- if token = scanner.scan(string)
- raise "unexpected zero-width match (:#{ token.category }) at '#{ string }'" if token.length == 0
+ if tokens = scanner.scan(string)
+ tokens.each do |token|
+ raise "unexpected zero-width match (:#{ token.category }) at '#{ string }'" if token.length == 0

- @nesting += 1 if LPAREN == token
- @nesting -= 1 if RPAREN == token
- raise "too many closing parentheses" if @nesting < 0
+ @nesting += 1 if LPAREN == token
+ @nesting -= 1 if RPAREN == token
+ raise "too many closing parentheses" if @nesting < 0

- @tokens << token unless token.is?(:whitespace)
+ @tokens << token unless token.is?(:whitespace)
+ end

- [true, string[token.length..-1]]
+ match_length = tokens.map(&:length).reduce(:+)
+ [true, string[match_length..-1]]
  else
  [false, string]
  end
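The net effect on tokenization, mirroring the tokenizer specs below: the parenthesis that opens a function call now comes through as a distinct `:fopen` grouping value.

```ruby
require 'dentaku/tokenizer'

tokens = Dentaku::Tokenizer.new.tokenize('not(8 < 5)')

tokens.map(&:category) #=> [:function, :grouping, :numeric, :comparator, :numeric, :grouping]
tokens.map(&:value)    #=> [:not, :fopen, 8, :lt, 5, :close]
```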
data/lib/dentaku/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Dentaku
- VERSION = "1.2.0"
+ VERSION = "1.2.1"
  end
data/spec/dentaku_spec.rb CHANGED
@@ -8,4 +8,8 @@ describe Dentaku do
  it 'binds values to variables' do
  expect(Dentaku('oranges > 7', {:oranges => 10})).to be_truthy
  end
+
+ it 'evaluates a nested function' do
+ expect(Dentaku('roundup(roundup(3 * cherries) + raspberries)', cherries: 1.5, raspberries: 0.9)).to eql(6)
+ end
  end
data/spec/evaluator_spec.rb CHANGED
@@ -70,12 +70,21 @@ describe Dentaku::Evaluator do
  end
  end

+ describe 'find_rule_match' do
+ it 'matches a function call' do
+ if_pattern, _ = *Dentaku::Rules.core_rules.first
+ position, tokens = evaluator.find_rule_match(if_pattern, token_stream(:if, :fopen, true, :comma, 1, :comma, 2, :close))
+ expect(position).to eq 0
+ expect(tokens.length).to eq 8
+ end
+ end
+
  describe 'functions' do
  it 'is evaluated' do
- expect(evaluator.evaluate(token_stream(:round, :open, 5, :divide, 3.0, :close))).to eq 2
- expect(evaluator.evaluate(token_stream(:round, :open, 5, :divide, 3.0, :comma, 2, :close))).to eq 1.67
- expect(evaluator.evaluate(token_stream(:roundup, :open, 5, :divide, 1.2, :close))).to eq 5
- expect(evaluator.evaluate(token_stream(:rounddown, :open, 5, :divide, 1.2, :close))).to eq 4
+ expect(evaluator.evaluate(token_stream(:round, :fopen, 5, :divide, 3.0, :close))).to eq 2
+ expect(evaluator.evaluate(token_stream(:round, :fopen, 5, :divide, 3.0, :comma, 2, :close))).to eq 1.67
+ expect(evaluator.evaluate(token_stream(:roundup, :fopen, 5, :divide, 1.2, :close))).to eq 5
+ expect(evaluator.evaluate(token_stream(:rounddown, :fopen, 5, :divide, 1.2, :close))).to eq 4
  end
  end

@@ -96,13 +105,13 @@ describe Dentaku::Evaluator do
  end

  it 'evaluates combined conditionals' do
- expect(evaluator.evaluate(token_stream(5, :gt, 1, :or, :false))).to be_truthy
- expect(evaluator.evaluate(token_stream(5, :gt, 1, :and, :false))).to be_falsey
+ expect(evaluator.evaluate(token_stream(5, :gt, 1, :or, false))).to be_truthy
+ expect(evaluator.evaluate(token_stream(5, :gt, 1, :and, false))).to be_falsey
  end

  it 'negates a logical value' do
- expect(evaluator.evaluate(token_stream(:not, :open, 5, :gt, 1, :or, :false, :close))).to be_falsey
- expect(evaluator.evaluate(token_stream(:not, :open, 5, :gt, 1, :and, :false, :close))).to be_truthy
+ expect(evaluator.evaluate(token_stream(:not, :fopen, 5, :gt, 1, :or, false, :close))).to be_falsey
+ expect(evaluator.evaluate(token_stream(:not, :fopen, 5, :gt, 1, :and, false, :close))).to be_truthy
  end
  end
  end
data/spec/external_function_spec.rb CHANGED
@@ -1,3 +1,4 @@
+ require 'spec_helper'
  require 'dentaku/calculator'

  describe Dentaku::Calculator do
@@ -20,13 +21,13 @@ describe Dentaku::Calculator do
  {
  name: :max,
  type: :numeric,
- signature: [ :non_close_plus ],
+ signature: [ :arguments ],
  body: ->(*args) { args.max }
  },
  {
  name: :min,
  type: :numeric,
- signature: [ :non_close_plus ],
+ signature: [ :arguments ],
  body: ->(*args) { args.min }
  }
  ]
data/spec/spec_helper.rb CHANGED
@@ -2,7 +2,6 @@
  def token_stream(*args)
  args.map do |value|
  type = type_for(value)
- value = (value == :true) if type == :logical
  Dentaku::Token.new(type, value)
  end
  end
@@ -12,16 +11,18 @@ def type_for(value)
  case value
  when Numeric
  :numeric
+ when String
+ :string
+ when true, false
+ :logical
  when :add, :subtract, :multiply, :divide, :mod
  :operator
- when :open, :close, :comma
+ when :fopen, :open, :close, :comma
  :grouping
  when :le, :ge, :ne, :ne, :lt, :gt, :eq
  :comparator
  when :and, :or
  :combinator
- when :true, :false
- :logical
  when :if, :round, :roundup, :rounddown, :not
  :function
  else
data/spec/token_matcher_spec.rb CHANGED
@@ -53,6 +53,26 @@ describe Dentaku::TokenMatcher do
  expect(matcher).to eq(cmp)
  end

+ describe 'combining multiple tokens' do
+ let(:numeric) { described_class.new(:numeric) }
+ let(:string) { described_class.new(:string) }
+
+ it 'matches either' do
+ either = numeric | string
+ expect(either).to eq(Dentaku::Token.new(:numeric, 5))
+ expect(either).to eq(Dentaku::Token.new(:string, 'rhubarb'))
+ end
+
+ it 'matches any value' do
+ value = described_class.value
+ expect(value).to eq(Dentaku::Token.new(:numeric, 8))
+ expect(value).to eq(Dentaku::Token.new(:string, 'apricot'))
+ expect(value).to eq(Dentaku::Token.new(:logical, false))
+ expect(value).not_to eq(Dentaku::Token.new(:function, :round))
+ expect(value).not_to eq(Dentaku::Token.new(:identifier, :hello))
+ end
+ end
+
  describe 'stream matching' do
  let(:stream) { token_stream(5, 11, 9, 24, :hello, 8) }

@@ -100,6 +120,16 @@ describe Dentaku::TokenMatcher do
  expect(matched).not_to be_truthy
  end
  end
+
+ describe 'arguments' do
+ it 'matches comma-separated values' do
+ stream = token_stream(1, :comma, 2, :comma, true, :comma, 'olive', :comma, :'(')
+ matched, substream = described_class.arguments.match(stream)
+ expect(matched).to be_truthy
+ expect(substream.length).to eq 8
+ expect(substream.map(&:value)).to eq [1, :comma, 2, :comma, true, :comma, 'olive', :comma]
+ end
+ end
  end
  end

data/spec/token_scanner_spec.rb CHANGED
@@ -5,7 +5,7 @@ describe Dentaku::TokenScanner do
  let(:numeric) { described_class.new(:numeric, '(\d+(\.\d+)?|\.\d+)', lambda{|raw| raw =~ /\./ ? BigDecimal.new(raw) : raw.to_i }) }

  it 'returns a token for a matching string' do
- token = whitespace.scan(' ')
+ token = whitespace.scan(' ').first
  expect(token.category).to eq(:whitespace)
  expect(token.value).to eq(' ')
  end
@@ -15,7 +15,7 @@ describe Dentaku::TokenScanner do
  end

  it 'performs raw value conversion' do
- token = numeric.scan('5')
+ token = numeric.scan('5').first
  expect(token.category).to eq(:numeric)
  expect(token.value).to eq(5)
  end
data/spec/tokenizer_spec.rb CHANGED
@@ -102,36 +102,43 @@ describe Dentaku::Tokenizer do
  tokens = tokenizer.tokenize('if(x < 10, y, z)')
  expect(tokens.length).to eq(10)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :identifier, :comparator, :numeric, :grouping, :identifier, :grouping, :identifier, :grouping])
- expect(tokens.map(&:value)).to eq([:if, :open, :x, :lt, 10, :comma, :y, :comma, :z, :close])
+ expect(tokens.map(&:value)).to eq([:if, :fopen, :x, :lt, 10, :comma, :y, :comma, :z, :close])
  end

  it 'include ROUND/UP/DOWN' do
  tokens = tokenizer.tokenize('round(8.2)')
  expect(tokens.length).to eq(4)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.2'), :close])
+ expect(tokens.map(&:value)).to eq([:round, :fopen, BigDecimal.new('8.2'), :close])

  tokens = tokenizer.tokenize('round(8.75, 1)')
  expect(tokens.length).to eq(6)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.75'), :comma, 1, :close])
+ expect(tokens.map(&:value)).to eq([:round, :fopen, BigDecimal.new('8.75'), :comma, 1, :close])

  tokens = tokenizer.tokenize('ROUNDUP(8.2)')
  expect(tokens.length).to eq(4)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:roundup, :open, BigDecimal.new('8.2'), :close])
+ expect(tokens.map(&:value)).to eq([:roundup, :fopen, BigDecimal.new('8.2'), :close])

  tokens = tokenizer.tokenize('RoundDown(8.2)')
  expect(tokens.length).to eq(4)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:rounddown, :open, BigDecimal.new('8.2'), :close])
+ expect(tokens.map(&:value)).to eq([:rounddown, :fopen, BigDecimal.new('8.2'), :close])
  end

  it 'include NOT' do
  tokens = tokenizer.tokenize('not(8 < 5)')
  expect(tokens.length).to eq(6)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
+ expect(tokens.map(&:value)).to eq([:not, :fopen, 8, :lt, 5, :close])
+ end
+
+ it 'handles whitespace after function name' do
+ tokens = tokenizer.tokenize('not (8 < 5)')
+ expect(tokens.length).to eq(6)
+ expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
+ expect(tokens.map(&:value)).to eq([:not, :fopen, 8, :lt, 5, :close])
  end
  end
  end
metadata CHANGED
@@ -1,53 +1,59 @@
  --- !ruby/object:Gem::Specification
  name: dentaku
  version: !ruby/object:Gem::Version
- version: 1.2.0
+ version: 1.2.1
+ prerelease:
  platform: ruby
  authors:
  - Solomon White
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-10-21 00:00:00.000000000 Z
+ date: 2014-10-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
+ none: false
  requirements:
- - - ">="
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
+ none: false
  requirements:
- - - ">="
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
+ none: false
  requirements:
- - - ">="
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
+ none: false
  requirements:
- - - ">="
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
- description: |2
- Dentaku is a parser and evaluator for mathematical formulas
+ description: ! ' Dentaku is a parser and evaluator for mathematical formulas
+
+ '
  email:
  - rubysolo@gmail.com
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
- - ".gitignore"
- - ".travis.yml"
+ - .gitignore
+ - .travis.yml
  - Gemfile
  - README.md
  - Rakefile
@@ -57,6 +63,7 @@ files:
  - lib/dentaku/calculator.rb
  - lib/dentaku/dependency_resolver.rb
  - lib/dentaku/evaluator.rb
+ - lib/dentaku/exceptions.rb
  - lib/dentaku/expression.rb
  - lib/dentaku/external_function.rb
  - lib/dentaku/rules.rb
@@ -79,26 +86,33 @@ files:
  homepage: http://github.com/rubysolo/dentaku
  licenses:
  - MIT
- metadata: {}
  post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
+ none: false
  requirements:
- - - ">="
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
+ segments:
+ - 0
+ hash: -1375666282720046081
  required_rubygems_version: !ruby/object:Gem::Requirement
+ none: false
  requirements:
- - - ">="
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
+ segments:
+ - 0
+ hash: -1375666282720046081
  requirements: []
  rubyforge_project: dentaku
- rubygems_version: 2.2.2
+ rubygems_version: 1.8.23.2
  signing_key:
- specification_version: 4
+ specification_version: 3
  summary: A formula language parser and evaluator
  test_files:
  - spec/binary_operation_spec.rb
checksums.yaml DELETED
@@ -1,7 +0,0 @@
- ---
- SHA1:
- metadata.gz: 064d1f2a40ca1fa6caf9b07f5fb535a673fcbe32
- data.tar.gz: 0fe2cb4349c5423da447fa81173d3b70636bacff
- SHA512:
- metadata.gz: 55fdf11b6a81a851c4d5db09bd2d1ee3385d7ca0f08d2ba806bb9cf9dc914acda0b90d38b817353d28ab99c1b89b198a7d10e998360b2870ca04d485616a97cf
- data.tar.gz: f68bc08dd8d1ff2f3d4e79f5af7f3131b59ce539d2001360e78c8fe340421f993c6e67d4ea1490b69928861c19b74615caea09cfd0e74cd251f0affd142eb0bd