dentaku 2.0.1 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: fa37d9abba8c7bf0ea6d99f1f0ac472058772436
- data.tar.gz: c07c67b22659bb6c26d7a70edd119bee53ada064
+ metadata.gz: 321fc84e833503ab21de4fdb06254666376ebe85
+ data.tar.gz: aa1d2820110ba82ff68219e6b5b0681224bfd44d
  SHA512:
- metadata.gz: 4b489326fecca7a6a114c64d01efd17149992338167e42cec9fd71f1e12b96bafb72a91ccf2b8b4f6ab895b85d72a74ceb12ad3c3a2b574bff7f3a43471bbb2f
- data.tar.gz: 58bf32696c4c604269f117fa062dc5ea155dd6c9d2eab7007611dd8bc75b129b332d63dfa15aff3deb4f5c6bd1153aed9546fa3ffbca0b59a8c9fa398dfc0868
+ metadata.gz: d5929679dbe4e6caea26ba61f4c920fe9a1d9e3b4880cc21c59a57e33109dc0ae9896f1c47ad4baadfe3ad1e2a0ac20d588b2266b63c8b3fcf4f535504871c95
+ data.tar.gz: aae2e18c9cad2e3452e2c8193c93be0754ee09628095119fdff91db9de613526cba57f9f4b3f5ff0c0eee6935298c754075ded6cd75b86c96c3aa1000cc29add
@@ -7,5 +7,6 @@ rvm:
  - 2.2.0
  - 2.2.1
  - 2.2.2
+ - 2.2.3
  - jruby-19mode
  - rbx-2
@@ -1,5 +1,10 @@
  # Change Log

+ ## [v2.0.2] 2015-08-20
+ - bug fixes
+ - performance enhancements
+ - code cleanup
+
  ## [v2.0.1] 2015-08-15
  - add support for boolean literals
  - implement basic parse-time type checking
@@ -75,6 +80,8 @@
  ## [v0.1.0] 2012-01-20
  - initial release

+ [v2.0.2]: https://github.com/rubysolo/dentaku/compare/v2.0.1...v2.0.2
+ [v2.0.1]: https://github.com/rubysolo/dentaku/compare/v2.0.0...v2.0.1
  [v2.0.0]: https://github.com/rubysolo/dentaku/compare/v1.2.6...v2.0.0
  [v1.2.6]: https://github.com/rubysolo/dentaku/compare/v1.2.5...v1.2.6
  [v1.2.5]: https://github.com/rubysolo/dentaku/compare/v1.2.2...v1.2.5
data/README.md CHANGED
@@ -99,6 +99,26 @@ Dentaku('plums * 1.5', plums: 2)
  #=> 3.0
  ```

+ PERFORMANCE
+ -----------
+
+ The flexibility and safety of Dentaku don't come without a price. Tokenizing a
+ string, parsing to an AST, and then evaluating that AST are about 2 orders of
+ magnitude slower than doing the same math in pure Ruby!
+
+ The good news is that most of the time is spent in the tokenization and parsing
+ phases, so if performance is a concern, you can enable AST caching:
+
+ ```ruby
+ Dentaku.enable_ast_cache!
+ ```
+
+ After this, Dentaku will cache the AST of each formula that it evaluates, so
+ subsequent evaluations (even with different values for variables) will be much
+ faster -- closer to 4x native Ruby speed. As usual, these benchmarks should be
+ considered rough estimates, and you should measure with representative formulas
+ from your application. Also, if new formulas are constantly introduced to your
+ application, AST caching will consume more memory with each new formula.

  BUILT-IN OPERATORS AND FUNCTIONS
  ---------------------------------
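For readers skimming the diff, here is a minimal usage sketch of the cache introduced in the README section above. The formula string and variable names are illustrative only; the point is that one formula is parsed once and then evaluated repeatedly with different bindings.

```ruby
require 'dentaku'

# Turn on global AST caching (the switch documented in the README addition above).
Dentaku.enable_ast_cache!

calculator = Dentaku::Calculator.new

# The formula string is tokenized and parsed once; later evaluations with
# different variable bindings reuse the cached AST.
calculator.evaluate('unit_price * quantity', unit_price: 2.5, quantity: 4) #=> 10.0
calculator.evaluate('unit_price * quantity', unit_price: 3.0, quantity: 2) #=> 6.0
```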
@@ -8,6 +8,10 @@ module Dentaku
  fail "#{ self.class } requires logical operands" unless valid_node?(left) && valid_node?(right)
  end

+ def type
+ :logical
+ end
+
  private

  def valid_node?(node)
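The `type` method added above lets a combinator node identify itself as `:logical`, which is what allows one combinator to accept another as an operand (the new `Dentaku::AST::And` spec further down exercises exactly this with an `Or` node). A rough sketch of the end-to-end effect, with an illustrative formula and bindings:

```ruby
require 'dentaku'

# Nested logical expressions: the inner OR node reports type :logical, so it
# is a valid operand for the outer AND. Formula and values are illustrative.
Dentaku('x > 1 AND (y < 2 OR z > 3)', x: 2, y: 1, z: 0)
#=> true
```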
@@ -59,14 +59,14 @@ module Dentaku
  end

  def store(key_or_hash, value=nil)
- restore = memory.dup
+ restore = Hash[memory]

  if value.nil?
  key_or_hash.each do |key, val|
- memory[key.downcase.to_s] = val
+ memory[key.to_s.downcase] = val
  end
  else
- memory[key_or_hash.to_s] = value
+ memory[key_or_hash.to_s.downcase] = value
  end

  if block_given?
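The `store` changes above normalize every key through `to_s.downcase`, including the single key/value form, so variables stored under symbols or mixed-case names resolve consistently. A small sketch of the resulting behavior (the identifier names here are illustrative):

```ruby
require 'dentaku'

calculator = Dentaku::Calculator.new

# Keys are lowercased as strings on store, so symbol and mixed-case keys
# land in the same slot that lowercased identifiers in a formula look up.
calculator.store(:Price, 10)              # stored as "price"
calculator.store('Quantity' => 3)         # stored as "quantity"
calculator.evaluate('price * quantity')   #=> 30
```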
@@ -60,7 +60,7 @@ module Dentaku

  when :grouping
  case token.value
- when :open, :fopen
+ when :open
  if input.first && input.first.value == :close
  input.shift
  consume(0)
@@ -112,7 +112,6 @@ module Dentaku
  def self.comp_gt; new(:comparator, [:gt, :ge]); end
  def self.comp_lt; new(:comparator, [:lt, :le]); end

- def self.fopen; new(:grouping, :fopen); end
  def self.open; new(:grouping, :open); end
  def self.close; new(:grouping, :close); end
  def self.comma; new(:grouping, :comma); end
@@ -5,7 +5,7 @@ module Dentaku
  end

  def self.function_token_matchers(function_name, *symbols)
- token_matchers(:fopen, *symbols, :close).unshift(
+ token_matchers(:open, *symbols, :close).unshift(
  TokenMatcher.send(function_name)
  )
  end
@@ -13,7 +13,7 @@ module Dentaku
  def self.matcher(symbol)
  @matchers ||= [
  :numeric, :string, :addsub, :subtract, :muldiv, :pow, :mod,
- :comparator, :comp_gt, :comp_lt, :fopen, :open, :close, :comma,
+ :comparator, :comp_gt, :comp_lt, :open, :close, :comma,
  :non_close_plus, :non_group, :non_group_star, :arguments,
  :logical, :combinator, :if, :round, :roundup, :rounddown, :not,
  :anchored_minus, :math_neg_pow, :math_neg_mul
@@ -64,7 +64,6 @@ module Dentaku
  last_token.is?(:comparator) ||
  last_token.is?(:combinator) ||
  last_token.value == :open ||
- last_token.value == :fopen ||
  last_token.value == :comma
  })
  end
@@ -96,7 +95,7 @@ module Dentaku
  def function
  new(:function, '\w+\s*\(', lambda do |raw|
  function_name = raw.gsub('(', '')
- [Token.new(:function, function_name.strip.downcase.to_sym, function_name), Token.new(:grouping, :fopen, '(')]
+ [Token.new(:function, function_name.strip.downcase.to_sym, function_name), Token.new(:grouping, :open, '(')]
  end)
  end

@@ -4,7 +4,7 @@ require 'dentaku/token_scanner'

  module Dentaku
  class Tokenizer
- LPAREN = TokenMatcher.new(:grouping, [:open, :fopen])
+ LPAREN = TokenMatcher.new(:grouping, :open)
  RPAREN = TokenMatcher.new(:grouping, :close)

  def tokenize(string)
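Taken together, the hunks above retire the dedicated `:fopen` token: a function call's opening parenthesis is now an ordinary `:open` grouping token, which is exactly what the updated tokenizer specs below assert. A quick sketch of the resulting token stream:

```ruby
require 'dentaku/tokenizer'

# After the consolidation, 'if(' yields a :function token followed by a
# plain :open grouping token (previously :fopen).
tokens = Dentaku::Tokenizer.new.tokenize('if(x < 10, y, z)')
tokens.map(&:value)
#=> [:if, :open, "x", :lt, 10, :comma, "y", :comma, "z", :close]
```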
@@ -1,3 +1,3 @@
  module Dentaku
- VERSION = "2.0.1"
+ VERSION = "2.0.2"
  end
@@ -23,5 +23,10 @@ describe Dentaku::AST::And do
  expect {
  described_class.new(t, expression)
  }.not_to raise_error
+
+ expression = Dentaku::AST::Or.new(t, f)
+ expect {
+ described_class.new(t, expression)
+ }.not_to raise_error
  end
  end
@@ -26,20 +26,19 @@ def test(args, custom_function: true)
  bm = Benchmark.measure do
  stats = AllocationStats.trace do

- calls.each do |formule, bound|
+ calls.each do |formula, bound|

  calculator = Dentaku::Calculator.new

  if custom_function
  calculator.add_function(
- name: :sum,
- type: :numeric,
- signature: [:arguments],
- body: ->(numbers) { numbers.inject(:+) },
+ :sum,
+ :numeric,
+ ->(numbers) { numbers.inject(:+) }
  )
  end

- calculator.evaluate(formule, bound)
+ calculator.evaluate(formula, bound)
  end
  end
  end
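Besides fixing the `formule` typo, the benchmark hunk above documents the new registration style: `add_function` now takes the name, return type, and body positionally rather than as an options hash with a `signature`. A minimal registration sketch in that style (the `:sum` body is copied from the benchmark; everything else is illustrative):

```ruby
require 'dentaku'

calculator = Dentaku::Calculator.new

# Positional form: function name, return type, then the body lambda.
calculator.add_function(:sum, :numeric, ->(numbers) { numbers.inject(:+) })
```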
@@ -63,7 +63,7 @@ describe Dentaku::Parser do

  it 'evaluates functions' do
  fn = Dentaku::Token.new(:function, :if)
- fopen = Dentaku::Token.new(:grouping, :fopen)
+ fopen = Dentaku::Token.new(:grouping, :open)
  five = Dentaku::Token.new(:numeric, 5)
  lt = Dentaku::Token.new(:comparator, :lt)
  four = Dentaku::Token.new(:numeric, 4)
@@ -19,7 +19,7 @@ def type_for(value)
  :logical
  when :add, :subtract, :multiply, :divide, :mod, :pow
  :operator
- when :fopen, :open, :close, :comma
+ when :open, :close, :comma
  :grouping
  when :le, :ge, :ne, :ne, :lt, :gt, :eq
  :comparator
@@ -31,7 +31,7 @@ describe Dentaku::Tokenizer do
  :operator, :numeric # - 9
  ])
  expect(tokens.map(&:value)).to eq([
- :if, :fopen, # if(
+ :if, :open, # if(
  :negate, 5, :gt, 'x', :comma, # -5 > x,
  :negate, 7, :comma, # -7,
  :negate, 8, :close, # -8)
@@ -170,43 +170,43 @@ describe Dentaku::Tokenizer do
  tokens = tokenizer.tokenize('if(x < 10, y, z)')
  expect(tokens.length).to eq(10)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :identifier, :comparator, :numeric, :grouping, :identifier, :grouping, :identifier, :grouping])
- expect(tokens.map(&:value)).to eq([:if, :fopen, 'x', :lt, 10, :comma, 'y', :comma, 'z', :close])
+ expect(tokens.map(&:value)).to eq([:if, :open, 'x', :lt, 10, :comma, 'y', :comma, 'z', :close])
  end

  it 'include ROUND/UP/DOWN' do
  tokens = tokenizer.tokenize('round(8.2)')
  expect(tokens.length).to eq(4)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:round, :fopen, BigDecimal.new('8.2'), :close])
+ expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.2'), :close])

  tokens = tokenizer.tokenize('round(8.75, 1)')
  expect(tokens.length).to eq(6)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:round, :fopen, BigDecimal.new('8.75'), :comma, 1, :close])
+ expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.75'), :comma, 1, :close])

  tokens = tokenizer.tokenize('ROUNDUP(8.2)')
  expect(tokens.length).to eq(4)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:roundup, :fopen, BigDecimal.new('8.2'), :close])
+ expect(tokens.map(&:value)).to eq([:roundup, :open, BigDecimal.new('8.2'), :close])

  tokens = tokenizer.tokenize('RoundDown(8.2)')
  expect(tokens.length).to eq(4)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:rounddown, :fopen, BigDecimal.new('8.2'), :close])
+ expect(tokens.map(&:value)).to eq([:rounddown, :open, BigDecimal.new('8.2'), :close])
  end

  it 'include NOT' do
  tokens = tokenizer.tokenize('not(8 < 5)')
  expect(tokens.length).to eq(6)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:not, :fopen, 8, :lt, 5, :close])
+ expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
  end

  it 'handles whitespace after function name' do
  tokens = tokenizer.tokenize('not (8 < 5)')
  expect(tokens.length).to eq(6)
  expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
- expect(tokens.map(&:value)).to eq([:not, :fopen, 8, :lt, 5, :close])
+ expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
  end
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: dentaku
  version: !ruby/object:Gem::Version
- version: 2.0.1
+ version: 2.0.2
  platform: ruby
  authors:
  - Solomon White
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-08-16 00:00:00.000000000 Z
+ date: 2015-08-20 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake