dentaku 2.0.1 → 2.0.2
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +1 -0
- data/CHANGELOG.md +7 -0
- data/README.md +20 -0
- data/lib/dentaku/ast/combinators.rb +4 -0
- data/lib/dentaku/calculator.rb +3 -3
- data/lib/dentaku/parser.rb +1 -1
- data/lib/dentaku/token_matcher.rb +0 -1
- data/lib/dentaku/token_matchers.rb +2 -2
- data/lib/dentaku/token_scanner.rb +1 -2
- data/lib/dentaku/tokenizer.rb +1 -1
- data/lib/dentaku/version.rb +1 -1
- data/spec/ast/and_spec.rb +5 -0
- data/spec/benchmark.rb +5 -6
- data/spec/parser_spec.rb +1 -1
- data/spec/spec_helper.rb +1 -1
- data/spec/tokenizer_spec.rb +8 -8
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 321fc84e833503ab21de4fdb06254666376ebe85
+  data.tar.gz: aa1d2820110ba82ff68219e6b5b0681224bfd44d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d5929679dbe4e6caea26ba61f4c920fe9a1d9e3b4880cc21c59a57e33109dc0ae9896f1c47ad4baadfe3ad1e2a0ac20d588b2266b63c8b3fcf4f535504871c95
+  data.tar.gz: aae2e18c9cad2e3452e2c8193c93be0754ee09628095119fdff91db9de613526cba57f9f4b3f5ff0c0eee6935298c754075ded6cd75b86c96c3aa1000cc29add
data/.travis.yml
CHANGED
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,10 @@
 # Change Log
 
+## [v2.0.2] 2015-08-20
+- bug fixes
+- performance enhancements
+- code cleanup
+
 ## [v2.0.1] 2015-08-15
 - add support for boolean literals
 - implement basic parse-time type checking
@@ -75,6 +80,8 @@
 ## [v0.1.0] 2012-01-20
 - initial release
 
+[v2.0.2]: https://github.com/rubysolo/dentaku/compare/v2.0.1...v2.0.2
+[v2.0.1]: https://github.com/rubysolo/dentaku/compare/v2.0.0...v2.0.1
 [v2.0.0]: https://github.com/rubysolo/dentaku/compare/v1.2.6...v2.0.0
 [v1.2.6]: https://github.com/rubysolo/dentaku/compare/v1.2.5...v1.2.6
 [v1.2.5]: https://github.com/rubysolo/dentaku/compare/v1.2.2...v1.2.5
data/README.md
CHANGED
@@ -99,6 +99,26 @@ Dentaku('plums * 1.5', plums: 2)
 #=> 3.0
 ```
 
+PERFORMANCE
+-----------
+
+The flexibility and safety of Dentaku don't come without a price. Tokenizing a
+string, parsing to an AST, and then evaluating that AST are about 2 orders of
+magnitude slower than doing the same math in pure Ruby!
+
+The good news is that most of the time is spent in the tokenization and parsing
+phases, so if performance is a concern, you can enable AST caching:
+
+```ruby
+Dentaku.enable_ast_cache!
+```
+
+After this, Dentaku will cache the AST of each formula that it evaluates, so
+subsequent evaluations (even with different values for variables) will be much
+faster -- closer to 4x native Ruby speed. As usual, these benchmarks should be
+considered rough estimates, and you should measure with representative formulas
+from your application. Also, if new formulas are constantly introduced to your
+application, AST caching will consume more memory with each new formula.
 
 BUILT-IN OPERATORS AND FUNCTIONS
 ---------------------------------
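The README addition above explains the AST cache switch; as a quick illustration of the intended workflow, here is a minimal sketch using the `Dentaku()` helper shown earlier in the README. The formula and variable values are made up for illustration, and exact return types may differ.

```ruby
require 'dentaku'

# Parse each formula once; later evaluations reuse the cached AST.
Dentaku.enable_ast_cache!

formula = 'unit_price * quantity'

# First call pays the tokenize/parse cost and caches the AST.
Dentaku(formula, unit_price: 10, quantity: 3)  #=> 30

# Subsequent calls with different variable bindings reuse the cached AST.
Dentaku(formula, unit_price: 2, quantity: 5)   #=> 10
```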
data/lib/dentaku/calculator.rb
CHANGED
@@ -59,14 +59,14 @@ module Dentaku
     end
 
     def store(key_or_hash, value=nil)
-      restore = memory
+      restore = Hash[memory]
 
       if value.nil?
         key_or_hash.each do |key, val|
-          memory[key.downcase] = val
+          memory[key.to_s.downcase] = val
         end
       else
-        memory[key_or_hash.to_s] = value
+        memory[key_or_hash.to_s.downcase] = value
       end
 
       if block_given?
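For context on the calculator.rb change: store now snapshots memory with Hash[memory] and normalizes every key via to_s.downcase, so symbol and mixed-case keys resolve to the same slot as the lowercase identifier used in a formula. A minimal sketch under that assumption (the variable names are illustrative, and case-insensitive identifier lookup is assumed to behave as documented elsewhere in the gem):

```ruby
require 'dentaku'

calculator = Dentaku::Calculator.new

# Symbol / mixed-case keys are normalized with to_s.downcase on store.
calculator.store(UnitPrice: 10)
calculator.evaluate('unitprice * 3')  #=> 30

# With a block, store works on a copy (restore = Hash[memory]) and puts
# the original values back after the block returns.
calculator.store('x' => 1) do
  calculator.evaluate('x + 1')  #=> 2
end
```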
data/lib/dentaku/parser.rb
CHANGED
data/lib/dentaku/token_matcher.rb
CHANGED
@@ -112,7 +112,6 @@ module Dentaku
     def self.comp_gt; new(:comparator, [:gt, :ge]); end
     def self.comp_lt; new(:comparator, [:lt, :le]); end
 
-    def self.fopen; new(:grouping, :fopen); end
     def self.open; new(:grouping, :open); end
     def self.close; new(:grouping, :close); end
     def self.comma; new(:grouping, :comma); end
data/lib/dentaku/token_matchers.rb
CHANGED
@@ -5,7 +5,7 @@ module Dentaku
    end
 
    def self.function_token_matchers(function_name, *symbols)
-      token_matchers(:fopen, *symbols, :close).unshift(
+      token_matchers(:open, *symbols, :close).unshift(
        TokenMatcher.send(function_name)
      )
    end
@@ -13,7 +13,7 @@ module Dentaku
    def self.matcher(symbol)
      @matchers ||= [
        :numeric, :string, :addsub, :subtract, :muldiv, :pow, :mod,
-        :comparator, :comp_gt, :comp_lt, :fopen, :open, :close, :comma,
+        :comparator, :comp_gt, :comp_lt, :open, :close, :comma,
        :non_close_plus, :non_group, :non_group_star, :arguments,
        :logical, :combinator, :if, :round, :roundup, :rounddown, :not,
        :anchored_minus, :math_neg_pow, :math_neg_mul
data/lib/dentaku/token_scanner.rb
CHANGED
@@ -64,7 +64,6 @@ module Dentaku
          last_token.is?(:comparator) ||
          last_token.is?(:combinator) ||
          last_token.value == :open ||
-          last_token.value == :fopen ||
          last_token.value == :comma
        })
      end
@@ -96,7 +95,7 @@
      def function
        new(:function, '\w+\s*\(', lambda do |raw|
          function_name = raw.gsub('(', '')
-          [Token.new(:function, function_name.strip.downcase.to_sym, function_name), Token.new(:grouping, :fopen, '(')]
+          [Token.new(:function, function_name.strip.downcase.to_sym, function_name), Token.new(:grouping, :open, '(')]
        end)
      end
 
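Taken together, the matcher and scanner changes retire the dedicated :fopen grouping token: the parenthesis that opens a function call is now emitted as the same :open token used for ordinary grouping. A small sketch of the observable effect, mirroring the tokenizer specs further down in this diff:

```ruby
require 'dentaku'
require 'dentaku/tokenizer'

tokens = Dentaku::Tokenizer.new.tokenize('round(8.2)')

# The '(' after a function name is now a plain :open grouping token.
tokens.map(&:category)  #=> [:function, :grouping, :numeric, :grouping]
tokens.map(&:value)     #=> [:round, :open, BigDecimal.new('8.2'), :close]
```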
data/lib/dentaku/tokenizer.rb
CHANGED
data/lib/dentaku/version.rb
CHANGED
data/spec/ast/and_spec.rb
CHANGED
data/spec/benchmark.rb
CHANGED
@@ -26,20 +26,19 @@ def test(args, custom_function: true)
   bm = Benchmark.measure do
     stats = AllocationStats.trace do
 
-      calls.each do |
+      calls.each do |formula, bound|
 
         calculator = Dentaku::Calculator.new
 
         if custom_function
           calculator.add_function(
-
-
-
-            body: ->(numbers) { numbers.inject(:+) },
+            :sum,
+            :numeric,
+            ->(numbers) { numbers.inject(:+) }
           )
         end
 
-        calculator.evaluate(
+        calculator.evaluate(formula, bound)
      end
    end
  end
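The benchmark moves from a keyword-style add_function call (only its body: line survives in this extract) to the positional form: function name, return type, and the implementation lambda. As a standalone sketch of that call shape, lifted directly from the updated benchmark (how the registered sum function is invoked by the benchmark's formulas is not shown in this diff):

```ruby
require 'dentaku'

calculator = Dentaku::Calculator.new

# Positional signature used by the updated benchmark:
# function name, return type, and the implementation lambda.
calculator.add_function(:sum, :numeric, ->(numbers) { numbers.inject(:+) })
```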
data/spec/parser_spec.rb
CHANGED
@@ -63,7 +63,7 @@ describe Dentaku::Parser do
 
   it 'evaluates functions' do
     fn = Dentaku::Token.new(:function, :if)
-    fopen = Dentaku::Token.new(:grouping, :fopen)
+    fopen = Dentaku::Token.new(:grouping, :open)
     five = Dentaku::Token.new(:numeric, 5)
     lt = Dentaku::Token.new(:comparator, :lt)
     four = Dentaku::Token.new(:numeric, 4)
data/spec/spec_helper.rb
CHANGED
data/spec/tokenizer_spec.rb
CHANGED
@@ -31,7 +31,7 @@ describe Dentaku::Tokenizer do
      :operator, :numeric # - 9
    ])
    expect(tokens.map(&:value)).to eq([
-      :if, :fopen, # if(
+      :if, :open, # if(
      :negate, 5, :gt, 'x', :comma, # -5 > x,
      :negate, 7, :comma, # -7,
      :negate, 8, :close, # -8)
@@ -170,43 +170,43 @@ describe Dentaku::Tokenizer do
    tokens = tokenizer.tokenize('if(x < 10, y, z)')
    expect(tokens.length).to eq(10)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :identifier, :comparator, :numeric, :grouping, :identifier, :grouping, :identifier, :grouping])
-    expect(tokens.map(&:value)).to eq([:if, :fopen, 'x', :lt, 10, :comma, 'y', :comma, 'z', :close])
+    expect(tokens.map(&:value)).to eq([:if, :open, 'x', :lt, 10, :comma, 'y', :comma, 'z', :close])
  end
 
  it 'include ROUND/UP/DOWN' do
    tokens = tokenizer.tokenize('round(8.2)')
    expect(tokens.length).to eq(4)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
-    expect(tokens.map(&:value)).to eq([:round, :fopen, BigDecimal.new('8.2'), :close])
+    expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.2'), :close])
 
    tokens = tokenizer.tokenize('round(8.75, 1)')
    expect(tokens.length).to eq(6)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping, :numeric, :grouping])
-    expect(tokens.map(&:value)).to eq([:round, :fopen, BigDecimal.new('8.75'), :comma, 1, :close])
+    expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.75'), :comma, 1, :close])
 
    tokens = tokenizer.tokenize('ROUNDUP(8.2)')
    expect(tokens.length).to eq(4)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
-    expect(tokens.map(&:value)).to eq([:roundup, :fopen, BigDecimal.new('8.2'), :close])
+    expect(tokens.map(&:value)).to eq([:roundup, :open, BigDecimal.new('8.2'), :close])
 
    tokens = tokenizer.tokenize('RoundDown(8.2)')
    expect(tokens.length).to eq(4)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
-    expect(tokens.map(&:value)).to eq([:rounddown, :fopen, BigDecimal.new('8.2'), :close])
+    expect(tokens.map(&:value)).to eq([:rounddown, :open, BigDecimal.new('8.2'), :close])
  end
 
  it 'include NOT' do
    tokens = tokenizer.tokenize('not(8 < 5)')
    expect(tokens.length).to eq(6)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
-    expect(tokens.map(&:value)).to eq([:not, :fopen, 8, :lt, 5, :close])
+    expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
  end
 
  it 'handles whitespace after function name' do
    tokens = tokenizer.tokenize('not (8 < 5)')
    expect(tokens.length).to eq(6)
    expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
-    expect(tokens.map(&:value)).to eq([:not, :fopen, 8, :lt, 5, :close])
+    expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
  end
 end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: dentaku
 version: !ruby/object:Gem::Version
-  version: 2.0.1
+  version: 2.0.2
 platform: ruby
 authors:
 - Solomon White
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-08-
+date: 2015-08-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
|