dentaku 1.0.0 → 1.1.0
This diff shows the changes between publicly released versions of this package as they appear in their public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.travis.yml +6 -1
- data/README.md +16 -3
- data/lib/dentaku.rb +9 -0
- data/lib/dentaku/binary_operation.rb +1 -1
- data/lib/dentaku/calculator.rb +9 -31
- data/lib/dentaku/evaluator.rb +19 -11
- data/lib/dentaku/expression.rb +55 -0
- data/lib/dentaku/token.rb +4 -0
- data/lib/dentaku/token_matcher.rb +13 -9
- data/lib/dentaku/token_scanner.rb +3 -3
- data/lib/dentaku/version.rb +1 -1
- data/spec/binary_operation_spec.rb +14 -14
- data/spec/calculator_spec.rb +62 -62
- data/spec/dentaku_spec.rb +4 -4
- data/spec/evaluator_spec.rb +44 -44
- data/spec/expression_spec.rb +25 -0
- data/spec/external_function_spec.rb +11 -11
- data/spec/token_matcher_spec.rb +45 -45
- data/spec/token_scanner_spec.rb +11 -11
- data/spec/token_spec.rb +4 -4
- data/spec/tokenizer_spec.rb +75 -57
- metadata +5 -2
data/spec/dentaku_spec.rb
CHANGED
@@ -1,11 +1,11 @@
 require 'dentaku'
 
 describe Dentaku do
-  it '
-    Dentaku('5+3').
+  it 'evaulates an expression' do
+    expect(Dentaku('5+3')).to eql(8)
   end
 
-  it '
-    Dentaku('oranges > 7', {:oranges => 10}).
+  it 'binds values to variables' do
+    expect(Dentaku('oranges > 7', {:oranges => 10})).to be_truthy
   end
 end
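Note: the spec hunks in this diff all follow the same pattern, migrating from RSpec's older should-based assertions to the expect syntax. The removed lines are truncated by the diff rendering, so their exact wording is not shown; the "before" shape below is an assumption based on standard RSpec 2 usage, while the "after" shape is taken verbatim from the diff.

    # assumed RSpec 2 form of the removed assertion (truncated above)
    Dentaku('5+3').should eql(8)

    # RSpec 3 expect form used throughout 1.1.0
    expect(Dentaku('5+3')).to eql(8)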
data/spec/evaluator_spec.rb
CHANGED
@@ -5,104 +5,104 @@ describe Dentaku::Evaluator do
   let(:evaluator) { Dentaku::Evaluator.new }
 
   describe 'rule scanning' do
-    it '
+    it 'finds a matching rule' do
       rule = [Dentaku::TokenMatcher.new(:numeric, nil)]
       stream = [Dentaku::Token.new(:numeric, 1), Dentaku::Token.new(:operator, :add), Dentaku::Token.new(:numeric, 1)]
       position, _match = evaluator.find_rule_match(rule, stream)
-      position.
+      expect(position).to eq(0)
     end
   end
 
   describe 'evaluating' do
-    it 'empty expression
-      evaluator.evaluate([]).
+    it 'empty expression is be truthy' do
+      expect(evaluator.evaluate([])).to be
     end
 
-    it 'empty expression
-      evaluator.evaluate([]).
+    it 'empty expression equals 0' do
+      expect(evaluator.evaluate([])).to eq(0)
     end
 
-    it 'single numeric
-      evaluator.evaluate([Dentaku::Token.new(:numeric, 10)]).
-      evaluator.evaluate([Dentaku::Token.new(:string, 'a')]).
+    it 'single numeric evaluates to its value' do
+      expect(evaluator.evaluate([Dentaku::Token.new(:numeric, 10)])).to eq(10)
+      expect(evaluator.evaluate([Dentaku::Token.new(:string, 'a')])).to eq('a')
     end
 
-    it '
+    it 'evaluates one apply step' do
       stream = token_stream(1, :add, 1, :add, 1)
       expected = token_stream(2, :add, 1)
 
-      evaluator.evaluate_step(stream, 0, 3, :apply).
+      expect(evaluator.evaluate_step(stream, 0, 3, :apply)).to eq(expected)
     end
 
-    it '
+    it 'evaluates one grouping step' do
       stream = token_stream(:open, 1, :add, 1, :close, :multiply, 5)
       expected = token_stream(2, :multiply, 5)
 
-      evaluator.evaluate_step(stream, 0, 5, :evaluate_group).
+      expect(evaluator.evaluate_step(stream, 0, 5, :evaluate_group)).to eq(expected)
     end
 
     it 'supports unary minus' do
-      evaluator.evaluate(token_stream(:subtract, 1)).
-      evaluator.evaluate(token_stream(1, :subtract, :subtract, 1)).
+      expect(evaluator.evaluate(token_stream(:subtract, 1))).to eq(-1)
+      expect(evaluator.evaluate(token_stream(1, :subtract, :subtract, 1))).to eq(2)
     end
 
     it 'supports unary percentage' do
-      evaluator.evaluate(token_stream(50, :mod)).
-      evaluator.evaluate(token_stream(50, :mod, :multiply, 100)).
+      expect(evaluator.evaluate(token_stream(50, :mod))).to eq(0.5)
+      expect(evaluator.evaluate(token_stream(50, :mod, :multiply, 100))).to eq(50)
     end
 
     describe 'maths' do
-      it '
-        evaluator.evaluate(token_stream(1, :add, 1)).
+      it 'performs addition' do
+        expect(evaluator.evaluate(token_stream(1, :add, 1))).to eq(2)
       end
 
-      it '
-        evaluator.evaluate(token_stream(1, :add, 1, :multiply, 5)).
-        evaluator.evaluate(token_stream(2, :add, 10, :mod, 2)).
+      it 'respects order of precedence' do
+        expect(evaluator.evaluate(token_stream(1, :add, 1, :multiply, 5))).to eq(6)
+        expect(evaluator.evaluate(token_stream(2, :add, 10, :mod, 2))).to eq(2)
      end
 
-      it '
-        evaluator.evaluate(token_stream(:open, 1, :add, 1, :close, :multiply, 5)).
+      it 'respects explicit grouping' do
+        expect(evaluator.evaluate(token_stream(:open, 1, :add, 1, :close, :multiply, 5))).to eq(10)
      end
 
-      it '
-        evaluator.evaluate(token_stream(5, :divide, 4)).
+      it 'returns floating point from division when there is a remainder' do
+        expect(evaluator.evaluate(token_stream(5, :divide, 4))).to eq(1.25)
      end
    end
 
    describe 'functions' do
-      it '
-        evaluator.evaluate(token_stream(:round, :open, 5, :divide, 3.0, :close)).
-        evaluator.evaluate(token_stream(:round, :open, 5, :divide, 3.0, :comma, 2, :close)).
-        evaluator.evaluate(token_stream(:roundup, :open, 5, :divide, 1.2, :close)).
-        evaluator.evaluate(token_stream(:rounddown, :open, 5, :divide, 1.2, :close)).
+      it 'is evaluated' do
+        expect(evaluator.evaluate(token_stream(:round, :open, 5, :divide, 3.0, :close))).to eq 2
+        expect(evaluator.evaluate(token_stream(:round, :open, 5, :divide, 3.0, :comma, 2, :close))).to eq 1.67
+        expect(evaluator.evaluate(token_stream(:roundup, :open, 5, :divide, 1.2, :close))).to eq 5
+        expect(evaluator.evaluate(token_stream(:rounddown, :open, 5, :divide, 1.2, :close))).to eq 4
      end
    end
 
    describe 'logic' do
-      it '
-        evaluator.evaluate(token_stream(5, :gt, 1)).
+      it 'evaluates conditional' do
+        expect(evaluator.evaluate(token_stream(5, :gt, 1))).to be_truthy
      end
 
-      it '
+      it 'expands inequality ranges' do
        stream = token_stream(5, :lt, 10, :le, 10)
        expected = token_stream(5, :lt, 10, :and, 10, :le, 10)
-        evaluator.evaluate_step(stream, 0, 5, :expand_range).
+        expect(evaluator.evaluate_step(stream, 0, 5, :expand_range)).to eq(expected)
 
-        evaluator.evaluate(token_stream(5, :lt, 10, :le, 10)).
-        evaluator.evaluate(token_stream(3, :gt, 5, :ge, 1)).
+        expect(evaluator.evaluate(token_stream(5, :lt, 10, :le, 10))).to be_truthy
+        expect(evaluator.evaluate(token_stream(3, :gt, 5, :ge, 1))).to be_falsey
 
-
+        expect { evaluator.evaluate(token_stream(3, :gt, 2, :lt, 1)) }.to raise_error
      end
 
-      it '
-        evaluator.evaluate(token_stream(5, :gt, 1, :or, :false)).
-        evaluator.evaluate(token_stream(5, :gt, 1, :and, :false)).
+      it 'evaluates combined conditionals' do
+        expect(evaluator.evaluate(token_stream(5, :gt, 1, :or, :false))).to be_truthy
+        expect(evaluator.evaluate(token_stream(5, :gt, 1, :and, :false))).to be_falsey
      end
 
-      it '
-        evaluator.evaluate(token_stream(:not, :open, 5, :gt, 1, :or, :false, :close)).
-        evaluator.evaluate(token_stream(:not, :open, 5, :gt, 1, :and, :false, :close)).
+      it 'negates a logical value' do
+        expect(evaluator.evaluate(token_stream(:not, :open, 5, :gt, 1, :or, :false, :close))).to be_falsey
+        expect(evaluator.evaluate(token_stream(:not, :open, 5, :gt, 1, :and, :false, :close))).to be_truthy
      end
    end
  end
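The evaluator specs above drive Dentaku::Evaluator through the token_stream spec helper. A minimal usage sketch of the same pipeline outside the specs, assuming the tokenizer is used to build the token stream:

    require 'dentaku/tokenizer'
    require 'dentaku/evaluator'

    # Tokenize a raw expression, then reduce the token stream to a value,
    # mirroring the explicit-grouping example asserted above.
    tokens = Dentaku::Tokenizer.new.tokenize('(1 + 1) * 5')
    Dentaku::Evaluator.new.evaluate(tokens) # => 10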
data/spec/expression_spec.rb
ADDED
@@ -0,0 +1,25 @@
+require 'spec_helper'
+require 'dentaku/expression'
+
+describe Dentaku::Expression do
+  describe 'an all literal expression' do
+    it 'is fully bound' do
+      static = described_class.new('1 + 1')
+      expect(static).not_to be_unbound
+    end
+  end
+
+  describe 'an expression with variable identifiers' do
+    it 'is unbound' do
+      dynamic = described_class.new('a > 5')
+      expect(dynamic).to be_unbound
+    end
+
+    describe 'with values set for all variables' do
+      it 'is fully bound' do
+        dynamic = described_class.new('a > 5', {a: 7})
+        expect(dynamic).not_to be_unbound
+      end
+    end
+  end
+end
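This new spec exercises the Dentaku::Expression class introduced in 1.1.0 (lib/dentaku/expression.rb, +55 lines). A usage sketch based only on the behaviour the spec asserts:

    require 'dentaku/expression'

    # all-literal expressions are fully bound
    static = Dentaku::Expression.new('1 + 1')
    static.unbound?  # => false

    # identifiers leave the expression unbound until values are supplied
    dynamic = Dentaku::Expression.new('a > 5')
    dynamic.unbound? # => true

    # binding a value for every identifier resolves it
    bound = Dentaku::Expression.new('a > 5', { a: 7 })
    bound.unbound?   # => false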
data/spec/external_function_spec.rb
CHANGED
@@ -34,24 +34,24 @@ describe Dentaku::Calculator do
       c.add_functions(fns)
     end
 
-    it '
+    it 'includes NOW' do
       now = with_external_funcs.evaluate('NOW()')
-      now.
-      now.
+      expect(now).not_to be_nil
+      expect(now).not_to be_empty
     end
 
-    it '
-      with_external_funcs.evaluate('EXP(2,3)').
-      with_external_funcs.evaluate('EXP(3,2)').
-      with_external_funcs.evaluate('EXP(mantissa,exponent)', mantissa: 2, exponent: 4).
+    it 'includes EXP' do
+      expect(with_external_funcs.evaluate('EXP(2,3)')).to eq(8)
+      expect(with_external_funcs.evaluate('EXP(3,2)')).to eq(9)
+      expect(with_external_funcs.evaluate('EXP(mantissa,exponent)', mantissa: 2, exponent: 4)).to eq(16)
     end
 
-    it '
-      with_external_funcs.evaluate('MAX(8,6,7,5,3,0,9)').
+    it 'includes MAX' do
+      expect(with_external_funcs.evaluate('MAX(8,6,7,5,3,0,9)')).to eq(9)
     end
 
-    it '
-      with_external_funcs.evaluate('MIN(8,6,7,5,3,0,9)').
+    it 'includes MIN' do
+      expect(with_external_funcs.evaluate('MIN(8,6,7,5,3,0,9)')).to eq(0)
     end
   end
 end
data/spec/token_matcher_spec.rb
CHANGED
@@ -2,102 +2,102 @@ require 'spec_helper'
 require 'dentaku/token_matcher'
 
 describe Dentaku::TokenMatcher do
-  it 'with single category
+  it 'with single category matches token category' do
     matcher = described_class.new(:numeric)
     token = Dentaku::Token.new(:numeric, 5)
 
-    matcher.
+    expect(matcher).to eq(token)
   end
 
-  it 'with multiple categories
+  it 'with multiple categories matches any included token category' do
     matcher = described_class.new([:comparator, :operator])
     numeric = Dentaku::Token.new(:numeric, 5)
     comparator = Dentaku::Token.new(:comparator, :lt)
     operator = Dentaku::Token.new(:operator, :add)
 
-    matcher.
-    matcher.
-    matcher.
+    expect(matcher).to eq(comparator)
+    expect(matcher).to eq(operator)
+    expect(matcher).not_to eq(numeric)
   end
 
-  it 'with single category and value
+  it 'with single category and value matches token category and value' do
     matcher = described_class.new(:operator, :add)
     addition = Dentaku::Token.new(:operator, :add)
     subtraction = Dentaku::Token.new(:operator, :subtract)
 
-    matcher.
-    matcher.
+    expect(matcher).to eq(addition)
+    expect(matcher).not_to eq(subtraction)
   end
 
-  it 'with multiple values
+  it 'with multiple values matches any included token value' do
     matcher = described_class.new(:operator, [:add, :subtract])
     add = Dentaku::Token.new(:operator, :add)
     sub = Dentaku::Token.new(:operator, :subtract)
     mul = Dentaku::Token.new(:operator, :multiply)
     div = Dentaku::Token.new(:operator, :divide)
 
-    matcher.
-    matcher.
-    matcher.
-    matcher.
+    expect(matcher).to eq(add)
+    expect(matcher).to eq(sub)
+    expect(matcher).not_to eq(mul)
+    expect(matcher).not_to eq(div)
   end
 
-  it '
+  it 'is invertible' do
     matcher = described_class.new(:operator, [:add, :subtract]).invert
     add = Dentaku::Token.new(:operator, :add)
     mul = Dentaku::Token.new(:operator, :multiply)
     cmp = Dentaku::Token.new(:comparator, :lt)
 
-    matcher.
-    matcher.
-    matcher.
+    expect(matcher).not_to eq(add)
+    expect(matcher).to eq(mul)
+    expect(matcher).to eq(cmp)
   end
 
   describe 'stream matching' do
     let(:stream) { token_stream(5, 11, 9, 24, :hello, 8) }
 
-    describe
+    describe 'standard' do
       let(:standard) { described_class.new(:numeric) }
 
-      it '
-        substream = standard.match(stream)
-
-        substream.length.
-        substream.map(&:value).
+      it 'matches zero or more occurrences in a token stream' do
+        matched, substream = standard.match(stream)
+        expect(matched).to be_truthy
+        expect(substream.length).to eq 1
+        expect(substream.map(&:value)).to eq [5]
 
-        substream = standard.match(stream, 4)
-        substream.
-
+        matched, substream = standard.match(stream, 4)
+        expect(substream).to be_empty
+        expect(matched).not_to be_truthy
       end
     end
 
-    describe
+    describe 'star' do
      let(:star) { described_class.new(:numeric).star }
 
-      it '
-        substream = star.match(stream)
-
-        substream.length.
-        substream.map(&:value).
+      it 'matches zero or more occurrences in a token stream' do
+        matched, substream = star.match(stream)
+        expect(matched).to be_truthy
+        expect(substream.length).to eq 4
+        expect(substream.map(&:value)).to eq [5, 11, 9, 24]
 
-        substream = star.match(stream, 4)
-        substream.
-
+        matched, substream = star.match(stream, 4)
+        expect(substream).to be_empty
+        expect(matched).to be_truthy
      end
    end
 
-    describe
+    describe 'plus' do
      let(:plus) { described_class.new(:numeric).plus }
 
-      it '
-        substream = plus.match(stream)
-
-        substream.length.
-        substream.map(&:value).
+      it 'matches one or more occurrences in a token stream' do
+        matched, substream = plus.match(stream)
+        expect(matched).to be_truthy
+        expect(substream.length).to eq 4
+        expect(substream.map(&:value)).to eq [5, 11, 9, 24]
 
-        substream = plus.match(stream, 4)
-        substream.
-
+        matched, substream = plus.match(stream, 4)
+        expect(substream).to be_empty
+        expect(matched).not_to be_truthy
      end
    end
  end
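The updated matcher specs destructure TokenMatcher#match into a flag plus the matched substream. A sketch of that call shape, building the tokens directly since token_stream is a spec helper:

    require 'dentaku/token'
    require 'dentaku/token_matcher'

    stream  = [Dentaku::Token.new(:numeric, 5), Dentaku::Token.new(:numeric, 11)]
    matcher = Dentaku::TokenMatcher.new(:numeric).star

    # per the specs above, match yields a truthiness flag and the matched tokens
    matched, substream = matcher.match(stream)
    substream.map(&:value) # => [5, 11]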
data/spec/token_scanner_spec.rb
CHANGED
@@ -2,26 +2,26 @@ require 'dentaku/token_scanner'
 
 describe Dentaku::TokenScanner do
   let(:whitespace) { described_class.new(:whitespace, '\s') }
-  let(:numeric) { described_class.new(:numeric, '(\d+(\.\d+)?|\.\d+)', lambda{|raw| raw =~ /\./ ? raw
+  let(:numeric) { described_class.new(:numeric, '(\d+(\.\d+)?|\.\d+)', lambda{|raw| raw =~ /\./ ? BigDecimal.new(raw) : raw.to_i }) }
 
-  it '
+  it 'returns a token for a matching string' do
     token = whitespace.scan(' ')
-    token.category.
-    token.value.
+    expect(token.category).to eq(:whitespace)
+    expect(token.value).to eq(' ')
   end
 
-  it '
-    whitespace.scan('A').
+  it 'returns falsy for a non-matching string' do
+    expect(whitespace.scan('A')).not_to be
   end
 
-  it '
+  it 'performs raw value conversion' do
     token = numeric.scan('5')
-    token.category.
-    token.value.
+    expect(token.category).to eq(:numeric)
+    expect(token.value).to eq(5)
   end
 
-  it '
-    described_class.scanners.length.
+  it 'returns a list of all configured scanners' do
+    expect(described_class.scanners.length).to eq 10
   end
 end
 
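The one functional change here is the numeric scanner's conversion lambda, which now produces BigDecimal values for decimal literals (matching the BigDecimal assertions in tokenizer_spec.rb below). A sketch of that conversion, copied from the new let(:numeric) definition:

    require 'bigdecimal'
    require 'dentaku/token_scanner'

    # decimals become BigDecimal, whole numbers become Integer
    numeric = Dentaku::TokenScanner.new(:numeric, '(\d+(\.\d+)?|\.\d+)',
      lambda { |raw| raw =~ /\./ ? BigDecimal.new(raw) : raw.to_i })

    numeric.scan('8.2').value # => BigDecimal equal to 8.2
    numeric.scan('5').value   # => 5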
|
data/spec/token_spec.rb
CHANGED
@@ -1,10 +1,10 @@
 require 'dentaku/token'
 
 describe Dentaku::Token do
-  it '
+  it 'has a category and a value' do
     token = Dentaku::Token.new(:numeric, 5)
-    token.category.
-    token.value.
-    token.is?(:numeric).
+    expect(token.category).to eq(:numeric)
+    expect(token.value).to eq(5)
+    expect(token.is?(:numeric)).to be_truthy
   end
 end
data/spec/tokenizer_spec.rb
CHANGED
@@ -3,117 +3,135 @@ require 'dentaku/tokenizer'
 describe Dentaku::Tokenizer do
   let(:tokenizer) { described_class.new }
 
-  it '
-    tokenizer.tokenize('').
+  it 'handles an empty expression' do
+    expect(tokenizer.tokenize('')).to be_empty
   end
 
-  it '
+  it 'tokenizes addition' do
     tokens = tokenizer.tokenize('1+1')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
+    expect(tokens.map(&:value)).to eq([1, :add, 1])
+  end
+
+  it 'tokenizes comparison with =' do
+    tokens = tokenizer.tokenize('number = 5')
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:number, :eq, 5])
+  end
+
+  it 'tokenizes comparison with =' do
+    tokens = tokenizer.tokenize('number = 5')
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:number, :eq, 5])
+  end
+
+  it 'tokenizes comparison with alternate ==' do
+    tokens = tokenizer.tokenize('number == 5')
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:number, :eq, 5])
   end
 
-  it '
+  it 'ignores whitespace' do
     tokens = tokenizer.tokenize('1 / 1 ')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
+    expect(tokens.map(&:value)).to eq([1, :divide, 1])
   end
 
-  it '
+  it 'handles floating point' do
     tokens = tokenizer.tokenize('1.5 * 3.7')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
+    expect(tokens.map(&:value)).to eq([1.5, :multiply, 3.7])
   end
 
-  it '
+  it 'does not require leading zero' do
     tokens = tokenizer.tokenize('.5 * 3.7')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
+    expect(tokens.map(&:value)).to eq([0.5, :multiply, 3.7])
   end
 
-  it '
+  it 'accepts arbitrary identifiers' do
     tokens = tokenizer.tokenize('monkeys > 1500')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:monkeys, :gt, 1500])
   end
 
-  it '
+  it 'recognizes double-quoted strings' do
     tokens = tokenizer.tokenize('animal = "giraffe"')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :string])
+    expect(tokens.map(&:value)).to eq([:animal, :eq, 'giraffe'])
   end
 
-  it '
+  it 'recognizes single-quoted strings' do
     tokens = tokenizer.tokenize("animal = 'giraffe'")
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :string])
+    expect(tokens.map(&:value)).to eq([:animal, :eq, 'giraffe'])
   end
 
-  it '
+  it 'matches "<=" before "<"' do
     tokens = tokenizer.tokenize('perimeter <= 7500')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:perimeter, :le, 7500])
   end
 
-  it '
+  it 'matches "and" for logical expressions' do
     tokens = tokenizer.tokenize('octopi <= 7500 AND sharks > 1500')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric, :combinator, :identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:octopi, :le, 7500, :and, :sharks, :gt, 1500])
   end
 
-  it '
+  it 'matches "or" for logical expressions' do
     tokens = tokenizer.tokenize('size < 3 or admin = 1')
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric, :combinator, :identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:size, :lt, 3, :or, :admin, :eq, 1])
   end
 
-  it '
-
-
+  it 'detects unbalanced parentheses' do
+    expect { tokenizer.tokenize('(5+3') }.to raise_error
+    expect { tokenizer.tokenize(')') }.to raise_error
   end
 
-  it '
+  it 'recognizes identifiers that share initial substrings with combinators' do
     tokens = tokenizer.tokenize('andover < 10')
-    tokens.length.
-    tokens.map(&:category).
-    tokens.map(&:value).
+    expect(tokens.length).to eq(3)
+    expect(tokens.map(&:category)).to eq([:identifier, :comparator, :numeric])
+    expect(tokens.map(&:value)).to eq([:andover, :lt, 10])
   end
 
   describe 'functions' do
     it 'include IF' do
       tokens = tokenizer.tokenize('if(x < 10, y, z)')
-      tokens.length.
-      tokens.map(&:category).
-      tokens.map(&:value).
+      expect(tokens.length).to eq(10)
+      expect(tokens.map(&:category)).to eq([:function, :grouping, :identifier, :comparator, :numeric, :grouping, :identifier, :grouping, :identifier, :grouping])
+      expect(tokens.map(&:value)).to eq([:if, :open, :x, :lt, 10, :comma, :y, :comma, :z, :close])
     end
 
     it 'include ROUND/UP/DOWN' do
       tokens = tokenizer.tokenize('round(8.2)')
-      tokens.length.
-      tokens.map(&:category).
-      tokens.map(&:value).
+      expect(tokens.length).to eq(4)
+      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
+      expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.2'), :close])
 
       tokens = tokenizer.tokenize('round(8.75, 1)')
-      tokens.length.
-      tokens.map(&:category).
-      tokens.map(&:value).
+      expect(tokens.length).to eq(6)
+      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping, :numeric, :grouping])
+      expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.75'), :comma, 1, :close])
 
       tokens = tokenizer.tokenize('ROUNDUP(8.2)')
-      tokens.length.
-      tokens.map(&:category).
-      tokens.map(&:value).
+      expect(tokens.length).to eq(4)
+      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
+      expect(tokens.map(&:value)).to eq([:roundup, :open, BigDecimal.new('8.2'), :close])
 
       tokens = tokenizer.tokenize('RoundDown(8.2)')
-      tokens.length.
-      tokens.map(&:category).
-      tokens.map(&:value).
+      expect(tokens.length).to eq(4)
+      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
+      expect(tokens.map(&:value)).to eq([:rounddown, :open, BigDecimal.new('8.2'), :close])
     end
 
     it 'include NOT' do
       tokens = tokenizer.tokenize('not(8 < 5)')
-      tokens.length.
-      tokens.map(&:category).
-      tokens.map(&:value).
+      expect(tokens.length).to eq(6)
+      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
+      expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
     end
   end
 end
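Among the tokenizer behaviours asserted above is the new == alias for the = comparator. A quick sketch of that behaviour:

    require 'dentaku/tokenizer'

    tokens = Dentaku::Tokenizer.new.tokenize('number == 5')
    tokens.map(&:category) # => [:identifier, :comparator, :numeric]
    tokens.map(&:value)    # => [:number, :eq, 5]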