dentaku 3.2.0 → 3.5.1

Files changed (100)
  1. checksums.yaml +5 -5
  2. data/.rubocop.yml +5 -10
  3. data/.travis.yml +4 -6
  4. data/CHANGELOG.md +86 -2
  5. data/README.md +7 -6
  6. data/dentaku.gemspec +1 -1
  7. data/lib/dentaku/ast/access.rb +21 -1
  8. data/lib/dentaku/ast/arithmetic.rb +51 -15
  9. data/lib/dentaku/ast/array.rb +41 -0
  10. data/lib/dentaku/ast/bitwise.rb +30 -5
  11. data/lib/dentaku/ast/case/case_conditional.rb +17 -2
  12. data/lib/dentaku/ast/case/case_else.rb +17 -3
  13. data/lib/dentaku/ast/case/case_switch_variable.rb +14 -0
  14. data/lib/dentaku/ast/case/case_then.rb +17 -3
  15. data/lib/dentaku/ast/case/case_when.rb +21 -3
  16. data/lib/dentaku/ast/case.rb +19 -3
  17. data/lib/dentaku/ast/comparators.rb +38 -28
  18. data/lib/dentaku/ast/function.rb +11 -3
  19. data/lib/dentaku/ast/function_registry.rb +21 -0
  20. data/lib/dentaku/ast/functions/all.rb +23 -0
  21. data/lib/dentaku/ast/functions/and.rb +2 -2
  22. data/lib/dentaku/ast/functions/any.rb +23 -0
  23. data/lib/dentaku/ast/functions/avg.rb +2 -2
  24. data/lib/dentaku/ast/functions/count.rb +8 -0
  25. data/lib/dentaku/ast/functions/duration.rb +51 -0
  26. data/lib/dentaku/ast/functions/enum.rb +37 -0
  27. data/lib/dentaku/ast/functions/filter.rb +23 -0
  28. data/lib/dentaku/ast/functions/if.rb +19 -2
  29. data/lib/dentaku/ast/functions/map.rb +23 -0
  30. data/lib/dentaku/ast/functions/or.rb +4 -4
  31. data/lib/dentaku/ast/functions/pluck.rb +30 -0
  32. data/lib/dentaku/ast/functions/round.rb +1 -1
  33. data/lib/dentaku/ast/functions/rounddown.rb +1 -1
  34. data/lib/dentaku/ast/functions/roundup.rb +1 -1
  35. data/lib/dentaku/ast/functions/ruby_math.rb +50 -3
  36. data/lib/dentaku/ast/functions/string_functions.rb +105 -12
  37. data/lib/dentaku/ast/functions/xor.rb +44 -0
  38. data/lib/dentaku/ast/grouping.rb +3 -1
  39. data/lib/dentaku/ast/identifier.rb +16 -4
  40. data/lib/dentaku/ast/literal.rb +10 -0
  41. data/lib/dentaku/ast/negation.rb +7 -1
  42. data/lib/dentaku/ast/nil.rb +4 -0
  43. data/lib/dentaku/ast/node.rb +8 -0
  44. data/lib/dentaku/ast/operation.rb +17 -0
  45. data/lib/dentaku/ast/string.rb +7 -0
  46. data/lib/dentaku/ast.rb +8 -0
  47. data/lib/dentaku/bulk_expression_solver.rb +38 -27
  48. data/lib/dentaku/calculator.rb +21 -8
  49. data/lib/dentaku/date_arithmetic.rb +45 -0
  50. data/lib/dentaku/exceptions.rb +11 -8
  51. data/lib/dentaku/flat_hash.rb +9 -2
  52. data/lib/dentaku/parser.rb +57 -16
  53. data/lib/dentaku/print_visitor.rb +101 -0
  54. data/lib/dentaku/token_matcher.rb +1 -1
  55. data/lib/dentaku/token_scanner.rb +9 -3
  56. data/lib/dentaku/tokenizer.rb +7 -2
  57. data/lib/dentaku/version.rb +1 -1
  58. data/lib/dentaku/visitor/infix.rb +82 -0
  59. data/lib/dentaku.rb +20 -7
  60. data/spec/ast/addition_spec.rb +7 -1
  61. data/spec/ast/all_spec.rb +25 -0
  62. data/spec/ast/and_function_spec.rb +6 -6
  63. data/spec/ast/and_spec.rb +1 -1
  64. data/spec/ast/any_spec.rb +23 -0
  65. data/spec/ast/arithmetic_spec.rb +64 -29
  66. data/spec/ast/avg_spec.rb +9 -5
  67. data/spec/ast/comparator_spec.rb +31 -1
  68. data/spec/ast/count_spec.rb +7 -7
  69. data/spec/ast/division_spec.rb +7 -1
  70. data/spec/ast/filter_spec.rb +25 -0
  71. data/spec/ast/function_spec.rb +20 -15
  72. data/spec/ast/map_spec.rb +27 -0
  73. data/spec/ast/max_spec.rb +16 -3
  74. data/spec/ast/min_spec.rb +16 -3
  75. data/spec/ast/mul_spec.rb +11 -6
  76. data/spec/ast/negation_spec.rb +48 -0
  77. data/spec/ast/node_spec.rb +11 -8
  78. data/spec/ast/numeric_spec.rb +1 -1
  79. data/spec/ast/or_spec.rb +7 -7
  80. data/spec/ast/pluck_spec.rb +32 -0
  81. data/spec/ast/round_spec.rb +14 -4
  82. data/spec/ast/rounddown_spec.rb +14 -4
  83. data/spec/ast/roundup_spec.rb +14 -4
  84. data/spec/ast/string_functions_spec.rb +73 -0
  85. data/spec/ast/sum_spec.rb +11 -6
  86. data/spec/ast/switch_spec.rb +5 -5
  87. data/spec/ast/xor_spec.rb +35 -0
  88. data/spec/bulk_expression_solver_spec.rb +37 -1
  89. data/spec/calculator_spec.rb +341 -32
  90. data/spec/dentaku_spec.rb +19 -6
  91. data/spec/external_function_spec.rb +32 -6
  92. data/spec/parser_spec.rb +100 -123
  93. data/spec/print_visitor_spec.rb +66 -0
  94. data/spec/spec_helper.rb +6 -4
  95. data/spec/token_matcher_spec.rb +8 -8
  96. data/spec/token_scanner_spec.rb +4 -4
  97. data/spec/tokenizer_spec.rb +56 -13
  98. data/spec/visitor/infix_spec.rb +31 -0
  99. data/spec/visitor_spec.rb +138 -0
  100. metadata +52 -7
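
Most of the spec changes in this release swap hand-assembled Dentaku::Token arrays for parsing real expression strings, and add coverage for the new PrintVisitor and Visitor::Infix helpers. As orientation, here is a minimal sketch of the tokenize → parse → evaluate pipeline those specs exercise; the calls and expected values are taken from the examples in the diffs below:

    require 'dentaku/tokenizer'
    require 'dentaku/parser'
    require 'dentaku/print_visitor'

    # Tokenize a formula, parse it into an AST, then evaluate it with
    # string-keyed variable bindings -- the calling convention used
    # throughout the updated specs.
    tokens = Dentaku::Tokenizer.new.tokenize('5 * x')
    node   = Dentaku::Parser.new(tokens).parse
    node.value("x" => 3)                # => 15

    # The new PrintVisitor renders an AST back into a formula string.
    ast = Dentaku::Parser.new(Dentaku::Tokenizer.new.tokenize('5+4')).parse
    Dentaku::PrintVisitor.new(ast).to_s # => "5 + 4"
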
data/spec/parser_spec.rb CHANGED
@@ -1,189 +1,166 @@
 require 'spec_helper'
 require 'dentaku/token'
+require 'dentaku/tokenizer'
 require 'dentaku/parser'
 
 describe Dentaku::Parser do
-  it 'is constructed from a token' do
-    token = Dentaku::Token.new(:numeric, 5)
-    node = described_class.new([token]).parse
-    expect(node.value).to eq 5
+  it 'parses an integer literal' do
+    node = parse('5')
+    expect(node.value).to eq(5)
   end
 
   it 'performs simple addition' do
-    five = Dentaku::Token.new(:numeric, 5)
-    plus = Dentaku::Token.new(:operator, :add)
-    four = Dentaku::Token.new(:numeric, 4)
-
-    node = described_class.new([five, plus, four]).parse
-    expect(node.value).to eq 9
+    node = parse('5 + 4')
+    expect(node.value).to eq(9)
   end
 
   it 'compares two numbers' do
-    five = Dentaku::Token.new(:numeric, 5)
-    lt = Dentaku::Token.new(:comparator, :lt)
-    four = Dentaku::Token.new(:numeric, 4)
-
-    node = described_class.new([five, lt, four]).parse
-    expect(node.value).to eq false
+    node = parse('5 < 4')
+    expect(node.value).to eq(false)
   end
 
   it 'calculates unary percentage' do
-    five = Dentaku::Token.new(:numeric, 5)
-    mod = Dentaku::Token.new(:operator, :mod)
-
-    node = described_class.new([five, mod]).parse
-    expect(node.value).to eq 0.05
+    node = parse('5%')
+    expect(node.value).to eq(0.05)
   end
 
   it 'calculates bitwise OR' do
-    two = Dentaku::Token.new(:numeric, 2)
-    bitor = Dentaku::Token.new(:operator, :bitor)
-    three = Dentaku::Token.new(:numeric, 3)
-
-    node = described_class.new([two, bitor, three]).parse
-    expect(node.value).to eq 3
+    node = parse('2|3')
+    expect(node.value).to eq(3)
   end
 
   it 'performs multiple operations in one stream' do
-    five = Dentaku::Token.new(:numeric, 5)
-    plus = Dentaku::Token.new(:operator, :add)
-    four = Dentaku::Token.new(:numeric, 4)
-    times = Dentaku::Token.new(:operator, :multiply)
-    three = Dentaku::Token.new(:numeric, 3)
-
-    node = described_class.new([five, plus, four, times, three]).parse
-    expect(node.value).to eq 17
+    node = parse('5 * 4 + 3')
+    expect(node.value).to eq(23)
   end
 
   it 'respects order of operations' do
-    five = Dentaku::Token.new(:numeric, 5)
-    times = Dentaku::Token.new(:operator, :multiply)
-    four = Dentaku::Token.new(:numeric, 4)
-    plus = Dentaku::Token.new(:operator, :add)
-    three = Dentaku::Token.new(:numeric, 3)
-
-    node = described_class.new([five, times, four, plus, three]).parse
-    expect(node.value).to eq 23
+    node = parse('5 + 4*3')
+    expect(node.value).to eq(17)
   end
 
   it 'respects grouping by parenthesis' do
-    lpar = Dentaku::Token.new(:grouping, :open)
-    five = Dentaku::Token.new(:numeric, 5)
-    plus = Dentaku::Token.new(:operator, :add)
-    four = Dentaku::Token.new(:numeric, 4)
-    rpar = Dentaku::Token.new(:grouping, :close)
-    times = Dentaku::Token.new(:operator, :multiply)
-    three = Dentaku::Token.new(:numeric, 3)
-
-    node = described_class.new([lpar, five, plus, four, rpar, times, three]).parse
-    expect(node.value).to eq 27
+    node = parse('(5 + 4) * 3')
+    expect(node.value).to eq(27)
   end
 
   it 'evaluates functions' do
-    fn = Dentaku::Token.new(:function, :if)
-    fopen = Dentaku::Token.new(:grouping, :open)
-    five = Dentaku::Token.new(:numeric, 5)
-    lt = Dentaku::Token.new(:comparator, :lt)
-    four = Dentaku::Token.new(:numeric, 4)
-    comma = Dentaku::Token.new(:grouping, :comma)
-    three = Dentaku::Token.new(:numeric, 3)
-    two = Dentaku::Token.new(:numeric, 2)
-    rpar = Dentaku::Token.new(:grouping, :close)
-
-    node = described_class.new([fn, fopen, five, lt, four, comma, three, comma, two, rpar]).parse
-    expect(node.value).to eq 2
+    node = parse('IF(5 < 4, 3, 2)')
+    expect(node.value).to eq(2)
   end
 
   it 'represents formulas with variables' do
-    five = Dentaku::Token.new(:numeric, 5)
-    times = Dentaku::Token.new(:operator, :multiply)
-    x = Dentaku::Token.new(:identifier, :x)
-
-    node = described_class.new([five, times, x]).parse
+    node = parse('5 * x')
     expect { node.value }.to raise_error(Dentaku::UnboundVariableError)
-    expect(node.value(x: 3)).to eq 15
+    expect(node.value("x" => 3)).to eq(15)
   end
 
   it 'evaluates access into data structures' do
-    a = token(:a)
-    lbracket = token(:lbracket)
-    one = token(1)
-    rbracket = token(:rbracket)
-
-    node = described_class.new([a, lbracket, one, rbracket]).parse
+    node = parse('a[1]')
     expect { node.value }.to raise_error(Dentaku::UnboundVariableError)
-    expect(node.value(a: [1, 2, 3])).to eq 2
+    expect(node.value("a" => [1, 2, 3])).to eq(2)
   end
 
   it 'evaluates boolean expressions' do
-    d_true = Dentaku::Token.new(:logical, true)
-    d_and = Dentaku::Token.new(:combinator, :and)
-    d_false = Dentaku::Token.new(:logical, false)
-
-    node = described_class.new([d_true, d_and, d_false]).parse
-    expect(node.value).to eq false
+    node = parse('true AND false')
+    expect(node.value).to eq(false)
   end
 
   it 'evaluates a case statement' do
-    case_start = Dentaku::Token.new(:case, :open)
-    x = Dentaku::Token.new(:identifier, :x)
-    case_when1 = Dentaku::Token.new(:case, :when)
-    one = Dentaku::Token.new(:numeric, 1)
-    case_then1 = Dentaku::Token.new(:case, :then)
-    two = Dentaku::Token.new(:numeric, 2)
-    case_when2 = Dentaku::Token.new(:case, :when)
-    three = Dentaku::Token.new(:numeric, 3)
-    case_then2 = Dentaku::Token.new(:case, :then)
-    four = Dentaku::Token.new(:numeric, 4)
-    case_close = Dentaku::Token.new(:case, :close)
-
-    node = described_class.new(
-      [case_start,
-       x,
-       case_when1,
-       one,
-       case_then1,
-       two,
-       case_when2,
-       three,
-       case_then2,
-       four,
-       case_close]).parse
-    expect(node.value(x: 3)).to eq(4)
+    node = parse('CASE x WHEN 1 THEN 2 WHEN 3 THEN 4 END')
+    expect(node.value("x" => 3)).to eq(4)
+  end
+
+  it 'evaluates a nested case statement with case-sensitivity' do
+    node = parse('CASE x WHEN 1 THEN CASE Y WHEN "A" THEN 2 WHEN "B" THEN 3 END END', { case_sensitive: true }, { case_sensitive: true })
+    expect(node.value("x" => 1, "y" => "A", "Y" => "B")).to eq(3)
+  end
+
+  it 'evaluates arrays' do
+    node = parse('{1, 2, 3}')
+    expect(node.value).to eq([1, 2, 3])
   end
 
   context 'invalid expression' do
     it 'raises a parse error for bad math' do
-      five = Dentaku::Token.new(:numeric, 5)
-      times = Dentaku::Token.new(:operator, :multiply)
-      minus = Dentaku::Token.new(:operator, :subtract)
-
       expect {
-        described_class.new([five, times, minus]).parse
+        parse("5 * -")
       }.to raise_error(Dentaku::ParseError)
     end
 
     it 'raises a parse error for bad logic' do
-      this = Dentaku::Token.new(:logical, true)
-      also = Dentaku::Token.new(:combinator, :and)
+      expect {
+        parse("TRUE AND")
+      }.to raise_error(Dentaku::ParseError)
+    end
 
+    it 'raises a parse error for too many operands' do
       expect {
-        described_class.new([this, also]).parse
+        parse("IF(1, 0, IF(1, 2, 3, 4))")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("CASE a WHEN 1 THEN true ELSE THEN IF(1, 2, 3, 4) END")
       }.to raise_error(Dentaku::ParseError)
     end
 
-    it 'raises an exception when trying to access an undefined function' do
-      fn = Dentaku::Token.new(:function, 'non_exists_func')
+    it 'raises a parse error for bad grouping structure' do
+      expect {
+        parse(",")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("5, x")
+        described_class.new([five, comma, x]).parse
+      }.to raise_error(Dentaku::ParseError)
 
       expect {
-        described_class.new([fn]).parse
+        parse("5 + 5, x")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("{1, 2, }")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("CONCAT('1', '2', )")
+      }.to raise_error(Dentaku::ParseError)
+    end
+
+    it 'raises parse errors for malformed case statements' do
+      expect {
+        parse("CASE a when 'one' then 1")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("case a whend 'one' then 1 end")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("CASE a WHEN 'one' THEND 1 END")
+      }.to raise_error(Dentaku::ParseError)
+
+      expect {
+        parse("CASE a when 'one' then end")
+      }.to raise_error(Dentaku::ParseError)
+    end
+
+    it 'raises a parse error when trying to access an undefined function' do
+      expect {
+        parse("undefined()")
       }.to raise_error(Dentaku::ParseError)
     end
   end
 
-  it "evaluates explicit 'NULL' as a Nil" do
-    null = Dentaku::Token.new(:null, nil)
-    node = described_class.new([null]).parse
+  it "evaluates explicit 'NULL' as nil" do
+    node = parse("NULL")
     expect(node.value).to eq(nil)
   end
+
+  private
+
+  def parse(expr, parser_options = {}, tokenizer_options = {})
+    tokens = Dentaku::Tokenizer.new.tokenize(expr, tokenizer_options)
+    described_class.new(tokens, parser_options).parse
+  end
 end
data/spec/print_visitor_spec.rb ADDED
@@ -0,0 +1,66 @@
+require 'dentaku/print_visitor'
+require 'dentaku/tokenizer'
+require 'dentaku/parser'
+
+describe Dentaku::PrintVisitor do
+  it 'prints a representation of an AST' do
+    repr = roundtrip('5+4')
+    expect(repr).to eq('5 + 4')
+  end
+
+  it 'quotes string literals' do
+    repr = roundtrip('Concat(\'a\', "B")')
+    expect(repr).to eq('CONCAT("a", "B")')
+  end
+
+  it 'handles unary operations on literals' do
+    repr = roundtrip('- 4')
+    expect(repr).to eq('-4')
+  end
+
+  it 'handles unary operations on trees' do
+    repr = roundtrip('- (5 + 5)')
+    expect(repr).to eq('-(5 + 5)')
+  end
+
+  it 'handles a complex arithmetic expression' do
+    repr = roundtrip('(((1 + 7) * (8 ^ 2)) / - (3.0 - apples))')
+    expect(repr).to eq('(1 + 7) * 8 ^ 2 / -(3.0 - apples)')
+  end
+
+  it 'handles a complex logical expression' do
+    repr = roundtrip('1 < 2 and 3 <= 4 or 5 > 6 AND 7 >= 8 OR 9 != 10 and true')
+    expect(repr).to eq('1 < 2 and 3 <= 4 or 5 > 6 and 7 >= 8 or 9 != 10 and true')
+  end
+
+  it 'handles a function call' do
+    repr = roundtrip('IF(a[0] = NULL, "five", \'seven\')')
+    expect(repr).to eq('IF(a[0] = NULL, "five", "seven")')
+  end
+
+  it 'handles a case statement' do
+    repr = roundtrip('case (a % 5) when 0 then a else b end')
+    expect(repr).to eq('CASE a % 5 WHEN 0 THEN a ELSE b END')
+  end
+
+  it 'handles a bitwise operators' do
+    repr = roundtrip('0xCAFE & 0xDECAF | 0xBEEF')
+    expect(repr).to eq('0xCAFE & 0xDECAF | 0xBEEF')
+  end
+
+  it 'handles a datetime literal' do
+    repr = roundtrip('2017-12-24 23:59:59')
+    expect(repr).to eq('2017-12-24 23:59:59')
+  end
+
+  private
+
+  def roundtrip(string)
+    described_class.new(parsed(string)).to_s
+  end
+
+  def parsed(string)
+    tokens = Dentaku::Tokenizer.new.tokenize(string)
+    Dentaku::Parser.new(tokens).parse
+  end
+end
data/spec/spec_helper.rb CHANGED
@@ -16,9 +16,11 @@ end
 
 RSpec.configure do |c|
   c.before(:all) {
-    # add example for alias because we can set aliases just once
-    # before `calculator` method called
-    Dentaku.aliases = { roundup: ['roundupup'] }
+    if Dentaku.respond_to?(:aliases=)
+      # add example for alias because we can set aliases just once
+      # before `calculator` method called
+      Dentaku.aliases = { roundup: ['roundupup'] }
+    end
   }
 end
 
@@ -45,7 +47,7 @@ def type_for(value)
     :grouping
   when :lbracket, :rbracket
     :access
-  when :le, :ge, :ne, :ne, :lt, :gt, :eq
+  when :le, :ge, :ne, :lt, :gt, :eq
     :comparator
   when :and, :or
     :combinator
data/spec/token_matcher_spec.rb CHANGED
@@ -82,8 +82,8 @@ describe Dentaku::TokenMatcher do
     it 'matches zero or more occurrences in a token stream' do
       matched, substream = standard.match(stream)
       expect(matched).to be_truthy
-      expect(substream.length).to eq 1
-      expect(substream.map(&:value)).to eq [5]
+      expect(substream.length).to eq(1)
+      expect(substream.map(&:value)).to eq([5])
 
       matched, substream = standard.match(stream, 4)
       expect(substream).to be_empty
@@ -97,8 +97,8 @@ describe Dentaku::TokenMatcher do
     it 'matches zero or more occurrences in a token stream' do
      matched, substream = star.match(stream)
       expect(matched).to be_truthy
-      expect(substream.length).to eq 4
-      expect(substream.map(&:value)).to eq [5, 11, 9, 24]
+      expect(substream.length).to eq(4)
+      expect(substream.map(&:value)).to eq([5, 11, 9, 24])
 
       matched, substream = star.match(stream, 4)
       expect(substream).to be_empty
@@ -112,8 +112,8 @@ describe Dentaku::TokenMatcher do
     it 'matches one or more occurrences in a token stream' do
       matched, substream = plus.match(stream)
       expect(matched).to be_truthy
-      expect(substream.length).to eq 4
-      expect(substream.map(&:value)).to eq [5, 11, 9, 24]
+      expect(substream.length).to eq(4)
+      expect(substream.map(&:value)).to eq([5, 11, 9, 24])
 
       matched, substream = plus.match(stream, 4)
       expect(substream).to be_empty
@@ -126,8 +126,8 @@ describe Dentaku::TokenMatcher do
       stream = token_stream(1, :comma, 2, :comma, true, :comma, 'olive', :comma, :'(')
       matched, substream = described_class.arguments.match(stream)
       expect(matched).to be_truthy
-      expect(substream.length).to eq 8
-      expect(substream.map(&:value)).to eq [1, :comma, 2, :comma, true, :comma, 'olive', :comma]
+      expect(substream.length).to eq(8)
+      expect(substream.map(&:value)).to eq([1, :comma, 2, :comma, true, :comma, 'olive', :comma])
     end
   end
 end
data/spec/token_scanner_spec.rb CHANGED
@@ -3,7 +3,7 @@ require 'dentaku/token_scanner'
 describe Dentaku::TokenScanner do
   let(:whitespace) { described_class.new(:whitespace, '\s') }
   let(:numeric) { described_class.new(:numeric, '(\d+(\.\d+)?|\.\d+)',
-                  ->(raw) { raw =~ /\./ ? BigDecimal.new(raw) : raw.to_i })
+                  ->(raw) { raw =~ /\./ ? BigDecimal(raw) : raw.to_i })
   }
   let(:custom) { described_class.new(:identifier, '#\w+\b',
                  ->(raw) { raw.gsub('#', '').to_sym })
@@ -29,18 +29,18 @@ describe Dentaku::TokenScanner do
 
   it 'allows customizing available scanners' do
     described_class.scanners = [:whitespace, :numeric]
-    expect(described_class.scanners.length).to eq 2
+    expect(described_class.scanners.length).to eq(2)
   end
 
   it 'ignores invalid scanners' do
     described_class.scanners = [:whitespace, :numeric, :fake]
-    expect(described_class.scanners.length).to eq 2
+    expect(described_class.scanners.length).to eq(2)
   end
 
   it 'uses a custom scanner' do
     described_class.scanners = [:whitespace, :numeric]
     described_class.register_scanner(:custom, custom)
-    expect(described_class.scanners.length).to eq 3
+    expect(described_class.scanners.length).to eq(3)
 
     token = custom.scan('#apple + #pear').first
     expect(token.category).to eq(:identifier)
data/spec/tokenizer_spec.rb CHANGED
@@ -25,6 +25,12 @@ describe Dentaku::Tokenizer do
     expect(tokens.map(&:category)).to eq([:numeric])
     expect(tokens.map(&:value)).to eq([6.02e23])
+
+    tokens = tokenizer.tokenize('6E23')
+    expect(tokens.map(&:value)).to eq([0.6e24])
+
+    tokens = tokenizer.tokenize('6e-23')
+    expect(tokens.map(&:value)).to eq([0.6e-22])
   end
 
   it 'tokenizes addition' do
@@ -83,6 +89,18 @@ describe Dentaku::Tokenizer do
     expect(tokens.map(&:value)).to eq([2, :bitand, 3])
   end
 
+  it 'tokenizes bitwise SHIFT LEFT' do
+    tokens = tokenizer.tokenize('2 << 3')
+    expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
+    expect(tokens.map(&:value)).to eq([2, :bitshiftleft, 3])
+  end
+
+  it 'tokenizes bitwise SHIFT RIGHT' do
+    tokens = tokenizer.tokenize('2 >> 3')
+    expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
+    expect(tokens.map(&:value)).to eq([2, :bitshiftright, 3])
+  end
+
   it 'ignores whitespace' do
     tokens = tokenizer.tokenize('1 / 1 ')
     expect(tokens.map(&:category)).to eq([:numeric, :operator, :numeric])
@@ -179,6 +197,12 @@ describe Dentaku::Tokenizer do
     expect(tokens.map(&:value)).to eq(['size', :lt, 3, :or, 'admin', :eq, 1])
   end
 
+  it 'tokenizes curly brackets for array literals' do
+    tokens = tokenizer.tokenize('{}')
+    expect(tokens.map(&:category)).to eq(%i(array array))
+    expect(tokens.map(&:value)).to eq(%i(array_start array_end))
+  end
+
   it 'tokenizes square brackets for data structure access' do
     tokens = tokenizer.tokenize('a[1]')
     expect(tokens.map(&:category)).to eq(%i(identifier access numeric access))
@@ -225,43 +249,62 @@ describe Dentaku::Tokenizer do
     ])
   end
 
-  describe 'functions' do
-    it 'include IF' do
+  describe 'tokenizing function calls' do
+    it 'handles IF' do
       tokens = tokenizer.tokenize('if(x < 10, y, z)')
       expect(tokens.length).to eq(10)
       expect(tokens.map(&:category)).to eq([:function, :grouping, :identifier, :comparator, :numeric, :grouping, :identifier, :grouping, :identifier, :grouping])
       expect(tokens.map(&:value)).to eq([:if, :open, 'x', :lt, 10, :comma, 'y', :comma, 'z', :close])
     end
 
-    it 'include ROUND/UP/DOWN' do
+    it 'handles ROUND/UP/DOWN' do
       tokens = tokenizer.tokenize('round(8.2)')
       expect(tokens.length).to eq(4)
       expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
-      expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.2'), :close])
+      expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal('8.2'), :close])
 
       tokens = tokenizer.tokenize('round(8.75, 1)')
       expect(tokens.length).to eq(6)
       expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping, :numeric, :grouping])
-      expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal.new('8.75'), :comma, 1, :close])
+      expect(tokens.map(&:value)).to eq([:round, :open, BigDecimal('8.75'), :comma, 1, :close])
 
       tokens = tokenizer.tokenize('ROUNDUP(8.2)')
       expect(tokens.length).to eq(4)
       expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
-      expect(tokens.map(&:value)).to eq([:roundup, :open, BigDecimal.new('8.2'), :close])
+      expect(tokens.map(&:value)).to eq([:roundup, :open, BigDecimal('8.2'), :close])
 
       tokens = tokenizer.tokenize('RoundDown(8.2)')
       expect(tokens.length).to eq(4)
       expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :grouping])
-      expect(tokens.map(&:value)).to eq([:rounddown, :open, BigDecimal.new('8.2'), :close])
+      expect(tokens.map(&:value)).to eq([:rounddown, :open, BigDecimal('8.2'), :close])
     end
 
-    it 'include NOT' do
+    it 'handles NOT' do
      tokens = tokenizer.tokenize('not(8 < 5)')
      expect(tokens.length).to eq(6)
      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :comparator, :numeric, :grouping])
      expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
     end
 
+    it 'handles ANY/ALL' do
+      %i( any all ).each do |fn|
+        tokens = tokenizer.tokenize("#{fn}(users, u, u.age > 18)")
+        expect(tokens.length).to eq(10)
+        expect(tokens.map { |t| [t.category, t.value] }).to eq([
+          [:function,   fn      ], # function call (any/all)
+          [:grouping,   :open   ], # (
+          [:identifier, "users" ], # users
+          [:grouping,   :comma  ], # ,
+          [:identifier, "u"     ], # u
+          [:grouping,   :comma  ], # ,
+          [:identifier, "u.age" ], # u.age
+          [:comparator, :gt     ], # >
+          [:numeric,    18      ], # 18
+          [:grouping,   :close  ]  # )
+        ])
+      end
+    end
+
    it 'handles whitespace after function name' do
      tokens = tokenizer.tokenize('not (8 < 5)')
      expect(tokens.length).to eq(6)
@@ -269,7 +312,7 @@ describe Dentaku::Tokenizer do
      expect(tokens.map(&:value)).to eq([:not, :open, 8, :lt, 5, :close])
    end
 
-    it 'can end with a bang' do
+    it 'handles when function ends with a bang' do
      tokens = tokenizer.tokenize('exp!(5 * 3)')
      expect(tokens.length).to eq(6)
      expect(tokens.map(&:category)).to eq([:function, :grouping, :numeric, :operator, :numeric, :grouping])
@@ -292,25 +335,25 @@ describe Dentaku::Tokenizer do
    it 'replaced with function name' do
      input = 'rrrrround!(8.2) + minimo(4,6,2)'
      tokenizer.tokenize(input, aliases: aliases)
-      expect(tokenizer.replace_aliases(input)).to eq 'round(8.2) + min(4,6,2)'
+      expect(tokenizer.replace_aliases(input)).to eq('round(8.2) + min(4,6,2)')
    end
 
    it 'case insensitive' do
      input = 'MinImO(4,6,2)'
      tokenizer.tokenize(input, aliases: aliases)
-      expect(tokenizer.replace_aliases(input)).to eq 'min(4,6,2)'
+      expect(tokenizer.replace_aliases(input)).to eq('min(4,6,2)')
    end
 
    it 'replace only whole aliases without word parts' do
      input = 'maximo(2,minimoooo())' # `minimoooo` doesn't match `minimo`
      tokenizer.tokenize(input, aliases: aliases)
-      expect(tokenizer.replace_aliases(input)).to eq 'max(2,minimoooo())'
+      expect(tokenizer.replace_aliases(input)).to eq('max(2,minimoooo())')
    end
 
    it 'work with non-latin symbols' do
      input = '如果(1,2,3)'
      tokenizer.tokenize(input, aliases: aliases)
-      expect(tokenizer.replace_aliases(input)).to eq 'if(1,2,3)'
+      expect(tokenizer.replace_aliases(input)).to eq('if(1,2,3)')
    end
  end
 end
data/spec/visitor/infix_spec.rb ADDED
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+require 'dentaku/visitor/infix'
+
+class ArrayProcessor
+  attr_reader :expression
+  include Dentaku::Visitor::Infix
+
+  def initialize
+    @expression = []
+  end
+
+  def process(node)
+    @expression << node.to_s
+  end
+end
+
+RSpec.describe Dentaku::Visitor::Infix do
+  it 'generates array representation of operation' do
+    processor = ArrayProcessor.new
+    processor.visit(ast('5 + 3'))
+    expect(processor.expression).to eq ['5', '+', '3']
+  end
+
+  private
+
+  def ast(expression)
+    tokens = Dentaku::Tokenizer.new.tokenize(expression)
+    Dentaku::Parser.new(tokens).parse
+  end
+end