sparkql 1.1.17 → 1.2.0
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +8 -8
- data/CHANGELOG.md +4 -0
- data/GRAMMAR.md +16 -3
- data/VERSION +1 -1
- data/lib/sparkql/function_resolver.rb +42 -164
- data/lib/sparkql/lexer.rb +0 -3
- data/lib/sparkql/parser.rb +146 -99
- data/lib/sparkql/parser.y +21 -5
- data/lib/sparkql/parser_compatibility.rb +11 -17
- data/lib/sparkql/parser_tools.rb +60 -25
- data/test/unit/function_resolver_test.rb +16 -16
- data/test/unit/parser_compatability_test.rb +17 -0
- data/test/unit/parser_test.rb +71 -25
- metadata +2 -2
data/lib/sparkql/parser.rb
CHANGED
@@ -16,72 +16,86 @@ module Sparkql
 ##### State transition tables begin ###
 
 racc_action_table = [
-
-
-
-
-
-
-    35,
-
-    45, 46, 47, 14, nil,
-
-
-
+    36, 14, 72, 44, 45, 35, 46, 47, 48, 14,
+    58, 32, 33, 34, 37, 38, 39, 40, 41, 36,
+    22, 62, 18, 19, 35, 63, 10, 11, 14, 23,
+    32, 33, 34, 37, 38, 39, 40, 41, 36, 17,
+    16, 61, 50, 35, 51, 10, 11, 14, 15, 32,
+    33, 34, 37, 38, 39, 40, 41, 36, 17, 16,
+    75, 57, 35, 66, 76, -35, 14, nil, 32, 33,
+    34, 37, 38, 39, 40, 41, 36, 14, nil, 44,
+    45, 35, 46, 47, 48, 14, nil, 32, 33, 34,
+    37, 38, 39, 40, 41, 36, nil, nil, 36, nil,
+    35, nil, nil, 35, 14, nil, 32, 33, 34, 32,
+    33, 34, 36, nil, 8, nil, 9, 35, 10, 11,
+    14, nil, nil, 32, 33, 34, 8, nil, 9, nil,
+    10, 11, 14, 9, nil, 10, 11, 14, 9, nil,
+    10, 11, 14, 9, nil, 10, 11, 14 ]
 
 racc_action_check = [
-
-
-
-
-
-
-
-
-
-
-
-
+    76, 61, 59, 61, 61, 76, 61, 61, 61, 76,
+    30, 76, 76, 76, 76, 76, 76, 76, 76, 63,
+    13, 52, 6, 6, 63, 52, 63, 63, 63, 15,
+    63, 63, 63, 63, 63, 63, 63, 63, 22, 21,
+    21, 43, 21, 22, 22, 22, 22, 22, 1, 22,
+    22, 22, 22, 22, 22, 22, 22, 58, 2, 2,
+    67, 29, 58, 58, 67, 28, 58, nil, 58, 58,
+    58, 58, 58, 58, 58, 58, 18, 19, nil, 19,
+    19, 18, 19, 19, 19, 18, nil, 18, 18, 18,
+    18, 18, 18, 18, 18, 57, nil, nil, 36, nil,
+    57, nil, nil, 36, 57, nil, 57, 57, 57, 36,
+    36, 36, 35, nil, 9, nil, 9, 35, 9, 9,
+    9, nil, nil, 35, 35, 35, 0, nil, 0, nil,
+    0, 0, 0, 17, nil, 17, 17, 17, 16, nil,
+    16, 16, 16, 8, nil, 8, 8, 8 ]
 
 racc_action_pointer = [
-
-    nil, nil, nil,
-    nil,
-    nil, nil, nil, nil,
-    nil, nil,
-    nil,
-    nil,
+    121, 48, 53, nil, nil, nil, 19, nil, 136, 109,
+    nil, nil, nil, 13, nil, 29, 131, 126, 74, 66,
+    nil, 34, 36, nil, nil, nil, nil, nil, 53, 49,
+    3, nil, nil, nil, nil, 110, 96, nil, nil, nil,
+    nil, nil, nil, 29, nil, nil, nil, nil, nil, nil,
+    nil, nil, 13, nil, nil, nil, nil, 93, 55, -6,
+    nil, -10, nil, 17, nil, nil, nil, 52, nil, nil,
+    nil, nil, nil, nil, nil, nil, -2, nil ]
 
 racc_action_default = [
-    -2, -
-    -13, -14, -15, -
-    -9, -
-    -
-    -
-    -
-    -20, -
+    -2, -55, -1, -3, -4, -5, -55, -8, -55, -55,
+    -13, -14, -15, -55, -23, -55, -55, -55, -55, -55,
+    -9, -55, -55, 78, -10, -11, -6, -16, -17, -18,
+    -55, -34, -39, -40, -41, -55, -55, -44, -45, -46,
+    -47, -48, -7, -55, -49, -50, -51, -52, -53, -54,
+    -12, -19, -55, -24, -26, -27, -28, -55, -55, -55,
+    -43, -55, -20, -55, -36, -37, -21, -55, -29, -31,
+    -32, -33, -42, -38, -25, -22, -55, -30 ]
 
 racc_goto_table = [
-
-
-
-    nil, nil, nil, nil, nil, nil, nil, nil,
-
+    30, 27, 43, 31, 68, 56, 53, 49, 2, 28,
+    20, 52, 67, 29, 42, 26, 1, 21, 24, 25,
+    59, 60, 77, nil, nil, nil, nil, nil, nil, nil,
+    nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
+    30, 69, 64, 70, 73, 65, 56, 74, nil, 49,
+    nil, nil, nil, nil, nil, nil, nil, nil, 30, 69,
+    nil, 70 ]
 
 racc_goto_check = [
-
-
-
-    nil, nil, nil, nil, nil, nil, nil, nil,
-
+    14, 11, 20, 18, 19, 6, 17, 10, 2, 12,
+    3, 15, 16, 13, 8, 7, 1, 2, 3, 3,
+    18, 18, 19, nil, nil, nil, nil, nil, nil, nil,
+    nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
+    14, 11, 18, 18, 20, 10, 6, 17, nil, 10,
+    nil, nil, nil, nil, nil, nil, nil, nil, 14, 11,
+    nil, 18 ]
 
 racc_goto_pointer = [
-    nil, 8,
-    -
+    nil, 16, 8, 2, nil, nil, -17, -3, -5, nil,
+    -12, -17, -9, -5, -18, -11, -46, -16, -15, -54,
+    -17 ]
 
 racc_goto_default = [
     nil, nil, nil, 3, 4, 5, 6, nil, nil, 7,
-    12,
+    12, 54, 71, nil, 13, nil, nil, nil, 55, nil,
+    nil ]
 
 racc_reduce_table = [
   0, 0, :racc_error,
@@ -105,37 +119,44 @@ racc_reduce_table = [
   1, 28, :_reduce_18,
   3, 31, :_reduce_19,
   4, 31, :_reduce_20,
-
+  3, 33, :_reduce_21,
+  4, 33, :_reduce_22,
   1, 35, :_reduce_none,
-  3, 35, :_reduce_23,
-  1, 36, :_reduce_none,
   1, 36, :_reduce_none,
-
-  1,
-  1,
-
-  3, 33, :_reduce_30,
-  3, 29, :_reduce_31,
-  1, 37, :_reduce_none,
-  1, 37, :_reduce_none,
+  3, 36, :_reduce_25,
+  1, 38, :_reduce_none,
+  1, 38, :_reduce_none,
+  1, 38, :_reduce_28,
   1, 37, :_reduce_none,
-  3, 37, :
-
+  3, 37, :_reduce_30,
+  1, 40, :_reduce_none,
+  1, 40, :_reduce_none,
+  1, 40, :_reduce_none,
+  1, 34, :_reduce_none,
+  1, 34, :_reduce_none,
+  3, 34, :_reduce_36,
+  3, 34, :_reduce_37,
+  3, 29, :_reduce_38,
+  1, 39, :_reduce_none,
+  1, 39, :_reduce_none,
+  1, 39, :_reduce_none,
+  3, 39, :_reduce_42,
+  2, 39, :_reduce_43,
   1, 32, :_reduce_none,
   1, 32, :_reduce_none,
   1, 32, :_reduce_none,
   1, 32, :_reduce_none,
   1, 32, :_reduce_none,
-  1,
-  1,
-  1,
-  1,
-  1,
-  1,
+  1, 41, :_reduce_none,
+  1, 41, :_reduce_none,
+  1, 41, :_reduce_none,
+  1, 41, :_reduce_none,
+  1, 41, :_reduce_none,
+  1, 41, :_reduce_none ]
 
-racc_reduce_n =
+racc_reduce_n = 55
 
-racc_shift_n =
+racc_shift_n = 78
 
 racc_token_table = {
   false => 0,
@@ -214,11 +235,14 @@ Racc_token_to_s_table = [
   "group",
   "function",
   "literal",
+  "literal_function",
   "literal_list",
   "function_name",
   "function_args",
+  "literal_function_args",
   "function_arg",
   "literals",
+  "literal_function_arg",
   "rangeable" ]
 
 Racc_debug_parser = false
@@ -297,42 +321,42 @@ def _reduce_20(val, _values, result)
   result
 end
 
-
-
-
+def _reduce_21(val, _values, result)
+  result = tokenize_function(val[0], [])
+  result
+end
 
-def
-  result =
+def _reduce_22(val, _values, result)
+  result = tokenize_function(val[0], val[2])
   result
 end
 
-# reduce
+# reduce 23 omitted
 
-# reduce
+# reduce 24 omitted
 
-def
-  result =
+def _reduce_25(val, _values, result)
+  result = tokenize_function_args(val[0], val[2])
   result
 end
 
-# reduce
+# reduce 26 omitted
 
-# reduce
+# reduce 27 omitted
 
-def
-  result =
+def _reduce_28(val, _values, result)
+  result = tokenize_field_arg(val[0])
   result
 end
 
+# reduce 29 omitted
+
 def _reduce_30(val, _values, result)
-  result =
+  result = tokenize_function_args(val[0], val[2])
   result
 end
 
-
-  result = tokenize_multiple(val[0], val[2])
-  result
-end
+# reduce 31 omitted
 
 # reduce 32 omitted
 
@@ -340,19 +364,22 @@ end
 
 # reduce 34 omitted
 
-
-  result = val[1]
-  result
-end
+# reduce 35 omitted
 
 def _reduce_36(val, _values, result)
-  result =
+  result = tokenize_multiple(val[0], val[2])
   result
 end
 
-
+def _reduce_37(val, _values, result)
+  result = tokenize_multiple(val[0], val[2])
+  result
+end
 
-
+def _reduce_38(val, _values, result)
+  result = tokenize_multiple(val[0], val[2])
+  result
+end
 
 # reduce 39 omitted
 
@@ -360,9 +387,15 @@ end
 
 # reduce 41 omitted
 
-
+def _reduce_42(val, _values, result)
+  result = val[1]
+  result
+end
 
-
+def _reduce_43(val, _values, result)
+  result = tokenize_literal_negation(val[1])
+  result
+end
 
 # reduce 44 omitted
 
@@ -372,6 +405,20 @@ end
 
 # reduce 47 omitted
 
+# reduce 48 omitted
+
+# reduce 49 omitted
+
+# reduce 50 omitted
+
+# reduce 51 omitted
+
+# reduce 52 omitted
+
+# reduce 53 omitted
+
+# reduce 54 omitted
+
 def _reduce_none(val, _values, result)
   val[0]
 end
data/lib/sparkql/parser.y
CHANGED
@@ -102,7 +102,7 @@ rule
   # on filtering values
   condition
     : literal
-    |
+    | literal_function
     | literal_list { result = tokenize_list(val[0]) }
     ;
 
@@ -115,6 +115,11 @@ rule
     : function_name LPAREN RPAREN { result = tokenize_function(val[0], []) }
    | function_name LPAREN function_args RPAREN { result = tokenize_function(val[0], val[2]) }
     ;
+
+  literal_function
+    : function_name LPAREN RPAREN { result = tokenize_function(val[0], []) }
+    | function_name LPAREN literal_function_args RPAREN { result = tokenize_function(val[0], val[2]) }
+    ;
 
   function_name
     : KEYWORD
@@ -126,20 +131,31 @@ rule
   function_args
     : function_arg
     | function_args COMMA function_arg { result = tokenize_function_args(val[0], val[2]) }
-    ;
-
+    ;
+
   function_arg
     : literal
     | literals
    | field { result = tokenize_field_arg(val[0]) }
     ;
-
+
+  literal_function_args
+    : literal_function_arg
+    | literal_function_args COMMA literal_function_arg { result = tokenize_function_args(val[0], val[2]) }
+    ;
+
+  literal_function_arg
+    : literal
+    | literals
+    | literal_function
+    ;
+
   ##### Literal List
   #
   # A comma delimited list of functions and values.
   literal_list
     : literals
-    |
+    | literal_function
     | literal_list COMMA literals { result = tokenize_multiple(val[0], val[2]) }
     | literal_list COMMA function { result = tokenize_multiple(val[0], val[2]) }
     ;
data/lib/sparkql/parser_compatibility.rb
CHANGED
@@ -4,61 +4,51 @@ module Sparkql::ParserCompatibility
   MAXIMUM_MULTIPLE_VALUES = 200
   MAXIMUM_EXPRESSIONS = 75
   MAXIMUM_LEVEL_DEPTH = 2
+  MAXIMUM_FUNCTION_DEPTH = 5
 
-  # TODO I Really don't think this is required anymore
   # Ordered by precedence.
   FILTER_VALUES = [
     {
       :type => :datetime,
-      :regex => /^[0-9]{4}\-[0-9]{2}\-[0-9]{2}T[0-9]{2}\:[0-9]{2}\:[0-9]{2}\.[0-9]{6}$/,
       :operators => Sparkql::Token::OPERATORS + [Sparkql::Token::RANGE_OPERATOR]
     },
     {
       :type => :date,
-      :regex => /^[0-9]{4}\-[0-9]{2}\-[0-9]{2}$/,
       :operators => Sparkql::Token::OPERATORS + [Sparkql::Token::RANGE_OPERATOR]
     },
     {
       :type => :time,
-      :regex => /^[0-9]{2}\:[0-9]{2}(\:[0-9]{2})?(\.[0-9]{6)$/,
       :operators => Sparkql::Token::OPERATORS + [Sparkql::Token::RANGE_OPERATOR]
     },
     {
       :type => :character,
-      :regex => /^'([^'\\]*(\\.[^'\\]*)*)'$/, # Strings must be single quoted. Any inside single quotes must be escaped.
       :multiple => /^'([^'\\]*(\\.[^'\\]*)*)'/,
       :operators => Sparkql::Token::EQUALITY_OPERATORS
     },
     {
       :type => :integer,
-      :regex => /^\-?[0-9]+$/,
       :multiple => /^\-?[0-9]+/,
       :operators => Sparkql::Token::OPERATORS + [Sparkql::Token::RANGE_OPERATOR]
     },
     {
       :type => :decimal,
-      :regex => /^\-?[0-9]+\.[0-9]+$/,
       :multiple => /^\-?[0-9]+\.[0-9]+/,
       :operators => Sparkql::Token::OPERATORS + [Sparkql::Token::RANGE_OPERATOR]
     },
     {
       :type => :shape,
-      # This type is not parseable, so no regex
       :operators => Sparkql::Token::EQUALITY_OPERATORS
     },
     {
       :type => :boolean,
-      :regex => /^true|false$/,
       :operators => Sparkql::Token::EQUALITY_OPERATORS
     },
     {
       :type => :null,
-      :regex => /^NULL|Null|null$/,
       :operators => Sparkql::Token::EQUALITY_OPERATORS
     },
     {
       :type => :function,
-      # This type is not parseable, so no regex
       :operators => Sparkql::Token::OPERATORS + [Sparkql::Token::RANGE_OPERATOR]
     },
   ]
@@ -206,6 +196,10 @@ module Sparkql::ParserCompatibility
     MAXIMUM_MULTIPLE_VALUES
   end
 
+  def max_function_depth
+    MAXIMUM_FUNCTION_DEPTH
+  end
+
   private
 
   def tokenizer_error( error_hash )
@@ -224,11 +218,11 @@ module Sparkql::ParserCompatibility
         (supports_nulls && expression[:type] == :null)
       return true
     # If the field will be passed into a function,
-    # check the type of the return value
+    # check the type of the return value of the function
    # and coerce if necessary.
-    elsif expression[:
-        expression[:type] == :integer &&
-        expression[:
+    elsif expression[:field_manipulations] &&
+        expression[:type] == :integer &&
+        expression[:field_manipulations][:return_type] == :decimal
       expression[:type] = :decimal
       expression[:cast] = :integer
       return true
@@ -264,9 +258,9 @@ module Sparkql::ParserCompatibility
   # the function matches what is expected, and that the function supports the
   # field type as the first argument.
   def check_function_type?(expression, expected)
-    return false unless expression[:
+    return false unless expression.key?(:field_manipulations) && expression[:field_manipulations][:return_type] == expression[:type]
     # Lookup the function arguments
-    function = Sparkql::FunctionResolver::SUPPORTED_FUNCTIONS[expression[:
+    function = Sparkql::FunctionResolver::SUPPORTED_FUNCTIONS[expression[:field_manipulations][:function_name].to_sym]
     return false if function.nil?
 
     Array(function[:args].first).include?(expected)
data/lib/sparkql/parser_tools.rb
CHANGED
@@ -24,35 +24,37 @@ module Sparkql::ParserTools
 
   def tokenize_expression(field, op, val)
     operator = get_operator(val,op) unless val.nil?
-
-
-    # is remapped to the expression
+
+    field_manipulations = nil
     if field.is_a?(Hash) && field[:type] == :function
-      function = Sparkql::FunctionResolver::SUPPORTED_FUNCTIONS[field[:
-      if
-
-        field_args[:args] = field[:args]
-
-        if field_args[:field_function] == 'cast'
-          field_args[:field_function_type] = field[:args].last.to_sym
-        else
-          field_args[:field_function_type] = function[:return_type]
-        end
-      else
-        tokenizer_error(:token => field[:value],
+      function = Sparkql::FunctionResolver::SUPPORTED_FUNCTIONS[field[:function_name].to_sym]
+      if function.nil?
+        tokenizer_error(:token => field[:function_name],
          :message => "Unsupported function type", :status => :fatal )
       end
-
+      field_manipulations = field
+      field = field[:field]
     end
-
+
+    custom_field = !field.nil? && field.is_a?(String) && field.start_with?('"')
+
     block_group = (@lexer.level == 0) ? 0 : @lexer.block_group_identifier
     expression = {:field => field, :operator => operator, :conjunction => 'And',
       :conjunction_level => 0, :level => @lexer.level,
-      :block_group => block_group, :custom_field => custom_field}
-
+      :block_group => block_group, :custom_field => custom_field}
+
+    if !field_manipulations.nil?
+      # Keeping field_function and field_function_type for backward compatibility with datacon
+      expression.merge!(field_manipulations: field_manipulations,
+                        field_function: field_manipulations[:function_name],
+                        field_function_type: field_manipulations[:return_type],
+                        args: field_manipulations[:function_parameters])
+    end
+
     expression = val.merge(expression) unless val.nil?
     expression[:condition] ||= expression[:value]
     validate_level_depth expression
+    validate_field_function_depth(expression[:field_manipulations])
     if operator.nil?
       tokenizer_error(:token => op, :expression => expression,
         :message => "Operator not supported for this type and value string", :status => :fatal )
@@ -145,10 +147,14 @@ module Sparkql::ParserTools
   end
 
   def tokenize_field_arg(field)
-
-
-
-
+    if field.is_a?(Hash) && field[:type] == :function
+      field
+    else
+      {
+        :type => :field,
+        :value => field,
+      }
+    end
   end
 
   def tokenize_function(name, f_args)
@@ -180,7 +186,6 @@ module Sparkql::ParserTools
   def on_error(error_token_id, error_value, value_stack)
     token_name = token_to_str(error_token_id)
     token_name.downcase!
-    token = error_value.to_s.inspect
     tokenizer_error(:token => @lexer.current_token_value,
       :message => "Error parsing token #{token_name}",
       :status => :fatal,
@@ -194,7 +199,15 @@ module Sparkql::ParserTools
         :status => :fatal, :syntax => false, :constraint => true )
     end
   end
-
+
+  def validate_field_function_depth(expression)
+    if nested_function_depth(expression) > max_function_depth
+      compile_error(:token => "(", :expression => expression,
+        :message => "You have exceeded the maximum function nesting level. Please nest no more than #{max_function_depth} levels deep.",
+        :status => :fatal, :syntax => false, :constraint => true )
+    end
+  end
+
   def validate_expressions results
     if results.size > max_expressions
       compile_error(:token => results[max_expressions][:field], :expression => results[max_expressions],
@@ -237,4 +250,26 @@ module Sparkql::ParserTools
     end
   end
 
+  def nested_function_depth(expression)
+    return 0 unless expression && expression[:type] == :function
+
+    height = 0
+    queue = []
+    queue.push(expression)
+
+    while true
+      count = queue.size
+      return height if count == 0
+
+      height += 1
+
+      while count > 0
+        node = queue.shift
+        node[:args].each do |child|
+          queue.push(child) if child[:type] == :function
+        end
+        count -= 1
+      end
+    end
+  end
 end