sparkql 1.2.8 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +111 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +4 -0
- data/Rakefile +2 -3
- data/VERSION +1 -1
- data/lib/sparkql/errors.rb +68 -71
- data/lib/sparkql/evaluator.rb +13 -9
- data/lib/sparkql/expression_resolver.rb +2 -3
- data/lib/sparkql/expression_state.rb +7 -9
- data/lib/sparkql/function_resolver.rb +15 -10
- data/lib/sparkql/geo/record_circle.rb +1 -1
- data/lib/sparkql/lexer.rb +54 -56
- data/lib/sparkql/parser.rb +35 -35
- data/lib/sparkql/parser_compatibility.rb +97 -76
- data/lib/sparkql/parser_tools.rb +159 -139
- data/lib/sparkql/token.rb +25 -25
- data/lib/sparkql/version.rb +1 -1
- data/sparkql.gemspec +1 -1
- data/test/unit/errors_test.rb +4 -5
- data/test/unit/evaluator_test.rb +15 -16
- data/test/unit/expression_state_test.rb +14 -15
- data/test/unit/function_resolver_test.rb +125 -161
- data/test/unit/geo/record_circle_test.rb +2 -2
- data/test/unit/lexer_test.rb +15 -16
- data/test/unit/parser_compatability_test.rb +177 -151
- data/test/unit/parser_test.rb +90 -90
- metadata +8 -6
data/lib/sparkql/parser_tools.rb
CHANGED
@@ -2,11 +2,10 @@
 require 'bigdecimal'
 
 module Sparkql::ParserTools
-
   # Coercible types from highest precision to lowest
-  DATE_TYPES = [:datetime, :date]
-  NUMBER_TYPES = [:decimal, :integer]
-  ARITHMETIC_TYPES = [:decimal, :integer, :field, :arithmetic]
+  DATE_TYPES = %i[datetime date].freeze
+  NUMBER_TYPES = %i[decimal integer].freeze
+  ARITHMETIC_TYPES = %i[decimal integer field arithmetic].freeze
   GROUP = 'Group'.freeze
   NEGATION = 'Negation'.freeze
 
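Note: this hunk is pure style cleanup. A standalone sketch (not from the gem) showing that the %i[...] literal builds the same symbol array as the old bracketed form, and that .freeze guards the constant against mutation:

    # Standalone sketch: %i[datetime date] equals [:datetime, :date], and
    # .freeze makes the array immutable (FrozenError on Ruby 2.5+).
    DATE_TYPES = %i[datetime date].freeze

    raise 'mismatch' unless DATE_TYPES == [:datetime, :date]

    begin
      DATE_TYPES << :time
    rescue FrozenError
      puts 'frozen arrays reject mutation'
    end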
@@ -15,15 +14,14 @@ module Sparkql::ParserTools
     @expression_count = 0
     results = do_parse
     return if results.nil?
+
     validate_expressions results
     results
   end
 
   def next_token
     t = @lexer.shift
-    while t[0] == :SPACE
-      t = @lexer.shift
-    end
+    t = @lexer.shift while (t[0] == :SPACE) || (t[0] == :NEWLINE)
     t
   end
 
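Note: next_token now discards :NEWLINE tokens as well as :SPACE (the NEWLINE pattern lives in token.rb, below). The modifier while re-tests the condition against each freshly shifted token. A standalone sketch with a stubbed token stream (names hypothetical):

    # Stubbed token stream standing in for @lexer; entries are [type, value].
    tokens = [[:SPACE, ' '], [:NEWLINE, "\n"], [:KEYWORD, 'And']]

    t = tokens.shift
    # Keep shifting while the current token is whitespace, as in the new code.
    t = tokens.shift while (t[0] == :SPACE) || (t[0] == :NEWLINE)
    p t # => [:KEYWORD, "And"]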
@@ -41,20 +39,20 @@ module Sparkql::ParserTools
   end
 
   def no_field_error(field, operator)
-    tokenizer_error(:token => field,
-                    :expression => { :operator => operator, :conjuction => 'And', :conjunction_level => 0, :level => @lexer.level },
-                    :message => "Each expression must evaluate a field", :status => :fatal)
+    tokenizer_error(token: field,
+                    expression: { operator: operator, conjuction: 'And', conjunction_level: 0, level: @lexer.level },
+                    message: "Each expression must evaluate a field", status: :fatal)
   end
 
-  def tokenize_expression(field, op, val)
-    operator = get_operator(val,op) unless val.nil?
+  def tokenize_expression(field, op_token, val)
+    operator = get_operator(val, op_token) unless val.nil?
 
     field_manipulations = nil
     if field.is_a?(Hash) && field[:type] == :function
-      …
-      …
-      …
-      …
+      unless supported_function?(field[:function_name])
+        tokenizer_error(token: field[:function_name],
+                        message: 'Unsupported function type',
+                        status: :fatal)
       end
       field_manipulations = field
       field = field[:field]
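Note: the unsupported-function guard now delegates to supported_function?, a helper added at the bottom of this file that wraps Sparkql::FunctionResolver.lookup. A minimal sketch of the guard's shape with a stubbed lookup table (hypothetical data, not the gem's real function registry):

    # Hypothetical stand-in for Sparkql::FunctionResolver.lookup: a spec hash
    # for known functions, nil for anything else.
    LOOKUP = { 'now' => { args: [] } }.freeze

    def supported_function?(name)
      !LOOKUP[name].nil?
    end

    p supported_function?('now')   # => true
    p supported_function?('bogus') # => false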
@@ -68,10 +66,10 @@ module Sparkql::ParserTools
 
     custom_field = !field.nil? && field.is_a?(String) && field.start_with?('"')
 
-    block_group = (@lexer.level == 0) ? 0 : @lexer.block_group_identifier
-    expression = {:field => field, :operator => operator, :conjunction => 'And',
-                  :conjunction_level => 0, :level => @lexer.level,
-                  :block_group => block_group, :custom_field => custom_field}
+    block_group = @lexer.level.zero? ? 0 : @lexer.block_group_identifier
+    expression = { field: field, operator: operator, conjunction: 'And',
+                   conjunction_level: 0, level: @lexer.level,
+                   block_group: block_group, custom_field: custom_field }
 
     if !field_manipulations.nil?
       # Keeping field_function and field_function_type for backward compatibility with datacon
@@ -89,13 +87,13 @@ module Sparkql::ParserTools
     validate_level_depth expression
     validate_field_function_depth(expression[:field_manipulations])
     if operator.nil?
-      tokenizer_error(:token => op, :expression => expression,
-                      :message => "Operator not supported for this type and value string", :status => :fatal)
+      tokenizer_error(token: op_token, expression: expression,
+                      message: "Operator not supported for this type and value string", status: :fatal)
     end
     @expression_count += 1
     [expression]
   end
-  
+
   def tokenize_conjunction(exp1, conj, exp2)
     exp2.first[:conjunction] = conj
     exp2.first[:conjunction_level] = @lexer.level
@@ -107,10 +105,10 @@ module Sparkql::ParserTools
     # begins with a unary operator, and is nested, such as:
     # Not (Not Field Eq 1)
     # In this instance we treat the outer unary as a conjunction. With any other
-    # expression this would be the case, so that should make processing 
+    # expression this would be the case, so that should make processing
     # consistent.
-    if exp.first[:unary] && @lexer.level == 0
-      exp.first[:conjunction] = …
+    if exp.first[:unary] && @lexer.level.zero?
+      exp.first[:conjunction] = conj
       exp.first[:conjunction_level] = @lexer.level
     else
       exp.first[:unary] = conj
@@ -119,7 +117,7 @@ module Sparkql::ParserTools
 
     exp
   end
-  
+
   def tokenize_group(expressions)
     @lexer.leveldown
     expressions
@@ -128,7 +126,7 @@ module Sparkql::ParserTools
   def tokenize_arithmetic_group(lhs)
     @lexer.leveldown
     @lexer.block_group_identifier -= 1
-    lhs = {type: :field, value: lhs} if lhs.is_a?(String)
+    lhs = { type: :field, value: lhs } if lhs.is_a?(String)
     {
       type: :arithmetic,
       op: GROUP,
@@ -137,7 +135,7 @@ module Sparkql::ParserTools
   end
 
   def tokenize_arithmetic_negation(lhs)
-    lhs = {type: :field, value: lhs} if lhs.is_a?(String)
+    lhs = { type: :field, value: lhs } if lhs.is_a?(String)
     {
       type: :arithmetic,
       op: NEGATION,
@@ -147,6 +145,7 @@ module Sparkql::ParserTools
 
   def tokenize_list(list)
     return if list.nil?
+
     validate_multiple_values list[:value]
     list[:condition] ||= list[:value]
     list
@@ -154,18 +153,18 @@ module Sparkql::ParserTools
 
   def tokenize_literal_negation(number_token)
     old_val = case number_token[:type]
-    when :integer
-      number_token[:value].to_i
-    when :decimal
-      number_token[:value].to_f
-    else
-      tokenizer_error(:token => @lexer.current_token_value,
-        :expression => number_token,
-        :message => "Negation is only allowed for integer and floats",
-        :status => :fatal,
-        :syntax => true)
-      return number_token
-    end
+              when :integer
+                number_token[:value].to_i
+              when :decimal
+                number_token[:value].to_f
+              else
+                tokenizer_error(token: @lexer.current_token_value,
+                                expression: number_token,
+                                message: "Negation is only allowed for integer and floats",
+                                status: :fatal,
+                                syntax: true)
+                return number_token
+              end
     number_token[:value] = (-1 * old_val).to_s
 
     number_token
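Note: tokenize_literal_negation folds a unary minus into the literal while keeping :value a string; anything other than an integer or decimal literal triggers a fatal parse error. A standalone sketch of the happy path (simplified; the error branch is stubbed out):

    # Standalone sketch of the integer/decimal branches.
    def negate(number_token)
      old_val = case number_token[:type]
                when :integer then number_token[:value].to_i
                when :decimal then number_token[:value].to_f
                else return number_token # the gem reports a fatal error here
                end
      number_token[:value] = (-1 * old_val).to_s
      number_token
    end

    p negate(type: :integer, value: '5')   # => {:type=>:integer, :value=>"-5"}
    p negate(type: :decimal, value: '3.5') # => {:type=>:decimal, :value=>"-3.5"}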
@@ -174,43 +173,43 @@ module Sparkql::ParserTools
   def tokenize_multiple(lit1, lit2)
     final_type = lit1[:type]
     if lit1[:type] != lit2[:type]
-      final_type = coercible_types(lit1[:type],lit2[:type])
+      final_type = coercible_types(lit1[:type], lit2[:type])
       if final_type.nil?
         final_type = lit1[:type]
-        tokenizer_error(:token => @lexer.last_field,
-                        :message => "Type mismatch in field list.",
-                        :status => :fatal,
-                        :syntax => true)
+        tokenizer_error(token: @lexer.last_field,
+                        message: "Type mismatch in field list.",
+                        status: :fatal,
+                        syntax: true)
       end
     end
     array = Array(lit1[:value])
-    condition = lit1[:condition] || lit1[:value] 
+    condition = lit1[:condition] || lit1[:value]
     array << lit2[:value]
     {
-      :type => final_type,
-      :value => array,
-      :multiple => "true",
-      :condition => "#{condition},#{lit2[:condition] || lit2[:value]}"
+      type: final_type,
+      value: array,
+      multiple: "true",
+      condition: "#{condition},#{lit2[:condition] || lit2[:value]}"
     }
   end
-  
+
   def tokenize_function_args(lit1, lit2)
-    array = lit1.kind_of?(Array) ? lit1 : [lit1]
+    array = lit1.is_a?(Array) ? lit1 : [lit1]
     array << lit2
     array
   end
-  
+
   def tokenize_field_arg(field)
     if field.is_a?(String)
       {
-        :type => :field,
-        :value => field
+        type: :field,
+        value: field
       }
     else
       field
     end
   end
-  
+
   def tokenize_function(name, f_args)
     @lexer.leveldown
     @lexer.block_group_identifier -= 1
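Note: tokenize_multiple folds two literals into one token, widening the type via coercible_types when they differ and accumulating a comma-joined :condition string. A standalone sketch of the merge for two character literals:

    # Standalone sketch of the multi-value merge.
    lit1 = { type: :character, value: "'a'" }
    lit2 = { type: :character, value: "'b'" }

    condition = lit1[:condition] || lit1[:value]
    merged = {
      type: lit1[:type],
      value: Array(lit1[:value]) << lit2[:value],
      multiple: 'true',
      condition: "#{condition},#{lit2[:condition] || lit2[:value]}"
    }
    puts merged[:condition] # => 'a','b'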
@@ -222,24 +221,23 @@ module Sparkql::ParserTools
       condition_list << arg[:value] # Needs to be pure string value
       arg[:value] = escape_value(arg)
     end
-    resolver = …
-    …
+    resolver = function_resolver(name, args)
     resolver.validate
-    if(resolver.errors?)
-      tokenizer_error(:token => @lexer.last_field,
-                      :message => "Error parsing function #{resolver.errors.join(',')}",
-                      :status => :fatal,
-                      :syntax => true)
-      …
+    if resolver.errors?
+      tokenizer_error(token: @lexer.last_field,
+                      message: "Error parsing function #{resolver.errors.join(',')}",
+                      status: :fatal,
+                      syntax: true)
+      nil
     else
-      result = resolver.call 
-      result.nil? ? result : result.merge(:condition => "#{name}(#{condition_list.join(',')})")
+      result = resolver.call
+      result.nil? ? result : result.merge(condition: "#{name}(#{condition_list.join(',')})")
     end
   end
 
   def tokenize_arithmetic(lhs, operator, rhs)
-    lhs = {type: :field, value: lhs} if lhs.is_a?(String)
-    rhs = {type: :field, value: rhs} if rhs.is_a?(String)
+    lhs = { type: :field, value: lhs } if lhs.is_a?(String)
+    rhs = { type: :field, value: rhs } if rhs.is_a?(String)
 
     arithmetic_error?(lhs)
     arithmetic_error?(rhs)
@@ -253,11 +251,11 @@ module Sparkql::ParserTools
 
   def arithmetic_error?(side)
     side_type = side[:type] == :function ? side[:return_type] : side[:type]
-    return false …
+    return false if ARITHMETIC_TYPES.include?(side_type)
 
-    compile_error(:token => side[:value], :expression => side,
-                  :message => "Error attempting arithmetic with type: #{side_type}",
-                  :status => :fatal, :syntax => false, :constraint => true)
+    compile_error(token: side[:value], expression: side,
+                  message: "Error attempting arithmetic with type: #{side_type}",
+                  status: :fatal, syntax: false, constraint: true)
     true
   end
 
@@ -267,47 +265,47 @@ module Sparkql::ParserTools
     exp
   end
 
-  def add_fold(…
-    return if arithmetic_error?(…
+  def add_fold(node1, node2)
+    return if arithmetic_error?(node1) || arithmetic_error?(node2)
 
-    value = escape_arithmetic_value(…
-    { type: arithmetic_type(…
+    value = escape_arithmetic_value(node1) + escape_arithmetic_value(node2)
+    { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
   end
 
-  def sub_fold(…
-    return if arithmetic_error?(…
+  def sub_fold(node1, node2)
+    return if arithmetic_error?(node1) || arithmetic_error?(node2)
 
-    value = escape_arithmetic_value(…
-    { type: arithmetic_type(…
+    value = escape_arithmetic_value(node1) - escape_arithmetic_value(node2)
+    { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
   end
 
-  def mul_fold(…
-    return if arithmetic_error?(…
+  def mul_fold(node1, node2)
+    return if arithmetic_error?(node1) || arithmetic_error?(node2)
 
-    value = escape_arithmetic_value(…
-    { type: arithmetic_type(…
+    value = escape_arithmetic_value(node1) * escape_arithmetic_value(node2)
+    { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
   end
 
-  def div_fold(…
-    return if arithmetic_error?(…
-    …
-    …
+  def div_fold(node1, node2)
+    return if arithmetic_error?(node1) ||
+              arithmetic_error?(node2) ||
+              zero_error?(node2)
 
-    value = escape_arithmetic_value(…
-    { type: arithmetic_type(…
+    value = escape_arithmetic_value(node1) / escape_arithmetic_value(node2)
+    { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
   end
 
-  def mod_fold(…
-    return if arithmetic_error?(…
-    …
-    …
+  def mod_fold(node1, node2)
+    return if arithmetic_error?(node1) ||
+              arithmetic_error?(node2) ||
+              zero_error?(node2)
 
-    value = escape_arithmetic_value(…
-    { type: arithmetic_type(…
+    value = escape_arithmetic_value(node1) % escape_arithmetic_value(node2)
+    { type: arithmetic_type(node1, node2), value: unescape_arithmetic(value) }
   end
 
   def arithmetic_type(num1, num2)
-    if (num1[:type] == :decimal || num2[:type] == :decimal)
+    if num1[:type] == :decimal || num2[:type] == :decimal
       :decimal
     else
       :integer
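Note: the fold methods constant-fold literal arithmetic at parse time, and arithmetic_type promotes the result to :decimal when either operand is one. A standalone sketch of the add case under simplified helpers (the gem's escape_arithmetic_value/unescape_arithmetic do more than this):

    require 'bigdecimal'

    # Simplified stand-ins for the gem's escape/unescape helpers.
    def numeric(tok)
      tok[:type] == :decimal ? BigDecimal(tok[:value]) : tok[:value].to_i
    end

    def fold_add(node1, node2)
      type = node1[:type] == :decimal || node2[:type] == :decimal ? :decimal : :integer
      { type: type, value: (numeric(node1) + numeric(node2)).to_s }
    end

    p fold_add({ type: :integer, value: '2' }, { type: :decimal, value: '0.5' })
    # => {:type=>:decimal, :value=>"2.5"} (BigDecimal#to_s may render "0.25e1")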
@@ -317,7 +315,7 @@ module Sparkql::ParserTools
   def escape_arithmetic_value(expression)
     case expression[:type]
     when :decimal
-      BigDecimal.new(expression[:value])
+      BigDecimal(expression[:value])
     else
       escape_value(expression)
     end
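Note: BigDecimal.new was deprecated in favor of the Kernel#BigDecimal conversion method and removed in bigdecimal 2.0 (bundled with Ruby 2.7), so this is a forward-compatibility fix rather than a behavior change:

    require 'bigdecimal'

    p BigDecimal('3.14')     # supported conversion method, e.g. 0.314e1
    # BigDecimal.new('3.14') # deprecated, and gone in bigdecimal >= 2.0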
@@ -332,71 +330,71 @@ module Sparkql::ParserTools
   end
 
   def zero_error?(number)
-    return unless escape_value(number) == 0
+    return unless escape_value(number).zero?
 
-    compile_error(:token => "#{number[:value]}", :expression => number,
-                  :message => "Error attempting to divide by zero",
-                  :status => :fatal, :syntax => false, :constraint => true)
+    compile_error(token: (number[:value]).to_s, expression: number,
+                  message: "Error attempting to divide by zero",
+                  status: :fatal, syntax: false, constraint: true)
   end
 
-  def on_error(error_token_id, error_value, value_stack)
+  def on_error(error_token_id, _error_value, _value_stack)
     token_name = token_to_str(error_token_id)
     token_name.downcase!
-    tokenizer_error(:token => @lexer.current_token_value,
-                    :message => "Error parsing token #{token_name}",
-                    :status => :fatal,
-                    :syntax => true)
-  end 
+    tokenizer_error(token: @lexer.current_token_value,
+                    message: "Error parsing token #{token_name}",
+                    status: :fatal,
+                    syntax: true)
+  end
 
-  def validate_level_depth expression
+  def validate_level_depth(expression)
     if @lexer.level > max_level_depth
-      compile_error(:token => "(", :expression => expression,
-                    :message => "You have exceeded the maximum nesting level. Please nest no more than #{max_level_depth} levels deep.",
-                    :status => :fatal, :syntax => false, :constraint => true)
+      compile_error(token: "(", expression: expression,
+                    message: "You have exceeded the maximum nesting level. Please nest no more than #{max_level_depth} levels deep.",
+                    status: :fatal, syntax: false, constraint: true)
     end
   end
 
   def validate_field_function_depth(expression)
     if nested_function_depth(expression) > max_function_depth
-      compile_error(:token => "(", :expression => expression,
-                    :message => "You have exceeded the maximum function nesting level. Please nest no more than #{max_function_depth} levels deep.",
-                    :status => :fatal, :syntax => false, :constraint => true)
+      compile_error(token: "(", expression: expression,
+                    message: "You have exceeded the maximum function nesting level. Please nest no more than #{max_function_depth} levels deep.",
+                    status: :fatal, syntax: false, constraint: true)
     end
   end
 
-  def validate_expressions results
-    if results.size > max_expressions 
-      compile_error(:token => results[max_expressions][:field], :expression => results[max_expressions],
-                    :message => "You have exceeded the maximum expression count. Please limit to no more than #{max_expressions} expressions in a filter.",
-                    :status => :fatal, :syntax => false, :constraint => true)
+  def validate_expressions(results)
+    if results.size > max_expressions
+      compile_error(token: results[max_expressions][:field], expression: results[max_expressions],
+                    message: "You have exceeded the maximum expression count. Please limit to no more than #{max_expressions} expressions in a filter.",
+                    status: :fatal, syntax: false, constraint: true)
       results.slice!(max_expressions..-1)
     end
   end
-  
-  def validate_multiple_values values
+
+  def validate_multiple_values(values)
     values = Array(values)
-    if values.size > max_values 
-      compile_error(:token => values[max_values],
-                    :message => "You have exceeded the maximum value count. Please limit to #{max_values} values in a single expression.",
-                    :status => :fatal, :syntax => false, :constraint => true)
+    if values.size > max_values
+      compile_error(token: values[max_values],
+                    message: "You have exceeded the maximum value count. Please limit to #{max_values} values in a single expression.",
+                    status: :fatal, syntax: false, constraint: true)
       values.slice!(max_values..-1)
     end
   end
-  
-  def validate_multiple_arguments args
+
+  def validate_multiple_arguments(args)
     args = Array(args)
-    if args.size > max_values 
-      compile_error(:token => args[max_values],
-                    :message => "You have exceeded the maximum parameter count. Please limit to #{max_values} parameters to a single function.",
-                    :status => :fatal, :syntax => false, :constraint => true)
+    if args.size > max_values
+      compile_error(token: args[max_values],
+                    message: "You have exceeded the maximum parameter count. Please limit to #{max_values} parameters to a single function.",
+                    status: :fatal, syntax: false, constraint: true)
       args.slice!(max_values..-1)
     end
   end
-  
-  # If both types support coercion with eachother, always selects the highest 
+
+  # If both types support coercion with eachother, always selects the highest
   # precision type to return as a reflection of the two. Any type that doesn't
   # support coercion with the other type returns nil
-  def coercible_types type1, type2
+  def coercible_types(type1, type2)
     if DATE_TYPES.include?(type1) && DATE_TYPES.include?(type2)
       DATE_TYPES.first
     elsif NUMBER_TYPES.include?(type1) && NUMBER_TYPES.include?(type2)
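Note: coercible_types (now with a parenthesized signature) returns the higher-precision type when both inputs are mutually coercible, else nil; DATE_TYPES and NUMBER_TYPES are ordered highest-precision-first so .first is the right pick. A standalone sketch, assuming the number branch mirrors the visible date branch:

    DATE_TYPES = %i[datetime date].freeze
    NUMBER_TYPES = %i[decimal integer].freeze

    def coercible_types(type1, type2)
      if DATE_TYPES.include?(type1) && DATE_TYPES.include?(type2)
        DATE_TYPES.first
      elsif NUMBER_TYPES.include?(type1) && NUMBER_TYPES.include?(type2)
        NUMBER_TYPES.first
      end
    end

    p coercible_types(:date, :datetime)   # => :datetime
    p coercible_types(:integer, :decimal) # => :decimal
    p coercible_types(:date, :integer)    # => nil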
@@ -413,13 +411,13 @@ module Sparkql::ParserTools
     queue = []
     queue.push(expression)
 
-    while true
+    loop do
      count = queue.size
-      return height if count == 0
+      return height if count.zero?
 
       height += 1
 
-      while count > 0
+      while count.positive?
         node = queue.shift
         node[:args].each do |child|
           queue.push(child) if child[:type] == :function
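Note: nested_function_depth is a breadth-first walk over function arguments, and the rewrites here (loop do, count.zero?, count.positive?) leave the level counting intact. A standalone sketch of the same BFS depth count (the count -= 1 step comes from unshown context lines and is an assumption):

    # Standalone BFS depth counter over {type: :function, args: [...]} hashes.
    def function_depth(expression)
      height = 0
      queue = [expression]
      loop do
        count = queue.size
        return height if count.zero?

        height += 1
        while count.positive?
          node = queue.shift
          node[:args].each { |child| queue.push(child) if child[:type] == :function }
          count -= 1
        end
      end
    end

    inner = { type: :function, args: [] }
    p function_depth(type: :function, args: [inner]) # => 2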
@@ -428,4 +426,26 @@ module Sparkql::ParserTools
       end
     end
   end
+
+  def function_resolver(function_name, function_args = [])
+    Sparkql::FunctionResolver.new(function_name,
+                                  function_args,
+                                  current_timestamp: current_timestamp)
+  end
+
+  def supported_function?(function_name)
+    !lookup_function(function_name).nil?
+  end
+
+  def lookup_function(function_name)
+    Sparkql::FunctionResolver.lookup(function_name)
+  end
+
+  def current_timestamp
+    @current_timestamp ||= Time.now
+  end
+
+  def offset
+    @offset ||= current_timestamp.strftime('%:z')
+  end
 end
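Note: the new helpers centralize FunctionResolver construction and memoize current_timestamp, so every function evaluated in one parse sees the same clock reading; offset derives the UTC-offset string from it. An illustrative sketch of the memoization (standalone, not the gem's code path):

    def current_timestamp
      @current_timestamp ||= Time.now
    end

    def offset
      @offset ||= current_timestamp.strftime('%:z')
    end

    first = current_timestamp
    sleep 0.01
    p current_timestamp == first # => true; later calls reuse the first reading
    p offset                     # e.g. "+00:00"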
data/lib/sparkql/token.rb
CHANGED
@@ -1,31 +1,31 @@
 module Sparkql::Token
-  SPACE = /[\t ]+/
-  NEWLINE = /\r\n|\n\r|\r|\n/
-  LPAREN = /\(/
-  RPAREN = /\)/
-  KEYWORD = /[A-Za-z]+/
+  SPACE = /[\t ]+/.freeze
+  NEWLINE = /\r\n|\n\r|\r|\n/.freeze
+  LPAREN = /\(/.freeze
+  RPAREN = /\)/.freeze
+  KEYWORD = /[A-Za-z]+/.freeze
 
-  ADD = 'Add'
-  SUB = 'Sub'
+  ADD = 'Add'.freeze
+  SUB = 'Sub'.freeze
 
-  MUL = 'Mul'
-  DIV = 'Div'
-  MOD = 'Mod'
+  MUL = 'Mul'.freeze
+  DIV = 'Div'.freeze
+  MOD = 'Mod'.freeze
 
-  STANDARD_FIELD = /[A-Z]+[A-Za-z0-9]*/
-  CUSTOM_FIELD = /^("([^$."][^."]+)"."([^$."][^."]*)")/
-  INTEGER = /^\-?[0-9]+/
-  DECIMAL = /^\-?[0-9]+\.[0-9]+([Ee]\-?[0-9]{1,2})?/
-  CHARACTER = /^'([^'\\]*(\\.[^'\\]*)*)'/
-  DATE = /^[0-9]{4}\-[0-9]{2}\-[0-9]{2}/
-  TIME = /^[0-9]{2}\:[0-9]{2}((\:[0-9]{2})(\.[0-9]{1,50})?)?/
-  DATETIME = /^[0-9]{4}\-[0-9]{2}\-[0-9]{2}T[0-9]{2}\:[0-9]{2}((\:[0-9]{2})(\.[0-9]{1,50})?)?(((\+|-)[0-9]{2}\:?[0-9]{2})|Z)?/
-  BOOLEAN = /^true|false/
-  NULL = /NULL|null|Null/
+  STANDARD_FIELD = /[A-Z]+[A-Za-z0-9]*/.freeze
+  CUSTOM_FIELD = /^("([^$."][^."]+)"."([^$."][^."]*)")/.freeze
+  INTEGER = /^-?[0-9]+/.freeze
+  DECIMAL = /^-?[0-9]+\.[0-9]+([Ee]-?[0-9]{1,2})?/.freeze
+  CHARACTER = /^'([^'\\]*(\\.[^'\\]*)*)'/.freeze
+  DATE = /^[0-9]{4}-[0-9]{2}-[0-9]{2}/.freeze
+  TIME = /^[0-9]{2}:[0-9]{2}((:[0-9]{2})(\.[0-9]{1,50})?)?/.freeze
+  DATETIME = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}((:[0-9]{2})(\.[0-9]{1,50})?)?(((\+|-)[0-9]{2}:?[0-9]{2})|Z)?/.freeze
+  BOOLEAN = /^true|false/.freeze
+  NULL = /NULL|null|Null/.freeze
   # Reserved words
-  RANGE_OPERATOR = 'Bt'
-  EQUALITY_OPERATORS = ['Eq', 'Ne']
-  OPERATORS = ['Gt', 'Ge', 'Lt', 'Le'] + EQUALITY_OPERATORS
-  UNARY_CONJUNCTIONS = ['Not']
-  CONJUNCTIONS = ['And', 'Or']
+  RANGE_OPERATOR = 'Bt'.freeze
+  EQUALITY_OPERATORS = %w[Eq Ne].freeze
+  OPERATORS = %w[Gt Ge Lt Le] + EQUALITY_OPERATORS
+  UNARY_CONJUNCTIONS = ['Not'].freeze
+  CONJUNCTIONS = %w[And Or].freeze
 end
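Note: besides .freeze, several literal patterns appear to have dropped redundant escapes (\-, \:) flagged by newer RuboCop; the removed lines above are reconstructed on that assumption, and the matched language is identical either way. A quick check of the DATETIME pattern as published in 1.3.0:

    DATETIME = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}((:[0-9]{2})(\.[0-9]{1,50})?)?(((\+|-)[0-9]{2}:?[0-9]{2})|Z)?/.freeze

    p DATETIME.match?('2013-07-26T10:22:15.422804-03:00') # => true
    p DATETIME.match?('2013-07-26T10:22')                 # => true (seconds optional)
    p DATETIME.match?('not-a-date')                       # => false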
data/lib/sparkql/version.rb
CHANGED
data/sparkql.gemspec
CHANGED
@@ -27,6 +27,6 @@ Gem::Specification.new do |s|
   s.add_development_dependency 'ci_reporter', '~> 1.6'
   s.add_development_dependency 'mocha', '~> 0.12.0'
   s.add_development_dependency 'racc', '~> 1.4.8'
-  s.add_development_dependency 'rake', …
+  s.add_development_dependency 'rake', ">=12.3.3"
   s.add_development_dependency 'test-unit', '~> 2.1.0'
 end
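Note: the rake floor of ">=12.3.3" matches the widely adopted remediation for CVE-2020-8130 (command injection in rake's FileList), which is the likely motivation here; as a development dependency it does not affect runtime consumers of the gem.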
data/test/unit/errors_test.rb
CHANGED
@@ -10,21 +10,20 @@ class ParserTest < Test::Unit::TestCase
   end
 
   def test_error_constraint
-    errors = ParserError.new(:constraint => true, :syntax => false)
+    errors = ParserError.new(constraint: true, syntax: false)
     assert !errors.syntax?
     assert errors.constraint?
   end
-  
+
   def test_process_fatal_errors
-    p = ErrorsProcessor.new(ParserError.new(:status => :fatal))
+    p = ErrorsProcessor.new(ParserError.new(status: :fatal))
     assert p.fatal_errors?
     assert !p.dropped_errors?
   end
 
   def test_process_dropped_errors
-    p = ErrorsProcessor.new(ParserError.new(:status => :dropped))
+    p = ErrorsProcessor.new(ParserError.new(status: :dropped))
     assert p.dropped_errors?
     assert !p.fatal_errors?
   end
-
 end
|