sparkql 1.1.17 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,15 +1,15 @@
  ---
  !binary "U0hBMQ==":
  metadata.gz: !binary |-
- MzI5MGFiMTAwMTgyYjkyZDQyODY4MGE3ZDFkYTQ1NjFiODYyODAxMQ==
+ YmEwMzUwNDNlYTljMmQ4YTcwMWI5Mjc5YmZkYjc5NzBlY2QyMGM3Ng==
  data.tar.gz: !binary |-
- MGI1OTQwNWQ5OTg1MzNkYTc2MzQzM2Y1ZjRmMGViNWFlODQ5ZTE2MQ==
+ OTk3N2I0ZGQwYjc0ZmIxMzcwZTFkZjcwOGMxNzVlY2VkODMxN2QxMg==
  SHA512:
  metadata.gz: !binary |-
- NmVhZmM2ZWQzZmI0M2EyZmNlNmE2MTE0ZTE3MGZmZDU0MWIxZDZlZmMxOTA5
- NGViMTdjNjI0YzkwY2FkODUxY2I5M2RlMTY4OTRmNTQ2NjY5NjQzZTIyZjgx
- ZTNmMTUzNDFiMDI0ZWNmMzk5MzcyMjQ0ZWViMDQ4ZDE3NmRlOWY=
+ ZDVkMzg2MWNhNDUxMWU0NzVjZjhmNmE2ZmQ3YTVjMWVmY2U0MmRhNGFlZWIw
+ YTdlOTZkNTliY2E5ZWMzYTViMTNkMGVmYWYwNzI5ZGQ1YzJjN2IxZDc2Njk5
+ NGQ0OGQ2ODk5ZDRlYWZmMGJmYjEzYzBhODAxOTgwYjRjZGViMTA=
  data.tar.gz: !binary |-
- MGNiYmRhOTQ2ODlhMzM4ODRlNzdiYTg5YmNkNDZkZmIxOTc5ZTdkNTIxMjhh
- MjgzMGY4ZmE3MDgzNTk5NGJmMTUzYjExYTNkYzhjMGNkY2IzOWNkMDU5OGEy
- YTJkYTIxZDM5ODJkMDI5YWQxMjBmZjRlYjdhYzNlMjhiYTZkMzQ=
+ M2NjMjYxNmMyNzEyNjZjMTc5Yjc0ZjFkZjlhMjk1ZGNjYzdhMzA5YzZjOWJj
+ YTU2NGQ0N2FkNzIzM2E5ZTI1ZmY4ZDYxYTA0YWIxM2I5Mjg5NWY1ZGY1OWVk
+ YzFmY2VlNDE0ZjNmNTBkOTY5MjIwOWZjY2YzZWFlNmU0MDllZTI=
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+ v1.2.0, 2018-09-24
+ -------------------
+ * [IMPROVEMENT] Support Nested field functions via `field_manipulations` attribute.
+
  v1.1.17, 2018-08-01
  -------------------
  * [IMPROVEMENT] New Function: concat()
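
The nested field function support is this release's headline change. Below is a minimal sketch of the kind of filter it enables, assuming the gem's `Sparkql::Parser` entry point; the filter string and the comment about output shape are illustrative, not taken from this diff:

```ruby
require 'sparkql'

# Hypothetical filter: a field function (tolower) nested inside another
# function call. Before 1.2.0, field arguments were resolved through
# one-off *_field helpers and could not be composed like this.
parser = Sparkql::Parser.new
expressions = parser.parse("toupper(tolower(City)) Eq 'FARGO'")

# parse may return nil on error; each parsed expression is a hash, and
# the function tree applied to the field is expected to surface via the
# new field_manipulations attribute.
Array(expressions).each { |expression| puts expression.inspect }
```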
data/GRAMMAR.md CHANGED
@@ -108,7 +108,7 @@ on filtering values
  ```
  condition
  : literal
- | function
+ | literal_function
  | literal_list
  ;
  ```
@@ -124,6 +124,10 @@ fields.
  : function_name LPAREN RPAREN
  | function_name LPAREN function_args RPAREN
  ;
+ literal_function
+ : function_name LPAREN RPAREN
+ | function_name LPAREN literal_function_args RPAREN
+ ;
  function_name
  : KEYWORD
  ;
@@ -137,12 +141,21 @@ Functions may optionally have a comma delimited list of parameters.
  function_args
  : function_arg
  | function_args COMMA function_arg
- ;
+ ;
  function_arg
  : literal
  | literals
  | field
  ;
+ literal_function_args
+ : literal_function_arg
+ | literal_function_args COMMA literal_function_arg
+ ;
+ literal_function_arg
+ : literal
+ | literals
+ | literal_function
+ ;
  ```
 
  #### Literal List
@@ -152,7 +165,7 @@ A comma delimited list of functions and values.
  ```
  literal_list
  : literals
- | function
+ | literal_function
  | literal_list COMMA literals
  | literal_list COMMA function
  ;
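
The net effect of these grammar changes: a `literal_function` limits its arguments to literals and other literal functions (no fields), and both a condition's value side and a literal list now accept a `literal_function`, so such calls can nest recursively. A hedged sketch of filters these rules are intended to accept, using the parser as above; the example filters are assumptions, not taken from this diff:

```ruby
require 'sparkql'

parser = Sparkql::Parser.new

# condition -> literal_function: a function of literals on the value side.
parser.parse("ListPrice Lt ceiling(100000.50)")

# literal_function_arg -> literal_function: literal functions may nest.
parser.parse("ExpirationDate Lt days(floor(-7.5))")

# literal_list: a literal_function may appear alongside plain literals.
parser.parse("Status Eq tolower('Active'),'Pending'")
```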
data/VERSION CHANGED
@@ -1 +1 @@
- 1.1.17
+ 1.2.0
data/lib/sparkql/function_resolver.rb CHANGED
@@ -223,7 +223,8 @@ class Sparkql::FunctionResolver
 
  count = 0
  @args.each do |arg|
- unless Array(total_args[count]).include?(arg[:type])
+ type = arg[:type] == :function ? arg[:return_type] : arg[:type]
+ unless Array(total_args[count]).include?(type)
  @errors << Sparkql::ParserError.new(:token => @name,
  :message => "Function call '#{@name}' has an invalid argument at #{arg[:value]}",
  :status => :fatal )
@@ -240,6 +241,10 @@
  return
  end
  end
+
+ if name == :substring && !@args[2].nil?
+ substring_index_error?(@args[2][:value])
+ end
  end
 
  def return_type
@@ -269,25 +274,47 @@
  real_vals = @args.map { |i| i[:value]}
  name = @name.to_sym
 
+ field = @args.find do |i|
+ i[:type] == :field || i.key?(:field)
+ end
+
+ field = field[:type] == :function ? field[:field] : field[:value] unless field.nil?
+
  required_args = support[name][:args]
  total_args = required_args + Array(support[name][:opt_args]).collect {|args| args[:default]}
+
  fill_in_optional_args = total_args.drop(real_vals.length)
 
  fill_in_optional_args.each do |default|
  real_vals << default
  end
- method = name
- if support[name][:resolve_for_type]
- method_type = @args.first[:type]
- method = "#{method}_#{method_type}"
+
+
+ v = if field.nil?
+ method = name
+ if support[name][:resolve_for_type]
+ method_type = @args.first[:type]
+ method = "#{method}_#{method_type}"
+ end
+ self.send(method, *real_vals)
+ else
+ {
+ :type => :function,
+ :return_type => return_type,
+ :value => "#{name}",
+ }
  end
- v = self.send(method, *real_vals)
 
- unless v.nil? || v.key?(:function_name)
- v[:function_name] = @name
- v[:function_parameters] = real_vals
+ return if v.nil?
+
+ if !v.key?(:function_name)
+ v.merge!( function_parameters: real_vals,
+ function_name: @name)
  end
 
+ v.merge!(args: @args,
+ field: field)
+
  v
  end
 
@@ -319,14 +346,6 @@
  }
  end
 
- def trim_field(arg)
- {
- :type => :function,
- :value => "trim",
- :args => [arg]
- }
- end
-
  def trim_character(arg)
  {
  :type => :character,
@@ -334,18 +353,7 @@
  }
  end
 
- def substring_field(field, first_index, number_chars)
- return if substring_index_error?(number_chars)
- {
- :type => :function,
- :value => "substring",
- :args => [field, first_index, number_chars]
- }
- end
-
  def substring_character(character, first_index, number_chars)
- return if substring_index_error?(number_chars)
-
  second_index = if number_chars.nil?
  -1
  else
@@ -370,21 +378,21 @@
  false
  end
 
- def tolower_character(string)
+ def tolower(args)
  {
  :type => :character,
- :value => "'#{string.to_s.downcase}'"
+ :value => "tolower"
  }
  end
 
- def tolower_field(arg)
+ def tolower_character(string)
  {
- :type => :function,
- :value => "tolower",
- :args => [arg]
+ :type => :character,
+ :value => "'#{string.to_s.downcase}'"
  }
  end
 
+
  def toupper_character(string)
  {
  :type => :character,
@@ -392,14 +400,6 @@
  }
  end
 
- def toupper_field(arg)
- {
- :type => :function,
- :value => "toupper",
- :args => [arg]
- }
- end
-
  def length_character(string)
  {
  :type => :integer,
@@ -407,14 +407,6 @@
  }
  end
 
- def length_field(arg)
- {
- :type => :function,
- :value => "length",
- :args => [arg]
- }
- end
-
  def startswith(string)
  # Wrap this string in quotes, as we effectively translate
  # City Eq startswith('far')
@@ -512,14 +504,6 @@
  }
  end
 
- def floor_field(arg)
- {
- :type => :function,
- :value => "floor",
- :args => [arg]
- }
- end
-
  def ceiling_decimal(arg)
  {
  :type => :integer,
@@ -527,14 +511,6 @@
  }
  end
 
- def ceiling_field(arg)
- {
- :type => :function,
- :value => "ceiling",
- :args => [arg]
- }
- end
-
  def round_decimal(arg)
  {
  :type => :integer,
@@ -542,17 +518,8 @@
  }
  end
 
- def round_field(arg)
- {
- :type => :function,
- :value => "round",
- :args => [arg]
- }
- end
-
  def indexof(arg1, arg2)
  {
- :type => :function,
  :value => "indexof",
  :args => [arg1, arg2]
  }
@@ -565,86 +532,6 @@
  }
  end
 
- def concat_field(arg1, arg2)
- {
- :type => :function,
- :value => 'concat',
- :args => [arg1, arg2]
- }
- end
-
- def date_field(arg)
- {
- :type => :function,
- :value => "date",
- :args => [arg]
- }
- end
-
- def time_field(arg)
- {
- :type => :function,
- :value => "time",
- :args => [arg]
- }
- end
-
- def year_field(arg)
- {
- :type => :function,
- :value => "year",
- :args => [arg]
- }
- end
-
- def month_field(arg)
- {
- :type => :function,
- :value => "month",
- :args => [arg]
- }
- end
-
- def day_field(arg)
- {
- :type => :function,
- :value => "day",
- :args => [arg]
- }
- end
-
- def hour_field(arg)
- {
- :type => :function,
- :value => "hour",
- :args => [arg]
- }
- end
-
- def minute_field(arg)
- {
- :type => :function,
- :value => "minute",
- :args => [arg]
- }
- end
-
- def second_field(arg)
- {
- :type => :function,
- :value => "second",
- :args => [arg]
- }
- end
-
- def fractionalseconds_field(arg)
- {
- :type => :function,
- :value => "fractionalseconds",
- :args => [arg]
- }
- end
-
  def date_datetime(dt)
  {
  :type => :date,
@@ -675,7 +562,6 @@
  }
  end
 
- # TODO Donuts: to extend, we'd just replace (coords) param with (linear_ring1,linear_ring2, ...)
  def polygon(coords)
  new_coords = parse_coordinates(coords)
  unless new_coords.size > 2
@@ -795,14 +681,6 @@
  }
  end
 
- def cast_field(value, type)
- {
- :type => :function,
- :value => "cast",
- :args => [value, type]
- }
- end
-
  def cast(value, type)
  if value == 'NULL'
  value = nil
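
Taken together, these changes collapse the per-function `*_field` helpers into one generic path: when any argument is a field (or a function already wrapping one), `call` returns a `:function` node instead of dispatching to a type-specific helper, and validation checks a function argument against its `:return_type`. A rough sketch of the resulting node, assuming the resolver's two-argument constructor and validate/call API, and a `tolower` entry in the support table:

```ruby
require 'sparkql'

# Resolve tolower() applied to a field rather than a literal string.
resolver = Sparkql::FunctionResolver.new('tolower',
  [{ :type => :field, :value => 'City' }])
resolver.validate
node = resolver.call

# Per the merge! calls in the diff above, node should look roughly like:
# {
#   :type => :function,
#   :return_type => :character,
#   :value => "tolower",
#   :function_name => "tolower",
#   :function_parameters => ["City"],
#   :args => [{ :type => :field, :value => "City" }],
#   :field => "City"
# }
```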
data/lib/sparkql/lexer.rb CHANGED
@@ -16,9 +16,6 @@ class Sparkql::Lexer < StringScanner
  end
 
  # Lookup the next matching token
- #
- # TODO the old implementation did value type detection conversion at a later date, we can perform
- # this at parse time if we want!!!!
  def shift
  @token_index = self.pos