sparkql 1.2.5 → 1.3.0

This diff represents the publicly available content of package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
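Most of this change set is a RuboCop-style cleanup of the parser compatibility test suite: hashrocket symbol keys become Ruby 1.9 hash syntax, symbol and string arrays become %i[]/%w[] percent literals, padding inside method-call parentheses is dropped, and unused locals are removed or underscore-prefixed. Two new tests at the end of the file cover Parser#current_timestamp and datetime-to-date coercion. As orientation, a minimal before/after sketch of the cosmetic conversions (illustrative Ruby only, not lines lifted verbatim from the gem):

  # 1.2.5 style, as removed below
  @multiple_types = [:character,:integer]
  filter = { :string => "City Eq 'Fargo'", :type => :character }
  expressions = parser.tokenize( filter )

  # 1.3.0 style, as added below
  @multiple_types = %i[character integer]
  filter = { string: "City Eq 'Fargo'", type: :character }
  expressions = parser.tokenize(filter)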
@@ -2,139 +2,140 @@ require 'test_helper'
 
  class ParserCompatabilityTest < Test::Unit::TestCase
  include Sparkql
-
+
  def setup
- @expression_keys = [:field, :operator, :value]
- @multiple_types = [:character,:integer]
+ @expression_keys = %i[field operator value]
+ @multiple_types = %i[character integer]
  @bad_character_strings = ["'Fargo's Boat'", "Fargo", "''Fargo''", "'Fargo''s'",
- "'Fargo", "Fargo'", "\\'Fargo\\'"]
+ "'Fargo", "Fargo'", "\\'Fargo\\'"]
  @bad_multiple_character_strings = ["'Fargo's Boat'", "Fargo", "''Fargo''", "'Fargo''s'",
- "'Fargo", "Fargo'", "\\'Fargo\\'"]
+ "'Fargo", "Fargo'", "\\'Fargo\\'"]
  @all_bad_strings = @bad_character_strings + @bad_multiple_character_strings
  @test_filters = [
  {
- :string => "City Eq 'Fargo'",
- :type => :character,
- :operator => "Eq"
+ string: "City Eq 'Fargo'",
+ type: :character,
+ operator: "Eq"
  },
  {
- :string => "City Ne 'Fargo'",
- :type => :character,
- :operator => "Not Eq"
+ string: "City Ne 'Fargo'",
+ type: :character,
+ operator: "Not Eq"
  },
  {
- :string => "City Eq 'Fargo','Moorhead'",
- :type => :character,
- :operator => "In"
+ string: "City Eq 'Fargo','Moorhead'",
+ type: :character,
+ operator: "In"
  },
  {
- :string => "City Eq 'Fargo','Moorhead','Bemidji','Duluth'",
- :type => :character,
- :operator => "In"
+ string: "City Eq 'Fargo','Moorhead','Bemidji','Duluth'",
+ type: :character,
+ operator: "In"
  },
  {
- :string => "City Ne 'Fargo','Moorhead','Bemidji','Duluth'",
- :type => :character,
- :operator => "Not In"
+ string: "City Ne 'Fargo','Moorhead','Bemidji','Duluth'",
+ type: :character,
+ operator: "Not In"
  },
  {
- :string => "IntegerField Eq 2001",
- :type => :integer,
- :operator => "Eq"
+ string: "IntegerField Eq 2001",
+ type: :integer,
+ operator: "Eq"
  },
  {
- :string => "IntegerField Eq -2001",
- :type => :integer,
- :operator => "Eq"
+ string: "IntegerField Eq -2001",
+ type: :integer,
+ operator: "Eq"
  },
  {
- :string => "IntegerField Eq 2001,2002",
- :type => :integer,
- :operator => "In"
+ string: "IntegerField Eq 2001,2002",
+ type: :integer,
+ operator: "In"
  },
  {
- :string => "IntegerField Eq -2001,-2002",
- :type => :integer,
- :operator => "In"
+ string: "IntegerField Eq -2001,-2002",
+ type: :integer,
+ operator: "In"
  },
  {
- :string => "FloatField Eq 2001.120",
- :type => :decimal,
- :operator => "Eq"
+ string: "FloatField Eq 2001.120",
+ type: :decimal,
+ operator: "Eq"
  },
  {
- :string => "FloatField Eq -2001.120",
- :type => :decimal,
- :operator => "Eq"
+ string: "FloatField Eq -2001.120",
+ type: :decimal,
+ operator: "Eq"
  },
  {
- :string => "FloatField Eq 9.1E-6",
- :type => :decimal,
- :operator => "Eq"
+ string: "FloatField Eq 9.1E-6",
+ type: :decimal,
+ operator: "Eq"
  },
  {
- :string => "FloatField Eq -9.1E-6",
- :type => :decimal,
- :operator => "Eq"
+ string: "FloatField Eq -9.1E-6",
+ type: :decimal,
+ operator: "Eq"
  },
  {
- :string => "FloatField Eq 1.0E8",
- :type => :decimal,
- :operator => "Eq"
+ string: "FloatField Eq 1.0E8",
+ type: :decimal,
+ operator: "Eq"
  },
  {
- :string => "FloatField Eq -2001.120,-2002.0",
- :type => :decimal,
- :operator => "In"
+ string: "FloatField Eq -2001.120,-2002.0",
+ type: :decimal,
+ operator: "In"
  },
  {
- :string => "FloatField Eq 100.1,2,3.4",
- :type => :decimal,
- :operator => "In"
+ string: "FloatField Eq 100.1,2,3.4",
+ type: :decimal,
+ operator: "In"
  },
  {
- :string => "DateField Eq 2010-10-10",
- :type => :date,
- :operator => "Eq"
+ string: "DateField Eq 2010-10-10",
+ type: :date,
+ operator: "Eq"
  },
  {
- :string => "TimestampField Eq 2010-10-10T10:10:30.000000",
- :type => :datetime,
- :operator => "Eq"
+ string: "TimestampField Eq 2010-10-10T10:10:30.000000",
+ type: :datetime,
+ operator: "Eq"
  },
  {
- :string => "TimestampField Lt 2010-10-10T10:10:30.000000",
- :type => :datetime,
- :operator => "Lt"
+ string: "TimestampField Lt 2010-10-10T10:10:30.000000",
+ type: :datetime,
+ operator: "Lt"
  },
  {
- :string => "TimestampField Gt 2010-10-10T10:10:30.000000",
- :type => :datetime,
- :operator => "Gt"
+ string: "TimestampField Gt 2010-10-10T10:10:30.000000",
+ type: :datetime,
+ operator: "Gt"
  },
  {
- :string => "TimestampField Ge 2010-10-10T10:10:30.000000",
- :type => :datetime,
- :operator => "Ge"
+ string: "TimestampField Ge 2010-10-10T10:10:30.000000",
+ type: :datetime,
+ operator: "Ge"
  },
  {
- :string => "TimestampField Le 2010-10-10T10:10:30.000000",
- :type => :datetime,
- :operator => "Le"
+ string: "TimestampField Le 2010-10-10T10:10:30.000000",
+ type: :datetime,
+ operator: "Le"
  },
  {
- :string => "BooleanField Eq true",
- :type => :boolean,
- :operator => "Eq"
+ string: "BooleanField Eq true",
+ type: :boolean,
+ operator: "Eq"
  },
  {
- :string => "BooleanField Eq false",
- :type => :boolean,
- :operator => "Eq"
- }]
+ string: "BooleanField Eq false",
+ type: :boolean,
+ operator: "Eq"
+ }
+ ]
  end
 
- def compare_expression_to_tokens( expression, tokens )
+ def compare_expression_to_tokens(expression, tokens)
  counter = 0
  @expression_keys.each do |key|
  assert_equal tokens[counter], expression[key]
@@ -143,7 +144,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  end
 
  def find_operator(string)
- ["Eq","Ne","Gt","Ge","Lt","Le"].each do |op|
+ %w[Eq Ne Gt Ge Lt Le].each do |op|
  return op if string.include? " #{op} "
  end
  nil
@@ -153,17 +154,17 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  filter = "City Eq 'Fargo'"
  filter_tokens = filter.split(" ")
  parser = Parser.new
- expressions = parser.tokenize( filter )
+ expressions = parser.tokenize(filter)
 
  assert !parser.errors?
  assert_equal 1, expressions.size, "#Expressions {expressions.inspect}"
  compare_expression_to_tokens(expressions.first, filter_tokens)
  end
-
+
  test "types" do
  @test_filters.each do |elem|
  parser = Parser.new
- expressions = parser.tokenize( elem[:string] )
+ expressions = parser.tokenize(elem[:string])
 
  assert !parser.errors?, "Query: #{elem.inspect}"
  assert_equal elem[:type], expressions.first[:type]
@@ -173,7 +174,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  test "operators" do
  @test_filters.each do |elem|
  parser = Parser.new
- expressions = parser.tokenize( elem[:string] )
+ expressions = parser.tokenize(elem[:string])
  assert !parser.errors?, "Query: #{elem.inspect} #{parser.errors.inspect}"
  assert_equal elem[:operator], expressions.first[:operator]
  end
@@ -187,7 +188,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  filter_tokens << f.split(" ")
  end
  parser = Parser.new
- expressions = parser.tokenize( filter )
+ expressions = parser.tokenize(filter)
 
  assert !parser.errors?
  assert_equal 2, expressions.size
@@ -207,7 +208,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  filter_tokens << f.split(" ")
  end
  parser = Parser.new
- expressions = parser.tokenize( filter )
+ expressions = parser.tokenize(filter)
 
  assert !parser.errors?
  assert_equal 2, expressions.size
@@ -226,42 +227,40 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  filter_tokens = filter.split(" ")
 
  filter_tokens.each do |token|
- f = filter.gsub(token, "").gsub(/\s+/," ")
+ f = filter.gsub(token, "").gsub(/\s+/, " ")
  parser = Parser.new
- expressions = parser.tokenize( f )
+ expressions = parser.tokenize(f)
  assert_nil expressions
  assert parser.errors?
  end
  end
 
  test "tokenize fail on invalid string operator" do
-
  filter = "City Eq "
 
  @bad_character_strings.each do |string|
  f = filter + string
  parser = Parser.new
- expressions = parser.tokenize( f )
+ expressions = parser.tokenize(f)
  assert_nil expressions
  assert parser.errors?
  end
  end
 
  test "tokenize fail on invalid operator or field" do
- filters = ["Eq Eq 'Fargo'","City City 'Fargo'", "And Eq 'Fargo'",
- "City And 'Fargo'", "city eq 'Fargo'"]
+ filters = ["Eq Eq 'Fargo'", "City City 'Fargo'", "And Eq 'Fargo'",
+ "City And 'Fargo'", "city eq 'Fargo'"]
  filters.each do |f|
  parser = Parser.new
- expressions = parser.tokenize( f )
+ expressions = parser.tokenize(f)
  assert_nil expressions, "filter: #{f}"
  assert parser.errors?
- error = parser.errors.first
  end
  end
 
  test "report token index on error" do
  parser = Parser.new
- expressions = parser.tokenize( "MlsStatus 2eq 'Active'" )
+ parser.tokenize("MlsStatus 2eq 'Active'")
  error = parser.errors.first
 
  assert_equal "2", error.token
@@ -270,26 +269,27 @@ class ParserCompatabilityTest < Test::Unit::TestCase
 
  test "tokenize edge case string value" do
  good_strings = ["'Fargo\\'s Boat'", "'Fargo'", "'Fargo\\'\\'s'",
- "' Fargo '", " 'Fargo' "]
+ "' Fargo '", " 'Fargo' "]
 
- filters = ["City Eq ","City Eq ", "City Eq "]
+ filters = ["City Eq ", "City Eq ", "City Eq "]
 
  filters.each do |filter|
  good_strings.each do |string|
  f = filter + string
  parser = Parser.new
- expressions = parser.tokenize( f )
+ expressions = parser.tokenize(f)
  assert !parser.errors?
  assert_equal 1, expressions.size
  assert_equal string.strip, expressions.first[:value]
  end
  end
  end
-
+
  test "get multiple values" do
  @test_filters.each do |f|
- op = find_operator f[:string]
- next unless @multiple_types.include?(f[:type]) || op.nil?
+ op = find_operator f[:string]
+ next unless @multiple_types.include?(f[:type]) || op.nil?
+
  parser = Parser.new
  val = f[:string].split(" #{op} ")[1]
  vals = parser.tokenize(f[:string]).first[:value]
@@ -298,13 +298,13 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  end
 
  test "multiples fail with unsupported operators" do
- ["Gt","Ge","Lt","Le"].each do |op|
- f = "IntegerType #{op} 100,200"
+ %w[Gt Ge Lt Le].each do |op|
+ f = "IntegerType #{op} 100,200"
  parser = Parser.new
- expressions = parser.tokenize( f )
+ parser.tokenize(f)
  assert parser.errors?
  assert_equal op, parser.errors.first.token
- end
+ end
  end
 
  test "bad multiples" do
@@ -315,10 +315,10 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  assert_nil ex
  end
  end
-
- test "mulitples shouldn't restrict based on string size(OMG LOL THAT WAS FUNNYWTF)" do
+
+ test "mulitples shouldn't restrict based on string size(OMG LOL THAT WAS FUNNYWTF)" do
  parser = Parser.new
- ex = parser.tokenize("ListAgentId Eq '20110000000000000000000000'")
+ parser.tokenize("ListAgentId Eq '20110000000000000000000000'")
  assert !parser.errors?, parser.inspect
  end
 
@@ -337,7 +337,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  test "max out expressions" do
  parser = Parser.new
  to_the_max = []
- 80.times do |x|
+ 80.times do |_x|
  to_the_max << "City Eq 'Fargo'"
  end
  vals = parser.tokenize(to_the_max.join(" And "))
@@ -365,22 +365,22 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  test "max out function args" do
  parser = Parser.new
  to_the_max = []
- 201.times do |x|
+ 201.times do |_x|
  to_the_max << "1"
  end
- vals = parser.tokenize("Args Eq myfunc(#{to_the_max.join(",")})")
+ parser.tokenize("Args Eq myfunc(#{to_the_max.join(',')})")
  assert parser.errors?
  assert parser.errors.first.constraint?
  end
 
  test "API-107 And/Or in string spiel" do
- search_strings = ['Tom And Jerry', 'Tom Or Jerry', 'And Or Eq', 'City Eq \\\'Fargo\\\'',
- ' And Eq Or ', 'Or And Not']
- search_strings.each do |s|
- parser = Parser.new
- parser.tokenize("City Eq '#{s}' And PropertyType Eq 'A'")
- assert !parser.errors?
- end
+ search_strings = ['Tom And Jerry', 'Tom Or Jerry', 'And Or Eq', 'City Eq \\\'Fargo\\\'',
+ ' And Eq Or ', 'Or And Not']
+ search_strings.each do |s|
+ parser = Parser.new
+ parser.tokenize("City Eq '#{s}' And PropertyType Eq 'A'")
+ assert !parser.errors?
+ end
  end
 
  test "general paren test" do
@@ -390,7 +390,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  "(City Eq 'Fargo') And (City Eq 'Moorhead')"
  ].each do |filter|
  parser = Parser.new
- p = parser.tokenize(filter)
+ parser.tokenize(filter)
  assert !parser.errors?
  end
  end
@@ -405,7 +405,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  "City Eq 'Fargo' (And) City Eq 'Moorhead'"
  ].each do |filter|
  parser = Parser.new
- p = parser.tokenize(filter)
+ parser.tokenize(filter)
  assert parser.errors?
  end
  end
@@ -423,7 +423,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  p = parser.tokenize("(City Eq 'Fargo' Or TotalBr Eq 2) And PropertyType Eq 'A'")
  assert !parser.errors?
  p.each do |token|
- if ["City","TotalBr"].include?(token[:field])
+ if %w[City TotalBr].include?(token[:field])
  assert_equal 1, token[:level], "Token: #{token.inspect}"
  else
  assert_equal 0, token[:level]
@@ -431,11 +431,11 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  end
 
  parser = Parser.new
- p = parser.tokenize("(City Eq 'Fargo' Or TotalBr Eq 2 Or City Eq 'Moorhead') " +
+ p = parser.tokenize("(City Eq 'Fargo' Or TotalBr Eq 2 Or City Eq 'Moorhead') " \
  "And PropertyType Eq 'A' And (TotalBr Eq 1 And TotalBr Eq 2)")
  assert !parser.errors?
  p.each do |token|
- if ["City","TotalBr"].include?(token[:field])
+ if %w[City TotalBr].include?(token[:field])
  assert_equal 1, token[:level]
  else
  assert_equal 0, token[:level]
@@ -445,7 +445,7 @@ class ParserCompatabilityTest < Test::Unit::TestCase
 
  test "maximum nesting of 2" do
  parser = Parser.new
- p = parser.tokenize("(City Eq 'Fargo' Or (TotalBr Eq 2 And (City Eq 'Moorhead'))) And PropertyType Eq 'A'")
+ parser.tokenize("(City Eq 'Fargo' Or (TotalBr Eq 2 And (City Eq 'Moorhead'))) And PropertyType Eq 'A'")
  assert parser.errors?
  assert_equal "You have exceeded the maximum nesting level. Please nest no more than 2 levels deep.", parser.errors.first.message
  end
@@ -454,38 +454,38 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  filter = '"General Property Description"."Zoning" Eq \'Commercial\''
  filter_tokens = ['"General Property Description"."Zoning"', 'Eq', "'Commercial'"]
  parser = Parser.new
- expressions = parser.tokenize( filter )
-
+ expressions = parser.tokenize(filter)
+
  assert !parser.errors?, "Parser errrors [#{filter}]: #{parser.errors.inspect}"
  assert_equal 1, expressions.size, "Expression #{expressions.inspect}"
  compare_expression_to_tokens(expressions.first, filter_tokens)
  assert expressions.first[:custom_field], "Expression #{expressions.first.inspect}"
  end
-
+
  test "tokenize custom field with special characters" do
  filter = '"Security"."@R080T$\' ` ` `#" Eq \'R2D2\''
  filter_tokens = ['"Security"."@R080T$\' ` ` `#"', 'Eq', "'R2D2'"]
  parser = Parser.new
- expressions = parser.tokenize( filter )
+ expressions = parser.tokenize(filter)
  assert !parser.errors?, "Parser errrors [#{filter}]: #{parser.errors.inspect}"
  assert_equal 1, expressions.size, "Expression #{expressions.inspect}"
  compare_expression_to_tokens(expressions.first, filter_tokens)
  assert expressions.first[:custom_field], "Expression #{expressions.first.inspect}"
  end
-
+
  test "custom field supports all types" do
  types = {
- :character => "'character'",
- :integer => 1234,
- :decimal => 12.34,
- :boolean => true
+ character: "'character'",
+ integer: 1234,
+ decimal: 12.34,
+ boolean: true
  }
- types.each_pair do |key, value|
- filter = '"Details"."Random" Eq ' + "#{value}"
- filter_tokens = ['"Details"."Random"', 'Eq', "#{value}"]
+ types.each_pair do |_key, value|
+ filter = "\"Details\".\"Random\" Eq #{value}"
+ filter_tokens = ['"Details"."Random"', 'Eq', value.to_s]
  parser = Parser.new
- expressions = parser.tokenize( filter )
-
+ expressions = parser.tokenize(filter)
+
  assert !parser.errors?, "Parser errrors [#{filter}]: #{parser.errors.inspect}"
  assert_equal 1, expressions.size, "Expression #{expressions.inspect}"
  compare_expression_to_tokens(expressions.first, filter_tokens)
@@ -495,13 +495,13 @@ class ParserCompatabilityTest < Test::Unit::TestCase
 
  test "escape boolean value" do
  parser = Parser.new
- expressions = parser.tokenize( "BooleanField Eq true" )
+ expressions = parser.tokenize("BooleanField Eq true")
  assert_equal true, parser.escape_value(expressions.first)
  end
 
  test "escape decimal values" do
  parser = Parser.new
- expressions = parser.tokenize( "DecimalField Eq 0.00005 And DecimalField Eq 5.0E-5" )
+ expressions = parser.tokenize("DecimalField Eq 0.00005 And DecimalField Eq 5.0E-5")
  assert_equal 5.0E-5, parser.escape_value(expressions.first)
  assert_equal parser.escape_value(expressions.first), parser.escape_value(expressions.last)
  end
@@ -509,23 +509,22 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  test "Between" do
  ["BathsFull Bt 10,20", "DateField Bt 2012-12-31,2013-01-31"].each do |f|
  parser = Parser.new
- expressions = parser.tokenize f
+ parser.tokenize f
  assert !parser.errors?, "should successfully parse proper between values, but #{parser.errors.first}"
  end
 
  # truckload of fail
  ["BathsFull Bt 2012-12-31,1", "DateField Bt 10,2012-12-31"].each do |f|
  parser = Parser.new
- expressions = parser.tokenize f
+ parser.tokenize f
  assert parser.errors?, "should have a type mismatch: #{parser.errors.first}"
  assert_match /Type mismatch/, parser.errors.first.message
  end
-
  end
-
+
  test "integer type coercion" do
  parser = Parser.new
- expression = parser.tokenize( "DecimalField Eq 100").first
+ expression = parser.tokenize("DecimalField Eq 100").first
  assert parser.send(:check_type!, expression, :decimal)
  assert_equal 100.0, parser.escape_value(expression)
  end
@@ -540,13 +539,13 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  test "datetime->date type coercion" do
  t = Time.now
  parser = Parser.new
- expression = parser.tokenize( "DateField Eq now()").first
+ expression = parser.tokenize("DateField Eq now()").first
  assert !parser.errors?
  assert parser.send(:check_type!, expression, :date)
- assert_equal t.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT),
+ assert_equal t.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT),
  parser.escape_value(expression).strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT)
  end
-
+
  test "datetime->date type coercion array" do
  today = Time.now
  parser = Parser.new
@@ -554,9 +553,9 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  assert !parser.errors?
  assert parser.send(:check_type!, expression, :date)
  yesterday = today - 3600 * 24
- assert_equal [ yesterday.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT),
- today.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT)],
- parser.escape_value(expression).map { |i| i.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT)}
+ assert_equal([yesterday.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT),
+ today.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT)],
+ parser.escape_value(expression).map { |i| i.strftime(Sparkql::FunctionResolver::STRFTIME_DATE_FORMAT) })
  end
 
  test "nested function with altering types" do
@@ -580,4 +579,31 @@ class ParserCompatabilityTest < Test::Unit::TestCase
  # Type mismatch
  assert !parser.send(:check_type!, expression, :datetime)
  end
+
+ test "#current_timestamp" do
+ before_time = Time.now
+ parser = Parser.new
+ parser_time = parser.current_timestamp
+ after_time = Time.now
+
+ assert before_time < parser_time
+ assert after_time > parser_time
+
+ # Current time is locked to first call
+ assert_equal parser_time, parser.current_timestamp
+ end
+
+ test "datetime->date conversions" do
+ conversions = {
+ '2022-01-18T23:00:00.000000-0600' => Date.new(2022, 1, 18),
+ '2022-01-18T00:00:00.000000-0500' => Date.new(2022, 1, 18)
+ }
+ parser = Parser.new
+ conversions.each do |timestamp, date|
+ expression = parser.tokenize("DateField Eq #{timestamp}").first
+ assert !parser.errors?
+ assert parser.send(:check_type!, expression, :date)
+ assert_equal date, parser.escape_value(expression), "#{timestamp}: #{date}"
+ end
+ end
  end
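Beyond the style cleanup, the new "#current_timestamp" test above indicates that 1.3.0 adds a current_timestamp accessor to the parser, captured on first use and then held constant for the lifetime of the parser instance. A short usage sketch, assuming only the behavior that test exercises:

  require 'sparkql'

  parser = Sparkql::Parser.new
  first_call = parser.current_timestamp  # time is captured on the first call
  # subsequent calls return the same value (locked to the first call, per the test above)
  parser.current_timestamp == first_call # => true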