sparkql 0.3.20 → 0.3.21

checksums.yaml CHANGED
@@ -1,15 +1,15 @@
 ---
 !binary "U0hBMQ==":
   metadata.gz: !binary |-
-    ZjQ1YjRjYWNmMmE1ODNiMDY3NmZmODQ2ZjYwYWMyOTZlNDc1ODFmMg==
+    NWY3NjdhZWI4ZDViNDE4YTJhYmI1YTI1YzE0YzExYzRhMTVlNzFiMQ==
   data.tar.gz: !binary |-
-    ZWZkMWE2NmIzYzM0YjAwYzUyYjM0NDBiYWVlZTc0ZmQ1MGQ0MDlhZQ==
+    Y2Y1OTQwNjQ2NWQzYWMwMjI2YmEwZDU4NzgyMjNjN2FhMDllODI4NQ==
 SHA512:
   metadata.gz: !binary |-
-    MWFhZDQ5MTdiOWNmMzVlMWM2MDBkYzdmMjU0OGYwOGUzZTQ0ZmExYjUxY2Ex
-    ZWFlOTQwM2FiMGMxMGYwNzJjYjU4NDdkNmNlMDFiMjg0M2VmMTRhZTM4OWU0
-    ZjE4Yjc0YjA4ZjczMjlhYzA5MDEyZGMxNzliMThlM2U4YTFlMGI=
+    N2IwY2IyMzQwNmViODZiMzQxOTM3MGZkNDMwNTI2YzNjYjQ4N2E3MWZkZTBl
+    MmMwNjQ5MGI5MTdmYTQ3Y2Y3YTEwNTIyYmFlOTAxNWIwNzI4NDZlYjk2NDU3
+    YTZhYjFiMTk0ZDE0Y2MxNGNiN2ExZTQxY2Y0NzYzZDMxYTJkNTA=
   data.tar.gz: !binary |-
-    OWI1MWY5NDkxYWQ1NDg3NTcyZGUxODU0ZTVhOWE3YjM2NzQyMjFiOTUxMGRh
-    MTQwN2IzMjIxOTU1MGY0ZmMzY2IxYjc3ZTkwMzlhYzFlZjBmZjNiNWNjOTUx
-    ZjY4NDQyOWViOGM3Y2RmYmY5N2MzOTY0YTRlNTA3MDkwNWEyODg=
+    MWVkMDNhYzNmOTk2ZGNmYzg2ODcwOWRmMWVkNGQzYTM4ZDQ2NjY5NmQ5YzQ2
+    NjZhZTU1ZWFhMjlhZTEzNWYzNGQ5Y2MzY2RiNjY4Yzc2Nzk0NmYyNGQ0ZWNk
+    MGM2NjYyN2U2ZGZkMmExMzI0MTY4YTU4MWQ4NzE0ZWFiN2Y4ZTE=
data/.gitignore CHANGED
@@ -5,3 +5,4 @@ lib/sparkql/*.output
 pkg/*
 *.swp
 test/reports
+vendor/bundle
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
+v0.3.21, 2015-09-24 ([changes](https://github.com/sparkapi/sparkql/compare/v0.3.20...v0.3.21))
+-------------------
+
+* [IMPROVEMENT] Record token index and current token in lexer, for error reporting
 
-v0.3.2, 2015-04-14 ([changes](https://github.com/sparkapi/sparkql/compare/v0.3.18...v0.3.20))
+v0.3.20, 2015-04-14 ([changes](https://github.com/sparkapi/sparkql/compare/v0.3.18...v0.3.20))
 -------------------
 
 * [BUGFIX] Allow seconds for ISO-8601
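
The improvement above is the whole substance of this release: the lexer now remembers the token it just matched and the character offset where that token starts, and parse errors carry that offset. A minimal sketch of how this surfaces to a caller, mirroring the gem's new tests further down in this diff:

```ruby
require 'sparkql'

parser = Sparkql::Parser.new
parser.tokenize("MlsStatus 2eq 'Active'")  # invalid filter; tokenize returns nil

error = parser.errors.first
error.token        # => "2"  (the offending token)
error.token_index  # => 10   (its character offset in the filter string)
```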
data/VERSION CHANGED
@@ -1 +1 @@
-0.3.20
+0.3.21
data/lib/sparkql/errors.rb CHANGED
@@ -38,11 +38,12 @@ class ErrorsProcessor
 end
 
 class ParserError
-  attr_accessor :token, :expression, :message, :status, :recovered_as
+  attr_accessor :token, :token_index, :expression, :message, :status, :recovered_as
   attr_writer :syntax, :constraint
 
   def initialize(error_hash={})
     @token = error_hash[:token]
+    @token_index = error_hash[:token_index]
     @expression = error_hash[:expression]
     @message = error_hash[:message]
     @status = error_hash[:status]
@@ -80,4 +81,4 @@ class ParserError
   end
 end
 
-end
+end
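
`ParserError` simply reads the new `:token_index` key out of the hash it is built from; a sketch of the new field in isolation (the `:message` text here is illustrative, not the gem's exact wording):

```ruby
require 'sparkql'

error = Sparkql::ParserError.new(
  :token => "2",
  :token_index => 10,
  :message => "Error parsing token integer",
  :status => :fatal
)
error.token_index  # => 10
```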
data/lib/sparkql/lexer.rb CHANGED
@@ -3,7 +3,7 @@ class Sparkql::Lexer < StringScanner
 
   attr_accessor :level, :block_group_identifier
 
-  attr_reader :last_field
+  attr_reader :last_field, :current_token_value, :token_index
 
   def initialize(str)
     str.freeze
@@ -18,43 +18,45 @@ class Sparkql::Lexer < StringScanner
   # TODO the old implementation did value type detection conversion at a later date, we can perform
   # this at parse time if we want!!!!
   def shift
+    @token_index = self.pos
+
     token = case
-    when value = scan(SPACE)
-      [:SPACE, value]
-    when value = scan(LPAREN)
+    when @current_token_value = scan(SPACE)
+      [:SPACE, @current_token_value]
+    when @current_token_value = scan(LPAREN)
       levelup
-      [:LPAREN, value]
-    when value = scan(RPAREN)
+      [:LPAREN, @current_token_value]
+    when @current_token_value = scan(RPAREN)
       # leveldown: do this after parsing group
-      [:RPAREN, value]
-    when value = scan(/\,/)
-      [:COMMA,value]
-    when value = scan(NULL)
+      [:RPAREN, @current_token_value]
+    when @current_token_value = scan(/\,/)
+      [:COMMA,@current_token_value]
+    when @current_token_value = scan(NULL)
       literal :NULL, "NULL"
-    when value = scan(STANDARD_FIELD)
-      check_standard_fields(value)
-    when value = scan(DATETIME)
-      literal :DATETIME, value
-    when value = scan(DATE)
-      literal :DATE, value
-    when value = scan(DECIMAL)
-      literal :DECIMAL, value
-    when value = scan(INTEGER)
-      literal :INTEGER, value
-    when value = scan(CHARACTER)
-      literal :CHARACTER, value
-    when value = scan(BOOLEAN)
-      literal :BOOLEAN, value
-    when value = scan(KEYWORD)
-      check_keywords(value)
-    when value = scan(CUSTOM_FIELD)
-      [:CUSTOM_FIELD,value]
+    when @current_token_value = scan(STANDARD_FIELD)
+      check_standard_fields(@current_token_value)
+    when @current_token_value = scan(DATETIME)
+      literal :DATETIME, @current_token_value
+    when @current_token_value = scan(DATE)
+      literal :DATE, @current_token_value
+    when @current_token_value = scan(DECIMAL)
+      literal :DECIMAL, @current_token_value
+    when @current_token_value = scan(INTEGER)
+      literal :INTEGER, @current_token_value
+    when @current_token_value = scan(CHARACTER)
+      literal :CHARACTER, @current_token_value
+    when @current_token_value = scan(BOOLEAN)
+      literal :BOOLEAN, @current_token_value
+    when @current_token_value = scan(KEYWORD)
+      check_keywords(@current_token_value)
+    when @current_token_value = scan(CUSTOM_FIELD)
+      [:CUSTOM_FIELD,@current_token_value]
     when empty?
      [false, false] # end of file, \Z don't work with StringScanner
     else
       [:UNKNOWN, "ERROR: '#{self.string}'"]
     end
-    #value.freeze
+
     token.freeze
   end
 
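Because `@token_index` is captured from `StringScanner#pos` before any `scan` call, it records the offset at which the token about to be matched begins. A standalone sketch of that behavior using only Ruby's stdlib scanner (which `Sparkql::Lexer` subclasses):

```ruby
require 'strscan'

s = StringScanner.new("City Eq 'Fargo'")
s.pos          # => 0, "City" starts here
s.scan(/\w+/)  # => "City"
s.pos          # => 4, the next token (the space) starts here
```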
data/lib/sparkql/parser_compatibility.rb CHANGED
@@ -193,6 +193,11 @@ module Sparkql::ParserCompatibility
   private
 
   def tokenizer_error( error_hash )
+
+    if @lexer
+      error_hash[:token_index] = @lexer.token_index
+    end
+
     self.errors << Sparkql::ParserError.new( error_hash )
   end
   alias :compile_error :tokenizer_error
data/lib/sparkql/parser_tools.rb CHANGED
@@ -118,7 +118,7 @@ module Sparkql::ParserTools
     token_name = token_to_str(error_token_id)
     token_name.downcase!
     token = error_value.to_s.inspect
-    tokenizer_error(:token => @lexer.last_field,
+    tokenizer_error(:token => @lexer.current_token_value,
       :message => "Error parsing token #{token_name}",
       :status => :fatal,
       :syntax => true)
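
With both the offending token and its offset attached to the error, a client can point directly at the failure in the filter string. A hypothetical helper, not part of sparkql, using the values the gem's tests below assert:

```ruby
require 'sparkql'

# Hypothetical helper, not part of the gem: underline the failing token
# using the token_index now attached to the error.
def underline_error(filter, error)
  filter + "\n" + (" " * error.token_index) + "^"
end

parser = Sparkql::Parser.new
parser.tokenize("MlsStatus 2eq 'Active'")
puts underline_error("MlsStatus 2eq 'Active'", parser.errors.first)
# MlsStatus 2eq 'Active'
#           ^
```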
data/test/unit/lexer_test.rb CHANGED
@@ -2,6 +2,21 @@ require 'test_helper'
 
 class LexerTest < Test::Unit::TestCase
   include Sparkql
+
+  test "record the current token and current token position" do
+    @lexer = Lexer.new "City Eq 'Fargo'"
+    token = @lexer.shift
+    assert_equal "City", @lexer.current_token_value
+    assert_equal 0, @lexer.token_index
+
+    token = @lexer.shift
+    assert_equal " ", @lexer.current_token_value
+    assert_equal 4, @lexer.token_index
+
+    token = @lexer.shift
+    assert_equal "Eq", @lexer.current_token_value
+    assert_equal 5, @lexer.token_index
+  end
 
   def test_check_reserved_words_standard_fields
     ["OrOrOr Eq true", "Equador Eq true", "Oregon Ge 10"].each do |standard_field|
data/test/unit/parser_compatability_test.rb CHANGED
@@ -241,9 +241,19 @@ class ParserCompatabilityTest < Test::Unit::TestCase
       expressions = parser.tokenize( f )
       assert_nil expressions, "filter: #{f}"
       assert parser.errors?
+      error = parser.errors.first
     end
   end
 
+  test "report token index on error" do
+    parser = Parser.new
+    expressions = parser.tokenize( "MlsStatus 2eq 'Active'" )
+    error = parser.errors.first
+
+    assert_equal "2", error.token
+    assert_equal 10, error.token_index
+  end
+
   test "tokenize edge case string value" do
     good_strings = ["'Fargo\\'s Boat'", "'Fargo'", "'Fargo\\'\\'s'",
                     "' Fargo '", " 'Fargo' "]
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sparkql
 version: !ruby/object:Gem::Version
-  version: 0.3.20
+  version: 0.3.21
 platform: ruby
 authors:
 - Wade McEwen
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-05-14 00:00:00.000000000 Z
+date: 2015-09-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: georuby