sparkql 1.2.8 → 1.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1d6df2039c1c9c690fe60002b8374ec300e1ffab8496b02e668da3f77999d2e8
-  data.tar.gz: a529df747695e8c43a45bec3f166cc8e1b2cfa34a671173a998b4620fe522a67
+  metadata.gz: 5988efc1b20cbe3cd2baec8537c3d9331564962a1d502d2ceaf63d78f520d218
+  data.tar.gz: e1b20183b39463bf221eeddbcbdbb32a3d06457f2108c5c2ae6da471fdfdebf3
 SHA512:
-  metadata.gz: e4a0fa65b7cb2b41198f239a6c68613939e3f6f0ce4ce20a9ae00b1ea4f7ae377cdd41b3b67f4a2da158344e33fa2d12d0a9cec7cc7c64a85a14e9f35bdffbdc
-  data.tar.gz: 746cfed6761f6bdc8ec7dfe91ad45b20d44fd6a67a7fe13e184406a8fab533c1a4122435242447da803160c4c4e3f05a8a8e61ba3a83ca568f2dd4d2b305ed1a
+  metadata.gz: 707c7f59f776ff70d9c4d164c4177a4800a81da80b2a0b4c7a99fdf7f1c7e632a67e155f11ead5c836fb48cc2ceec364adb8d359ecea6d714cc86f2bea2e0c58
+  data.tar.gz: e74cf6657fd5e9cd81dbd23cbdb07c0506ac84280d7651d1b07e11dc8656f56105bde0b6273e7d4fdfd83a91edaa1742e77a4ce6717ca94daf60e5124c34f313
data/.rubocop.yml ADDED
@@ -0,0 +1,111 @@
+AllCops:
+  NewCops: disable
+  Exclude:
+    - 'bin/*'
+    - 'config/**/*'
+    - 'Rakefile'
+    - 'Capfile'
+    - 'Gemfile'
+    - 'Guardfile'
+    - 'test/factories/*'
+    - 'test/support/*'
+    - 'config/routes.rb'
+    - 'script/*'
+    - 'db/**/*'
+    - 'vendor/**/*'
+
+Style/FrozenStringLiteralComment:
+  Enabled: false
+
+Style/Documentation:
+  Enabled: false
+
+Metrics/AbcSize:
+  Enabled: false
+
+Metrics/BlockLength:
+  Enabled: false
+
+Metrics/ClassLength:
+  Enabled: false
+
+Metrics/CyclomaticComplexity:
+  Enabled: false
+
+Layout/LineLength:
+  Enabled: false
+
+Metrics/MethodLength:
+  Enabled: false
+
+Metrics/ModuleLength:
+  Enabled: false
+
+Metrics/PerceivedComplexity:
+  Enabled: false
+
+# "Favor `unless` over `if` for negative conditions."
+Style/NegatedIf:
+  Enabled: false
+# safe_yaml seems to break all the things.
+Security/YAMLLoad:
+  Enabled: false
+
+# "Use a guard clause (`return unless extra_types.any?`) instead
+# of wrapping the code inside a conditional expression."
+#
+# Justification: guard clauses don't work very well with long lines.
+# Also, when there's an if check that (say) adds an error to a model
+# validation, it makes more sense to wrap the operation in an if block
+# than to guard the error entry with a double negative.
+Style/GuardClause:
+  Enabled: false
+
+# Justification:
+#
+# `class MyModule::ClassName` is a lot more concise, especially for tests
+# covering a class that is within a module, than having to wrap the whole
+# class in a module and indent.
+#
+# "Use nested module/class definitions instead of compact style."
+Style/ClassAndModuleChildren:
+  Enabled: false
+
+# Justification:
+#
+# A single-line guard clause isn't always a good thing.
+Style/IfUnlessModifier:
+  Enabled: false
+
+# Justification:
+#
+# Hundreds of existing infractions, and it's not really that confusing to
+# see a regex without parens around it.
+Lint/AmbiguousRegexpLiteral:
+  Enabled: false
+
+# Justification:
+#
+# Is it so wrong to have a variable named fgo_listing_1 instead
+# of fgo_listing1?
+Naming/VariableNumber:
+  Enabled: false
+
+# Justification:
+#
+# An explicit else is much clearer than
+# a branch that ends with an `elsif` and
+# presumes a nil else.
+Style/EmptyElse:
+  Enabled: false
+
+# Justification:
+#
+# We've generally preferred this, and honestly, I find
+# that including it often makes the code much more
+# readable.
+Style/RedundantSelf:
+  Enabled: false
+
+Style/StringLiterals:
+  Enabled: false
data/.ruby-version ADDED
@@ -0,0 +1 @@
+2.5.8
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+v1.3.0, 2022-02-01
+-------------------
+* [BUGFIX] Redesign FunctionResolver to better support other timezones
+
 v1.2.8, 2021-08-11
 -------------------
 * [IMPROVEMENT] all() function
data/Rakefile CHANGED
@@ -15,7 +15,7 @@ rule '.rb' => '.y' do |t|
 end
 
 desc "Compile the racc parser from the grammar"
-task :compile => ["lib/sparkql/parser.rb", "grammar"]
+task compile: ["lib/sparkql/parser.rb", "grammar"]
 
 desc "Generate grammar Documentation"
 task :grammar do
@@ -27,5 +27,4 @@ Rake::Task[:test].prerequisites.unshift "lib/sparkql/parser.rb"
 Rake::Task[:test].prerequisites.unshift "grammar"
 
 desc 'Default: run unit tests.'
-task :default => :test
-
+task default: :test
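
Note on the Rakefile change: this is purely the Ruby 1.9+ hash literal syntax; the task definitions behave identically. The two forms below are the same hash:

    task :compile => ["lib/sparkql/parser.rb", "grammar"]  # hashrocket style (old)
    task compile: ["lib/sparkql/parser.rb", "grammar"]     # shorthand for the same key (new)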
data/VERSION CHANGED
@@ -1 +1 @@
-1.2.8
+1.3.0
data/lib/sparkql/errors.rb CHANGED
@@ -1,87 +1,84 @@
 module Sparkql
+  class ErrorsProcessor
+    attr_accessor :errors
 
-  class ErrorsProcessor
-    attr_accessor :errors
+    def initialize(errors = [])
+      @errors = Array(errors)
+    end
 
-    def initialize( errors = [] )
-      @errors = Array(errors)
-    end
+    # true if the error stack contains at least one error
+    def errors?
+      @errors.size.positive?
+    end
 
-    # true if the error stack contains at least one error
-    def errors?
-      @errors.size > 0
-    end
+    # true if there is at least one error of status :status in the error stack
+    def errors_by_status?(status)
+      @errors.each do |error|
+        return true if status == error.status
+      end
+      false
+    end
 
-    # true if there is at least one error of status :status in the error stack
-    def errors_by_status?( status )
-      @errors.each do | error |
-        return true if status == error.status
+    # true if there is at least one :fatal error in the error stack
+    def fatal_errors?
+      errors_by_status? :fatal
     end
-      false
-    end
 
-    # true if there is at least one :fatal error in the error stack
-    def fatal_errors?
-      errors_by_status? :fatal
-    end
+    # true if there is at least one :dropped error in the error stack
+    def dropped_errors?
+      errors_by_status? :dropped
+    end
 
-    # true if there is at least one :dropped error in the error stack
-    def dropped_errors?
-      errors_by_status? :dropped
+    # true if there is at least one :recovered error in the error stack
+    def recovered_errors?
+      errors_by_status? :recovered
+    end
   end
 
-    # true if there is at least one :recovered error in the error stack
-    def recovered_errors?
-      errors_by_status? :recovered
-    end
+  class ParserError
+    attr_accessor :token, :token_index, :expression, :message, :status, :recovered_as,
+                  :sparkql, :nested_errors
+    attr_writer :syntax, :constraint
 
-  end
+    def initialize(error_hash = {})
+      @token = error_hash[:token]
+      @token_index = error_hash[:token_index]
+      @expression = error_hash[:expression]
+      @message = error_hash[:message]
+      @status = error_hash[:status]
+      @recovered_as = error_hash[:recovered_as]
+      @recovered_as = error_hash[:recovered_as]
+      @sparkql = error_hash[:sparkql]
+      @nested_errors = error_hash[:nested_errors]
+      self.syntax = error_hash[:syntax] != false
+      self.constraint = error_hash[:constraint] == true
+    end
 
-  class ParserError
-    attr_accessor :token, :token_index, :expression, :message, :status, :recovered_as,
-      :sparkql, :nested_errors
-    attr_writer :syntax, :constraint
+    def syntax?
+      @syntax
+    end
 
-    def initialize(error_hash={})
-      @token = error_hash[:token]
-      @token_index = error_hash[:token_index]
-      @expression = error_hash[:expression]
-      @message = error_hash[:message]
-      @status = error_hash[:status]
-      @recovered_as = error_hash[:recovered_as]
-      @recovered_as = error_hash[:recovered_as]
-      @sparkql = error_hash[:sparkql]
-      @nested_errors = error_hash[:nested_errors]
-      self.syntax= error_hash[:syntax] == false ? false : true
-      self.constraint= error_hash[:constraint] == true
-    end
-
-    def syntax?
-      @syntax
-    end
-
-    def constraint?
-      @constraint
-    end
+    def constraint?
+      @constraint
+    end
 
-    def to_s
-      str = case @status
-      # Do nothing. Dropping the expressions isn't special
-      when :dropped then "Dropped: "
-      # Fatal errors cannot be recovered from, and should cause analysis or
-      # compilation to stop.
-      when :fatal then "Fatal: "
-      # Recovered errors are those that are syntactically
-      # or semantically incorrect, but are ones that we could "guess" at the
-      # intention
-      when :recovered then
-        "Recovered as #{@recovered_as}: "
-      else ""
-      end
-      str += "<#{@token}> in " unless @token.nil?
-      str += "<#{@expression}>: #{@message}."
-      str
+    def to_s
+      str = case @status
+            # Do nothing. Dropping the expressions isn't special
+            when :dropped then "Dropped: "
+            # Fatal errors cannot be recovered from, and should cause analysis or
+            # compilation to stop.
+            when :fatal then "Fatal: "
+            # Recovered errors are those that are syntactically
+            # or semantically incorrect, but are ones that we could "guess" at the
+            # intention
+            when :recovered
+              "Recovered as #{@recovered_as}: "
+            else ""
+            end
+      str += "<#{@token}> in " unless @token.nil?
+      str += "<#{@expression}>: #{@message}."
+      str
+    end
   end
 end
-
-  end
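
The errors.rb changes are RuboCop-driven reindentation and style cleanup; the public API of `ErrorsProcessor` and `ParserError` is unchanged. A minimal usage sketch based on the methods shown above (hash keys per `ParserError#initialize`; the messages are illustrative):

    require 'sparkql'

    errors = [
      Sparkql::ParserError.new(token: 'Bogus', message: 'Unsupported function',
                               status: :fatal),
      Sparkql::ParserError.new(token: 'Baths', message: 'Coerced to decimal',
                               status: :recovered, recovered_as: 'decimal')
    ]
    processor = Sparkql::ErrorsProcessor.new(errors)

    processor.errors?           # => true
    processor.fatal_errors?     # => true
    processor.recovered_errors? # => true
    processor.dropped_errors?   # => false
    puts errors.map(&:to_s)     # e.g. "Fatal: <Bogus> in <>: Unsupported function."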
data/lib/sparkql/evaluator.rb CHANGED
@@ -4,7 +4,6 @@
 # fields. Plus, it has some optimizations built in to skip the processing for
 # any expressions that don't contribute to the net result of the filter.
 class Sparkql::Evaluator
-
   # The struct here mimics some of the parser information about an expression,
   # but should not be confused for an expression. Nodes reduce the expressions
   # to a result based on conjunction logic, and only one exists per block group.
@@ -16,15 +15,17 @@ class Sparkql::Evaluator
     :match,
     :good_ors,
     :expressions,
-    :unary)
+    :unary
+  )
 
   attr_reader :processed_count
 
-  def initialize expression_resolver
+  def initialize(expression_resolver)
     @resolver = expression_resolver
   end
 
   def evaluate(expressions)
+    @dropped_expression = nil
     @processed_count = 0
     @index = Node.new(0, 0, "And", 0, true, false, 0, nil)
     @groups = [@index]
@@ -33,10 +34,11 @@ class Sparkql::Evaluator
       adjust_expression_for_dropped_field(expression)
       check_for_good_ors(expression)
       next if skip?(expression)
+
       evaluate_expression(expression)
     end
     cleanup
-    return @index[:match]
+    @index[:match]
   end
 
   private
@@ -58,9 +60,10 @@ class Sparkql::Evaluator
   # each block_group. This logic is re-used when merging the final result of one
   # block group with the previous.
   def evaluate_expression(expression)
-    @processed_count += 1
+    @processed_count += 1
     evaluate_node(expression, @resolver.resolve(expression))
   end
+
   def evaluate_node(node, result)
     if result == :drop
       @dropped_expression = node
@@ -73,7 +76,7 @@ class Sparkql::Evaluator
        (node[:conjunction_level] == node[:level] ||
         node[:conjunction_level] == @index[:level])
      @index[:match] = !result if @index[:match]
-    elsif node[:conjunction] == 'And' || @index[:expressions] == 0
+    elsif node[:conjunction] == 'And' || (@index[:expressions]).zero?
      @index[:match] = result if @index[:match]
    elsif node[:conjunction] == 'Or' && result
      @index[:match] = result
@@ -97,7 +100,7 @@ class Sparkql::Evaluator
         end
       end
     end
-    if !good_index.nil? && good_index[:expressions] > 0 && good_index[:match]
+    if !good_index.nil? && (good_index[:expressions]).positive? && good_index[:match]
       good_index[:good_ors] = true
     end
   end
@@ -112,8 +115,8 @@ class Sparkql::Evaluator
 
   def new_group(expression)
     Node.new(expression[:level], expression[:block_group],
-      expression[:conjunction], expression[:conjunction_level],
-      true, false, 0, nil)
+             expression[:conjunction], expression[:conjunction_level],
+             true, false, 0, nil)
   end
 
   # When the last expression was dropped, we need to repair the filter by
@@ -125,6 +128,7 @@ class Sparkql::Evaluator
       expression[:conjunction] = @dropped_expression[:conjunction]
       expression[:conjunction_level] = @dropped_expression[:conjunction_level]
     end
+
     @dropped_expression = nil
   end
 
data/lib/sparkql/expression_resolver.rb CHANGED
@@ -1,17 +1,16 @@
 # Base class for handling expression resolution
 class Sparkql::ExpressionResolver
-
   # Accepted results from the resolve method:
   # * true and false reflect the expression's boolean result (as all expressions
   #   should).
   # * :drop is a special symbol indicating that the expression should be omitted
   #   from the filter. Special rules apply for a dropped expression, such as
   #   keeping the conjunction of the dropped expression.
-  VALID_RESULTS = [true, false, :drop]
+  VALID_RESULTS = [true, false, :drop].freeze
 
   # Evaluate the result of this expression. Allows for any of the values in
   # VALID_RESULTS
-  def resolve(expression)
+  def resolve(_expression)
     true
   end
 end
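
The `_expression` rename only quiets the unused-argument cop in this no-op base class; subclasses still override `resolve(expression)`. A sketch of a resolver exercising the `:drop` contract described above (the field list is hypothetical):

    class KnownFieldResolver < Sparkql::ExpressionResolver
      KNOWN_FIELDS = %w[Baths Beds].freeze

      def resolve(expression)
        # Omit expressions on fields we can't evaluate; the evaluator keeps
        # the dropped expression's conjunction, per the rules above.
        return :drop unless KNOWN_FIELDS.include?(expression[:field])

        true # a real resolver would compare the expression against a record
      end
    end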
data/lib/sparkql/expression_state.rb CHANGED
@@ -1,23 +1,21 @@
-# Custom fields need to add a table join to the customfieldsearch table when AND'd together, 
+# Custom fields need to add a table join to the customfieldsearch table when AND'd together,
 # but not when they are OR'd or nested. This class maintains the state for all custom field expressions
 # and lets the parser know when to do either.
 class Sparkql::ExpressionState
-
   def initialize
-    @expressions = {0=>[]}
+    @expressions = { 0 => [] }
     @last_conjunction = "And" # always start with a join
     @block_group = 0
   end
-
+
   def push(expression)
     @block_group = expression[:block_group]
-    @expressions[@block_group] ||= []
+    @expressions[@block_group] ||= []
     @expressions[@block_group] << expression
     @last_conjunction = expression[:conjunction]
   end
-
+
   def needs_join?
-    return @expressions[@block_group].size == 1 || ["Not", "And"].include?(@last_conjunction)
+    @expressions[@block_group].size == 1 || %w[Not And].include?(@last_conjunction)
  end
-
-end
+end
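
The join bookkeeping, step by step (expression hashes trimmed to the two keys this class reads):

    state = Sparkql::ExpressionState.new
    state.needs_join?                             # => true  (fresh state starts with "And")

    state.push(block_group: 0, conjunction: "Or")
    state.needs_join?                             # => true  (first expression in the group)

    state.push(block_group: 0, conjunction: "Or")
    state.needs_join?                             # => false (OR'd custom fields share one join)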
data/lib/sparkql/function_resolver.rb CHANGED
@@ -225,14 +225,19 @@ module Sparkql
       }
     }.freeze
 
+    def self.lookup(function_name)
+      SUPPORTED_FUNCTIONS[function_name.to_sym]
+    end
+
     # Construct a resolver instance for a function
     # name: function name (String)
     # args: array of literal hashes of the format {:type=><literal_type>, :value=><escaped_literal_value>}.
     #       Empty array for functions that have no arguments.
-    def initialize(name, args)
+    def initialize(name, args, options = {})
       @name = name
       @args = args
       @errors = []
+      @current_timestamp = options[:current_timestamp]
     end
 
     # Validate the function instance prior to calling it. All validation failures will show up in the
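
This is the heart of the v1.3.0 timezone fix: callers may now inject the reference "now" via `options[:current_timestamp]` instead of the resolver reaching for the system clock. A sketch, assuming the validate-then-call flow the comments above describe; the zone offset and the exact result value are illustrative, and argument hashes follow the literal format documented above:

    require 'sparkql'

    local_now = Time.new(2022, 2, 1, 23, 30, 0, "-06:00")
    resolver = Sparkql::FunctionResolver.new(
      'days',
      [{ type: :integer, value: 7 }],
      current_timestamp: local_now
    )
    resolver.validate
    result = resolver.call # => a literal hash such as { type: :date, value: "2022-02-08" }

    Sparkql::FunctionResolver.lookup(:days) # => function metadata, or nil if unsupported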
@@ -541,18 +546,16 @@ module Sparkql
       today += weeks * 7
 
       # Now iterate on the remaining weekdays
-      remaining.times do |i|
+      remaining.times do |_i|
         today += direction
-        while today.saturday? || today.sunday?
-          today += direction
-        end
+        today += direction while today.saturday? || today.sunday?
       end
 
       # If we end on the weekend, bump accordingly
       while today.saturday? || today.sunday?
         # If we start and end on the weekend, wind things back to the next
         # appropriate weekday.
-        if weekend_start && remaining == 0
+        if weekend_start && remaining.zero?
           today -= direction
         else
           today += direction
@@ -637,7 +640,8 @@ module Sparkql
     end
 
     def months(num_months)
-      d = current_timestamp >> num_months
+      # DateTime usage. There's a better means to do this with Time via rails
+      d = (current_timestamp.to_datetime >> num_months).to_time
       {
         type: :date,
         value: d.strftime(STRFTIME_DATE_FORMAT)
@@ -645,7 +649,8 @@ module Sparkql
     end
 
     def years(num_years)
-      d = current_timestamp >> (num_years * 12)
+      # DateTime usage. There's a better means to do this with Time via rails
+      d = (current_timestamp.to_datetime >> (num_years * 12)).to_time
      {
        type: :date,
        value: d.strftime(STRFTIME_DATE_FORMAT)
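
The round-trip through `DateTime` exists because `DateTime#>>` shifts by whole calendar months (clamping the day when the target month is shorter), while `Time` has no native month arithmetic; converting back with `to_time` preserves the original zone offset on Ruby 2.4+. For instance:

    require 'date'

    t = Time.new(2022, 1, 31, 12, 0, 0, "-06:00")
    (t.to_datetime >> 1).to_time  # => 2022-02-28 12:00:00 -0600 (day clamped)
    (t.to_datetime >> 12).to_time # => 2023-01-31 12:00:00 -0600 (years() is 12 * months)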
@@ -832,11 +837,11 @@ module Sparkql
     end
 
     def current_time
-      current_timestamp.to_time
+      current_timestamp
     end
 
     def current_timestamp
-      @current_timestamp ||= DateTime.now
+      @current_timestamp ||= Time.now
     end
 
     private
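
Keeping everything in `Time` matters because the relative date functions format the *local* date of the injected timestamp; with `@current_timestamp` now set in the constructor, the `||=` is only a fallback to the system clock. A caller near midnight in another zone illustrates why the zone must survive (values illustrative):

    late_evening = Time.new(2022, 2, 1, 23, 30, 0, "-06:00")
    late_evening.strftime("%Y-%m-%d")        # => "2022-02-01" (caller's calendar day)
    late_evening.getutc.strftime("%Y-%m-%d") # => "2022-02-02" (UTC has already rolled over)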
data/lib/sparkql/geo/record_radius.rb CHANGED
@@ -1,7 +1,7 @@
 module Sparkql
   module Geo
     class RecordRadius
-      RECORD_ID_REGEX = /\A[0-9]{26}\z/
+      RECORD_ID_REGEX = /\A[0-9]{26}\z/.freeze
 
       attr_accessor :record_id, :radius