graphql 2.2.5 → 2.2.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


Files changed (42)
  1. checksums.yaml +4 -4
  2. data/lib/graphql/analysis/ast/field_usage.rb +32 -7
  3. data/lib/graphql/analysis/ast.rb +7 -1
  4. data/lib/graphql/coercion_error.rb +1 -9
  5. data/lib/graphql/dataloader/request.rb +5 -0
  6. data/lib/graphql/execution/interpreter/runtime.rb +9 -0
  7. data/lib/graphql/execution/interpreter.rb +90 -150
  8. data/lib/graphql/introspection/entry_points.rb +9 -3
  9. data/lib/graphql/introspection/schema_type.rb +3 -1
  10. data/lib/graphql/language/document_from_schema_definition.rb +1 -2
  11. data/lib/graphql/language/nodes.rb +1 -1
  12. data/lib/graphql/language/parser.rb +11 -1
  13. data/lib/graphql/language/printer.rb +4 -0
  14. data/lib/graphql/pagination/array_connection.rb +3 -3
  15. data/lib/graphql/pagination/relation_connection.rb +3 -3
  16. data/lib/graphql/query/validation_pipeline.rb +2 -2
  17. data/lib/graphql/query/variables.rb +3 -3
  18. data/lib/graphql/query.rb +1 -1
  19. data/lib/graphql/schema/base_64_encoder.rb +3 -5
  20. data/lib/graphql/schema/field.rb +31 -30
  21. data/lib/graphql/schema/interface.rb +5 -1
  22. data/lib/graphql/schema/resolver.rb +9 -5
  23. data/lib/graphql/schema/unique_within_type.rb +1 -1
  24. data/lib/graphql/schema.rb +70 -17
  25. data/lib/graphql/static_validation/literal_validator.rb +1 -2
  26. data/lib/graphql/static_validation/rules/required_input_object_attributes_are_present.rb +1 -1
  27. data/lib/graphql/static_validation/validator.rb +3 -0
  28. data/lib/graphql/subscriptions/serialize.rb +2 -0
  29. data/lib/graphql/subscriptions.rb +0 -3
  30. data/lib/graphql/testing/helpers.rb +8 -4
  31. data/lib/graphql/tracing/data_dog_trace.rb +21 -34
  32. data/lib/graphql/tracing/data_dog_tracing.rb +7 -21
  33. data/lib/graphql/tracing/legacy_hooks_trace.rb +74 -0
  34. data/lib/graphql/tracing/platform_tracing.rb +2 -0
  35. data/lib/graphql/tracing/{prometheus_tracing → prometheus_trace}/graphql_collector.rb +3 -1
  36. data/lib/graphql/tracing/sentry_trace.rb +112 -0
  37. data/lib/graphql/tracing.rb +3 -1
  38. data/lib/graphql/version.rb +1 -1
  39. data/lib/graphql.rb +3 -2
  40. metadata +24 -23
  41. data/lib/graphql/schema/base_64_bp.rb +0 -26
  42. data/lib/graphql/subscriptions/instrumentation.rb +0 -28
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 49aca16aba9ee96f9aa4229e07e5493b1e8c676a892a1fa1e9828323d44dc073
- data.tar.gz: 95eec1c3808a12b28bcc2732bb13baf9d52ddf3f34c4971abf92f6cff3333e93
+ metadata.gz: 1bae060f1dcf449082e9e53be024476505d848448a13d592f31e67c11024caa4
+ data.tar.gz: ebc753b89b657c18771641869a4dffea9e79fe324a3f7e46c1a84304a6cc3375
  SHA512:
- metadata.gz: 287acc2969a3b181d2d53dc94562335fac43de4cb8873844f063b63b604948e3edb7df318e53ec42906f678f7329efc5aa8d28bdbbfd4bc7f9d504a96f745df9
- data.tar.gz: 3ef6c000a5eeaddd295786e9baf147663556cc19c1bc46facf57b67233bf40780e42b4781f39ebd3966ca29bc6bab6b682e9bd475b4c2474f471841454cb3d3c
+ metadata.gz: 0bd27d12cd1efd1c4166c20062e401fa524bcda469de51cbfe89fd21542fd3433a2f23628c21330c9e3d700a9627c683a6a1691887a06ef06105ede4c2241b26
+ data.tar.gz: abc65a9217ae76239458695a3ee47d3e2bf3d072e16d62b9df16a65a380110e4a9a207af17473ca0f14fcce5d907a2552866597dd53785c890979a37aa2c500b
data/lib/graphql/analysis/ast/field_usage.rb CHANGED
@@ -8,6 +8,7 @@ module GraphQL
  @used_fields = Set.new
  @used_deprecated_fields = Set.new
  @used_deprecated_arguments = Set.new
+ @used_deprecated_enum_values = Set.new
  end

  def on_leave_field(node, parent, visitor)
@@ -15,7 +16,7 @@ module GraphQL
  field = "#{visitor.parent_type_definition.graphql_name}.#{field_defn.graphql_name}"
  @used_fields << field
  @used_deprecated_fields << field if field_defn.deprecation_reason
- arguments = visitor.query.arguments_for(node, visitor.field_definition)
+ arguments = visitor.query.arguments_for(node, field_defn)
  # If there was an error when preparing this argument object,
  # then this might be an error or something:
  if arguments.respond_to?(:argument_values)
@@ -28,6 +29,7 @@ module GraphQL
  used_fields: @used_fields.to_a,
  used_deprecated_fields: @used_deprecated_fields.to_a,
  used_deprecated_arguments: @used_deprecated_arguments.to_a,
+ used_deprecated_enum_values: @used_deprecated_enum_values.to_a,
  }
  end

@@ -41,16 +43,39 @@ module GraphQL

  next if argument.value.nil?

- if argument.definition.type.kind.input_object?
+ argument_type = argument.definition.type
+ if argument_type.non_null?
+ argument_type = argument_type.of_type
+ end
+
+ if argument_type.kind.input_object?
  extract_deprecated_arguments(argument.value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
- elsif argument.definition.type.list?
- argument
- .value
- .select { |value| value.respond_to?(:arguments) }
- .each { |value| extract_deprecated_arguments(value.arguments.argument_values) } # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
+ elsif argument_type.kind.enum?
+ extract_deprecated_enum_value(argument_type, argument.value)
+ elsif argument_type.list?
+ inner_type = argument_type.unwrap
+ case inner_type.kind
+ when TypeKinds::INPUT_OBJECT
+ argument.value.each do |value|
+ extract_deprecated_arguments(value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
+ end
+ when TypeKinds::ENUM
+ argument.value.each do |value|
+ extract_deprecated_enum_value(inner_type, value)
+ end
+ else
+ # Not a kind of input that we track
+ end
  end
  end
  end
+
+ def extract_deprecated_enum_value(enum_type, value)
+ enum_value = @query.warden.enum_values(enum_type).find { |ev| ev.value == value }
+ if enum_value&.deprecation_reason
+ @used_deprecated_enum_values << enum_value.path
+ end
+ end
  end
  end
  end
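
Note: the FieldUsage analyzer result now includes a used_deprecated_enum_values key alongside the existing ones. A minimal reading sketch, not part of the diff; MySchema, the query string, and the enum are placeholders:

  query = GraphQL::Query.new(MySchema, "{ orders(sort: LEGACY_SORT) { id } }")
  usage = GraphQL::Analysis::AST.analyze_query(query, [GraphQL::Analysis::AST::FieldUsage]).first
  usage[:used_deprecated_enum_values] # e.g. ["OrderSort.LEGACY_SORT"] when that enum value is deprecated
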
data/lib/graphql/analysis/ast.rb CHANGED
@@ -6,6 +6,7 @@ require "graphql/analysis/ast/query_complexity"
  require "graphql/analysis/ast/max_query_complexity"
  require "graphql/analysis/ast/query_depth"
  require "graphql/analysis/ast/max_query_depth"
+ require "timeout"

  module GraphQL
  module Analysis
@@ -63,7 +64,10 @@ module GraphQL
  analyzers: analyzers_to_run
  )

- visitor.visit
+ # `nil` or `0` causes no timeout
+ Timeout::timeout(query.validate_timeout_remaining) do
+ visitor.visit
+ end

  if visitor.rescued_errors.any?
  return visitor.rescued_errors
@@ -75,6 +79,8 @@ module GraphQL
  []
  end
  end
+ rescue Timeout::Error
+ [GraphQL::AnalysisError.new("Timeout on validation of query")]
  end

  def analysis_errors(results)
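
Note: AST analysis now runs inside whatever is left of the schema's validation timeout. A minimal configuration sketch, not part of the diff; the schema name is a placeholder:

  class MySchema < GraphQL::Schema
    # Static validation and AST analysis now share this budget; the leftover
    # is passed to Timeout::timeout around the analyzers (nil or 0 disables it).
    validate_timeout 3
  end
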
data/lib/graphql/coercion_error.rb CHANGED
@@ -1,13 +1,5 @@
  # frozen_string_literal: true
  module GraphQL
- class CoercionError < GraphQL::Error
- # @return [Hash] Optional custom data for error objects which will be added
- # under the `extensions` key.
- attr_accessor :extensions
-
- def initialize(message, extensions: nil)
- @extensions = extensions
- super(message)
- end
+ class CoercionError < GraphQL::ExecutionError
  end
  end
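
Note: CoercionError is now a subclass of GraphQL::ExecutionError, so extensions: comes from there instead of a custom initializer. A minimal sketch of raising it from a custom scalar; the scalar itself is a placeholder, not part of this gem:

  class UlidType < GraphQL::Schema::Scalar
    def self.coerce_input(value, _context)
      unless value.is_a?(String) && value.length == 26
        # `extensions:` is inherited from GraphQL::ExecutionError now
        raise GraphQL::CoercionError.new("#{value.inspect} is not a valid ULID", extensions: { "code" => "INVALID_ULID" })
      end
      value
    end
  end
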
data/lib/graphql/dataloader/request.rb CHANGED
@@ -14,6 +14,11 @@ module GraphQL
  def load
  @source.load(@key)
  end
+
+ def load_with_deprecation_warning
+ warn("Returning `.request(...)` from GraphQL::Dataloader is deprecated, use `.load(...)` instead. (See usage of #{@source} with #{@key.inspect}).")
+ load
+ end
  end
  end
  end
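
Note: the warning targets resolvers that return a pending .request(...) and let the runtime resolve it. A minimal sketch of the preferred form; Sources::Record, Types::Author, and the field are placeholders:

  field :author, Types::Author, null: true

  def author
    # Preferred: block until the batched value is resolved
    context.dataloader.with(Sources::Record, ::Author).load(object.author_id)
  end

  # Deprecated: returning the pending request and letting the runtime resolve it
  # def author
  #   context.dataloader.with(Sources::Record, ::Author).request(object.author_id)
  # end
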
data/lib/graphql/execution/interpreter/runtime.rb CHANGED
@@ -352,6 +352,15 @@ module GraphQL
  end

  field_result = call_method_on_directives(:resolve, object, directives) do
+ if directives.any?
+ # This might be executed in a different context; reset this info
+ runtime_state = get_current_runtime_state
+ runtime_state.current_field = field_defn
+ runtime_state.current_object = object
+ runtime_state.current_arguments = resolved_arguments
+ runtime_state.current_result_name = result_name
+ runtime_state.current_result = selection_result
+ end
  # Actually call the field resolver and capture the result
  app_result = begin
  @current_trace.execute_field(field: field_defn, ast_node: ast_node, query: query, object: object, arguments: kwarg_arguments) do
data/lib/graphql/execution/interpreter.rb CHANGED
@@ -37,173 +37,113 @@ module GraphQL
  multiplex.current_trace.execute_multiplex(multiplex: multiplex) do
  schema = multiplex.schema
  queries = multiplex.queries
- query_instrumenters = schema.instrumenters[:query]
- multiplex_instrumenters = schema.instrumenters[:multiplex]
  lazies_at_depth = Hash.new { |h, k| h[k] = [] }
+ multiplex_analyzers = schema.multiplex_analyzers
+ if multiplex.max_complexity
+ multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+ end

- # First, run multiplex instrumentation, then query instrumentation for each query
- call_hooks(multiplex_instrumenters, multiplex, :before_multiplex, :after_multiplex) do
- each_query_call_hooks(query_instrumenters, queries) do
- schema = multiplex.schema
- multiplex_analyzers = schema.multiplex_analyzers
- queries = multiplex.queries
- if multiplex.max_complexity
- multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+ schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
+ begin
+ # Since this is basically the batching context,
+ # share it for a whole multiplex
+ multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy.new
+ # Do as much eager evaluation of the query as possible
+ results = []
+ queries.each_with_index do |query, idx|
+ if query.subscription? && !query.subscription_update?
+ query.context.namespace(:subscriptions)[:events] = []
  end
-
- schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
- begin
- # Since this is basically the batching context,
- # share it for a whole multiplex
- multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy.new
- # Do as much eager evaluation of the query as possible
- results = []
- queries.each_with_index do |query, idx|
- multiplex.dataloader.append_job {
- operation = query.selected_operation
- result = if operation.nil? || !query.valid? || query.context.errors.any?
- NO_OPERATION
- else
- begin
- # Although queries in a multiplex _share_ an Interpreter instance,
- # they also have another item of state, which is private to that query
- # in particular, assign it here:
- runtime = Runtime.new(query: query, lazies_at_depth: lazies_at_depth)
- query.context.namespace(:interpreter_runtime)[:runtime] = runtime
-
- query.current_trace.execute_query(query: query) do
- runtime.run_eager
- end
- rescue GraphQL::ExecutionError => err
- query.context.errors << err
- NO_OPERATION
- end
+ multiplex.dataloader.append_job {
+ operation = query.selected_operation
+ result = if operation.nil? || !query.valid? || query.context.errors.any?
+ NO_OPERATION
+ else
+ begin
+ # Although queries in a multiplex _share_ an Interpreter instance,
+ # they also have another item of state, which is private to that query
+ # in particular, assign it here:
+ runtime = Runtime.new(query: query, lazies_at_depth: lazies_at_depth)
+ query.context.namespace(:interpreter_runtime)[:runtime] = runtime
+
+ query.current_trace.execute_query(query: query) do
+ runtime.run_eager
  end
- results[idx] = result
- }
+ rescue GraphQL::ExecutionError => err
+ query.context.errors << err
+ NO_OPERATION
+ end
  end
+ results[idx] = result
+ }
+ end

- multiplex.dataloader.run
+ multiplex.dataloader.run

- # Then, work through lazy results in a breadth-first way
- multiplex.dataloader.append_job {
- query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
- queries = multiplex ? multiplex.queries : [query]
- final_values = queries.map do |query|
- runtime = query.context.namespace(:interpreter_runtime)[:runtime]
- # it might not be present if the query has an error
- runtime ? runtime.final_result : nil
- end
- final_values.compact!
- multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
- Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
- end
- }
- multiplex.dataloader.run
-
- # Then, find all errors and assign the result to the query object
- results.each_with_index do |data_result, idx|
- query = queries[idx]
- # Assign the result so that it can be accessed in instrumentation
- query.result_values = if data_result.equal?(NO_OPERATION)
- if !query.valid? || query.context.errors.any?
- # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
- { "errors" => query.static_errors.map(&:to_h) }
- else
- data_result
- end
- else
- result = {
- "data" => query.context.namespace(:interpreter_runtime)[:runtime].final_result
- }
+ # Then, work through lazy results in a breadth-first way
+ multiplex.dataloader.append_job {
+ query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
+ queries = multiplex ? multiplex.queries : [query]
+ final_values = queries.map do |query|
+ runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+ # it might not be present if the query has an error
+ runtime ? runtime.final_result : nil
+ end
+ final_values.compact!
+ multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
+ Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
+ end
+ }
+ multiplex.dataloader.run

- if query.context.errors.any?
- error_result = query.context.errors.map(&:to_h)
- result["errors"] = error_result
- end
+ # Then, find all errors and assign the result to the query object
+ results.each_with_index do |data_result, idx|
+ query = queries[idx]
+ if (events = query.context.namespace(:subscriptions)[:events]) && events.any?
+ schema.subscriptions.write_subscription(query, events)
+ end
+ # Assign the result so that it can be accessed in instrumentation
+ query.result_values = if data_result.equal?(NO_OPERATION)
+ if !query.valid? || query.context.errors.any?
+ # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
+ { "errors" => query.static_errors.map(&:to_h) }
+ else
+ data_result
+ end
+ else
+ result = {}

- result
- end
- if query.context.namespace?(:__query_result_extensions__)
- query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
- end
- # Get the Query::Result, not the Hash
- results[idx] = query.result
+ if query.context.errors.any?
+ error_result = query.context.errors.map(&:to_h)
+ result["errors"] = error_result
  end

- results
- rescue Exception
- # TODO rescue at a higher level so it will catch errors in analysis, too
- # Assign values here so that the query's `@executed` becomes true
- queries.map { |q| q.result_values ||= {} }
- raise
- ensure
- queries.map { |query|
- runtime = query.context.namespace(:interpreter_runtime)[:runtime]
- if runtime
- runtime.delete_all_interpreter_context
- end
- }
+ result["data"] = query.context.namespace(:interpreter_runtime)[:runtime].final_result
+
+ result
+ end
+ if query.context.namespace?(:__query_result_extensions__)
+ query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
  end
+ # Get the Query::Result, not the Hash
+ results[idx] = query.result
  end
- end
- end
- end

- private
-
- # Call the before_ hooks of each query,
- # Then yield if no errors.
- # `call_hooks` takes care of appropriate cleanup.
- def each_query_call_hooks(instrumenters, queries, i = 0)
- if i >= queries.length
- yield
- else
- query = queries[i]
- call_hooks(instrumenters, query, :before_query, :after_query) {
- each_query_call_hooks(instrumenters, queries, i + 1) {
- yield
+ results
+ rescue Exception
+ # TODO rescue at a higher level so it will catch errors in analysis, too
+ # Assign values here so that the query's `@executed` becomes true
+ queries.map { |q| q.result_values ||= {} }
+ raise
+ ensure
+ queries.map { |query|
+ runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+ if runtime
+ runtime.delete_all_interpreter_context
+ end
  }
- }
- end
- end
-
- # Call each before hook, and if they all succeed, yield.
- # If they don't all succeed, call after_ for each one that succeeded.
- def call_hooks(instrumenters, object, before_hook_name, after_hook_name)
- begin
- successful = []
- instrumenters.each do |instrumenter|
- instrumenter.public_send(before_hook_name, object)
- successful << instrumenter
- end
-
- # if any before hooks raise an exception, quit calling before hooks,
- # but call the after hooks on anything that succeeded but also
- # raise the exception that came from the before hook.
- rescue GraphQL::ExecutionError => err
- object.context.errors << err
- rescue => e
- raise call_after_hooks(successful, object, after_hook_name, e)
- end
-
- begin
- yield # Call the user code
- ensure
- ex = call_after_hooks(successful, object, after_hook_name, nil)
- raise ex if ex
- end
- end
-
- def call_after_hooks(instrumenters, object, after_hook_name, ex)
- instrumenters.reverse_each do |instrumenter|
- begin
- instrumenter.public_send(after_hook_name, object)
- rescue => e
- ex = e
  end
  end
- ex
  end
  end

data/lib/graphql/introspection/entry_points.rb CHANGED
@@ -9,13 +9,19 @@ module GraphQL

  def __schema
  # Apply wrapping manually since this field isn't wrapped by instrumentation
- schema = @context.query.schema
+ schema = context.schema
  schema_type = schema.introspection_system.types["__Schema"]
- schema_type.wrap(schema, @context)
+ schema_type.wrap(schema, context)
  end

  def __type(name:)
- context.warden.reachable_type?(name) ? context.warden.get_type(name) : nil
+ if context.warden.reachable_type?(name)
+ context.warden.get_type(name)
+ elsif (type = context.schema.extra_types.find { |t| t.graphql_name == name })
+ type
+ else
+ nil
+ end
  end
  end
  end
data/lib/graphql/introspection/schema_type.rb CHANGED
@@ -20,7 +20,9 @@ module GraphQL
  end

  def types
- @context.warden.reachable_types.sort_by(&:graphql_name)
+ types = context.warden.reachable_types + context.schema.extra_types
+ types.sort_by!(&:graphql_name)
+ types
  end

  def query_type
data/lib/graphql/language/document_from_schema_definition.rb CHANGED
@@ -266,8 +266,7 @@ module GraphQL
  end
  definitions = build_directive_nodes(dirs_to_build)

- type_nodes = build_type_definition_nodes(warden.reachable_types)
-
+ type_nodes = build_type_definition_nodes(warden.reachable_types + schema.extra_types)
  if @include_one_of
  # This may have been set to true when iterating over all types
  definitions.concat(build_directive_nodes([GraphQL::Schema::Directive::OneOf]))
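
Note: taken together, the introspection and schema-printing changes above make types registered with extra_types visible via __schema.types, __type(name:), and printed SDL even when nothing references them. A minimal sketch, not part of the diff; the schema and type names are placeholders:

  class MySchema < GraphQL::Schema
    query Types::Query
    # Not reachable from Query, but still introspectable and printed:
    extra_types Types::LegacyPayload
  end
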
data/lib/graphql/language/nodes.rb CHANGED
@@ -512,7 +512,7 @@ module GraphQL
  # An operation-level query variable
  class VariableDefinition < AbstractNode
  scalar_methods :name, :type, :default_value
- children_methods false
+ children_methods(directives: Directive)
  # @!attribute default_value
  # @return [String, Integer, Float, Boolean, Array, NullValue] A Ruby value to use if no other value is provided

data/lib/graphql/language/parser.rb CHANGED
@@ -121,7 +121,17 @@ module GraphQL
  value
  end

- defs << Nodes::VariableDefinition.new(pos: loc, name: var_name, type: var_type, default_value: default_value, filename: @filename, source_string: @graphql_str)
+ directives = parse_directives
+
+ defs << Nodes::VariableDefinition.new(
+ pos: loc,
+ name: var_name,
+ type: var_type,
+ default_value: default_value,
+ directives: directives,
+ filename: @filename,
+ source_string: @graphql_str
+ )
  end
  expect_token(:RPAREN)
  defs
data/lib/graphql/language/printer.rb CHANGED
@@ -208,6 +208,10 @@ module GraphQL
  print_string(" = ")
  print_node(variable_definition.default_value)
  end
+ variable_definition.directives.each do |dir|
+ print_string(" ")
+ print_directive(dir)
+ end
  end

  def print_variable_identifier(variable_identifier)
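
Note: the nodes.rb, parser.rb, and printer.rb changes together let directives on variable definitions survive a parse/print round trip. A minimal sketch, not part of the diff; @sensitive is a placeholder directive, not one shipped with the gem:

  doc = GraphQL.parse('query($token: String! @sensitive) { viewer { name } }')
  doc.definitions.first.variables.first.directives.map(&:name) # => ["sensitive"]
  puts doc.to_query_string # the directive is printed back after the variable definition
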
data/lib/graphql/pagination/array_connection.rb CHANGED
@@ -35,10 +35,10 @@ module GraphQL
  def load_nodes
  @nodes ||= begin
  sliced_nodes = if before && after
- end_idx = index_from_cursor(before)-1
+ end_idx = index_from_cursor(before) - 2
  end_idx < 0 ? [] : items[index_from_cursor(after)..end_idx] || []
  elsif before
- end_idx = index_from_cursor(before)-2
+ end_idx = index_from_cursor(before) - 2
  end_idx < 0 ? [] : items[0..end_idx] || []
  elsif after
  items[index_from_cursor(after)..-1] || []
@@ -56,7 +56,7 @@ module GraphQL
  false
  end

- @has_next_page = if first
+ @has_next_page = if first_value && first
  # There are more items after these items
  sliced_nodes.count > first
  elsif before
data/lib/graphql/pagination/relation_connection.rb CHANGED
@@ -29,14 +29,14 @@ module GraphQL

  def has_next_page
  if @has_next_page.nil?
- @has_next_page = if before_offset && before_offset > 0
- true
- elsif first
+ @has_next_page = if first && first_value
  if @nodes && @nodes.count < first
  false
  else
  relation_larger_than(sliced_nodes, @sliced_nodes_offset, first)
  end
+ elsif before_offset && before_offset > 0
+ true
  else
  false
  end
data/lib/graphql/query/validation_pipeline.rb CHANGED
@@ -14,7 +14,7 @@ module GraphQL
  #
  # @api private
  class ValidationPipeline
- attr_reader :max_depth, :max_complexity
+ attr_reader :max_depth, :max_complexity, :validate_timeout_remaining

  def initialize(query:, parse_error:, operation_name_error:, max_depth:, max_complexity:)
  @validation_errors = []
@@ -71,7 +71,7 @@ module GraphQL
  validator = @query.static_validator || @schema.static_validator
  validation_result = validator.validate(@query, validate: @query.validate, timeout: @schema.validate_timeout, max_errors: @schema.validate_max_errors)
  @validation_errors.concat(validation_result[:errors])
-
+ @validate_timeout_remaining = validation_result[:remaining_timeout]
  if @validation_errors.empty?
  @validation_errors.concat(@query.variables.errors)
  end
data/lib/graphql/query/variables.rb CHANGED
@@ -26,7 +26,7 @@ module GraphQL
  # - Then, fall back to the default value from the query string
  # If it's still nil, raise an error if it's required.
  variable_type = schema.type_from_ast(ast_variable.type, context: ctx)
- if variable_type.nil?
+ if variable_type.nil? || !variable_type.unwrap.kind.input?
  # Pass -- it will get handled by a validator
  else
  variable_name = ast_variable.name
@@ -80,12 +80,12 @@ module GraphQL
  else
  val
  end
- end
+ end

  def add_max_errors_reached_message
  message = "Too many errors processing variables, max validation error limit reached. Execution aborted"
  validation_result = GraphQL::Query::InputValidationResult.from_problem(message)
- errors << GraphQL::Query::VariableValidationError.new(nil, nil, nil, validation_result, msg: message)
+ errors << GraphQL::Query::VariableValidationError.new(nil, nil, nil, validation_result, msg: message)
  end
  end
  end
data/lib/graphql/query.rb CHANGED
@@ -317,7 +317,7 @@ module GraphQL
  end

  def_delegators :validation_pipeline, :validation_errors,
- :analyzers, :ast_analyzers, :max_depth, :max_complexity
+ :analyzers, :ast_analyzers, :max_depth, :max_complexity, :validate_timeout_remaining

  attr_accessor :analysis_errors
  def valid?
data/lib/graphql/schema/base_64_encoder.rb CHANGED
@@ -1,18 +1,16 @@
  # frozen_string_literal: true
-
- require 'graphql/schema/base_64_bp'
-
+ require "base64"
  module GraphQL
  class Schema
  # @api private
  module Base64Encoder
  def self.encode(unencoded_text, nonce: false)
- Base64Bp.urlsafe_encode64(unencoded_text, padding: false)
+ Base64.urlsafe_encode64(unencoded_text, padding: false)
  end

  def self.decode(encoded_text, nonce: false)
  # urlsafe_decode64 is for forward compatibility
- Base64Bp.urlsafe_decode64(encoded_text)
+ Base64.urlsafe_decode64(encoded_text)
  rescue ArgumentError
  raise GraphQL::ExecutionError, "Invalid input: #{encoded_text.inspect}"
  end
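
Note: the vendored Base64Bp shim (removed file 41 in the list above) is replaced by Ruby's stdlib Base64 with URL-safe, unpadded output. A quick sketch of the observable behavior, not part of the diff:

  GraphQL::Schema::Base64Encoder.encode("abc")   # => "YWJj" (URL-safe, no "=" padding)
  GraphQL::Schema::Base64Encoder.decode("YWJj")  # => "abc"
  GraphQL::Schema::Base64Encoder.decode("not base64!") # raises GraphQL::ExecutionError
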