graphql 2.0.13 → 2.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of graphql might be problematic.

Files changed (38):
  1. checksums.yaml +4 -4
  2. data/lib/generators/graphql/templates/schema.erb +3 -0
  3. data/lib/graphql/backtrace/table.rb +2 -2
  4. data/lib/graphql/dataloader/source.rb +9 -0
  5. data/lib/graphql/dataloader.rb +4 -1
  6. data/lib/graphql/execution/interpreter/runtime.rb +23 -10
  7. data/lib/graphql/execution/interpreter.rb +185 -59
  8. data/lib/graphql/execution/lookahead.rb +39 -28
  9. data/lib/graphql/execution/multiplex.rb +1 -116
  10. data/lib/graphql/execution.rb +0 -1
  11. data/lib/graphql/introspection/type_type.rb +7 -0
  12. data/lib/graphql/introspection.rb +3 -2
  13. data/lib/graphql/language/document_from_schema_definition.rb +18 -18
  14. data/lib/graphql/language/printer.rb +20 -11
  15. data/lib/graphql/query/context.rb +19 -5
  16. data/lib/graphql/query.rb +1 -1
  17. data/lib/graphql/schema/build_from_definition.rb +32 -17
  18. data/lib/graphql/schema/directive/one_of.rb +12 -0
  19. data/lib/graphql/schema/directive/transform.rb +1 -1
  20. data/lib/graphql/schema/field.rb +3 -3
  21. data/lib/graphql/schema/input_object.rb +35 -0
  22. data/lib/graphql/schema/late_bound_type.rb +4 -0
  23. data/lib/graphql/schema/member/build_type.rb +1 -1
  24. data/lib/graphql/schema/member/has_directives.rb +71 -56
  25. data/lib/graphql/schema/resolver/has_payload_type.rb +1 -1
  26. data/lib/graphql/schema/type_membership.rb +3 -0
  27. data/lib/graphql/schema.rb +19 -7
  28. data/lib/graphql/static_validation/all_rules.rb +1 -0
  29. data/lib/graphql/static_validation/literal_validator.rb +4 -0
  30. data/lib/graphql/static_validation/rules/one_of_input_objects_are_valid.rb +66 -0
  31. data/lib/graphql/static_validation/rules/one_of_input_objects_are_valid_error.rb +29 -0
  32. data/lib/graphql/static_validation/rules/variable_default_values_are_correctly_typed.rb +1 -1
  33. data/lib/graphql/tracing/data_dog_tracing.rb +2 -0
  34. data/lib/graphql/tracing/instrumentation_tracing.rb +83 -0
  35. data/lib/graphql/types/relay/node_behaviors.rb +1 -1
  36. data/lib/graphql/version.rb +1 -1
  37. metadata +7 -4
  38. data/lib/graphql/execution/instrumentation.rb +0 -92
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b8f389d3b7c8052a74ccd4bd9e8412a1fa9d93415e6caeec883b8f179a167395
- data.tar.gz: f2e6ab7ba5c1e76b0c44557855ab3af55fb2b718b8de1ff6483917de603734b3
+ metadata.gz: 46b43157a331eae41dd54897d24dbc3adac190910237c7c1c158fc2bc32b6616
+ data.tar.gz: 118ac97b740ba1340d3e6b9aee5fa2f81ca25dc8769a98ec0d8e25d63e839711
  SHA512:
- metadata.gz: 10a24271a65c65a402d3243d2e43b3b257f74eb7bbc0ec13c254304bdce3122c444a41fbccf943bd3d626d301aec4b973485f742b831e8658ea252a0c95cfcfa
- data.tar.gz: 9762008c699f2a53b14913e057dc31ec6c97b4203b1dd28a7a2a4fc18153d5bc200d0a95e3d2a04da80d0486131f5b1ce8d597e1ad6a7085b0766e35e0982959
+ metadata.gz: be38356ecdff7f8a9627bb021e83d13ea92e951b287386e76fd6e42ff05ef0a5a37ce9c98e6e4bf1834459989558f6c8b512c98e7642bef2c624578c1db24932
+ data.tar.gz: b0b75f342dca7a731b1b59e6399d364184152ceb0c5deb9fb4338a3582b3b6e38bf789a53839f6def9eb4b9c079c45e74675fb608a184dff4e6aeaf0ce052dfd
data/lib/generators/graphql/templates/schema.erb CHANGED
@@ -23,5 +23,8 @@ class <%= schema_name %> < GraphQL::Schema
  # to return the correct GraphQL object type for `obj`
  raise(GraphQL::RequiredImplementationMissingError)
  end
+
+ # Stop validating when it encounters this many errors:
+ validate_max_errors(100)
  end
  <% end -%>
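
For context, the generated default now caps validation work. A minimal sketch of an application schema using this setting (class and type names are placeholders, not part of the diff):

    class MyAppSchema < GraphQL::Schema
      query Types::QueryType  # assumes an existing root query type

      # Stop collecting validation errors after the first 100,
      # instead of exhaustively validating very broken documents.
      validate_max_errors(100)
    end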
data/lib/graphql/backtrace/table.rb CHANGED
@@ -83,7 +83,7 @@ module GraphQL
  value = if top && @override_value
  @override_value
  else
- value_at(@context.query.context.namespace(:interpreter)[:runtime], context_entry.path)
+ value_at(@context.query.context.namespace(:interpreter_runtime)[:runtime], context_entry.path)
  end
  rows << [
  "#{context_entry.ast_node ? context_entry.ast_node.position.join(":") : ""}",
@@ -112,7 +112,7 @@ module GraphQL
  if object.is_a?(GraphQL::Schema::Object)
  object = object.object
  end
- value = value_at(context_entry.namespace(:interpreter)[:runtime], [])
+ value = value_at(context_entry.namespace(:interpreter_runtime)[:runtime], [])
  rows << [
  "#{position}",
  "#{op_type}#{op_name ? " #{op_name}" : ""}",
data/lib/graphql/dataloader/source.rb CHANGED
@@ -86,6 +86,15 @@ module GraphQL
  !@pending_keys.empty?
  end

+ # Add these key-value pairs to this source's cache
+ # (future loads will use these merged values).
+ # @param results [Hash<Object => Object>] key-value pairs to cache in this source
+ # @return [void]
+ def merge(results)
+ @results.merge!(results)
+ nil
+ end
+
  # Called by {GraphQL::Dataloader} to resolve and pending requests to this source.
  # @api private
  # @return [void]
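
To show what the new `Source#merge` is for, here is a hedged sketch of priming a source's cache so that later `load` calls skip `fetch` for records already in hand (the `UserSource` class and `User` model are hypothetical):

    class UserSource < GraphQL::Dataloader::Source
      def fetch(ids)
        users = User.where(id: ids)
        ids.map { |id| users.find { |u| u.id == id } }  # keep results aligned with ids
      end
    end

    # Inside a resolver, with some users already loaded elsewhere:
    source = context.dataloader.with(UserSource)
    source.merge(preloaded_users.to_h { |u| [u.id, u] })  # seed the cache
    user = source.load(some_id)  # cache hit for seeded ids, no fetch needed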
data/lib/graphql/dataloader.rb CHANGED
@@ -289,7 +289,10 @@ module GraphQL
  fiber_locals = {}

  Thread.current.keys.each do |fiber_var_key|
- fiber_locals[fiber_var_key] = Thread.current[fiber_var_key]
+ # This variable should be fresh in each new fiber
+ if fiber_var_key != :__graphql_runtime_info
+ fiber_locals[fiber_var_key] = Thread.current[fiber_var_key]
+ end
  end

  if @nonblocking
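
For background (plain Ruby behavior, not part of the diff): `Thread.current[...]` storage is fiber-local, so values set in the parent fiber are invisible inside newly created fibers unless they are copied over, which is what this loop does; the one exception is `:__graphql_runtime_info`, which each worker fiber should now start fresh:

    Thread.current[:request_id] = "abc-123"
    Fiber.new { p Thread.current[:request_id] }.resume
    # => nil -- fiber-local storage is not inherited by child fibers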
data/lib/graphql/execution/interpreter/runtime.rb CHANGED
@@ -148,13 +148,23 @@ module GraphQL
  # @return [GraphQL::Query::Context]
  attr_reader :context

+ def thread_info
+ info = Thread.current[:__graphql_runtime_info]
+ if !info
+ new_ti = {}
+ info = Thread.current[:__graphql_runtime_info] = new_ti
+ end
+ info
+ end
+
  def initialize(query:)
  @query = query
  @dataloader = query.multiplex.dataloader
  @schema = query.schema
  @context = query.context
  @multiplex_context = query.multiplex.context
- @interpreter_context = @context.namespace(:interpreter)
+ # Start this off empty:
+ Thread.current[:__graphql_runtime_info] = nil
  @response = GraphQLResultHash.new(nil, nil)
  # Identify runtime directives by checking which of this schema's directives have overridden `def self.resolve`
  @runtime_directive_names = []
@@ -680,7 +690,11 @@ module GraphQL

  case current_type.kind.name
  when "SCALAR", "ENUM"
- r = current_type.coerce_result(value, context)
+ r = begin
+ current_type.coerce_result(value, context)
+ rescue StandardError => err
+ schema.handle_or_reraise(context, err)
+ end
  set_result(selection_result, result_name, r)
  r
  when "UNION", "INTERFACE"
@@ -870,17 +884,18 @@ module GraphQL
  end

  def set_all_interpreter_context(object, field, arguments, path)
+ ti = thread_info
  if object
- @context[:current_object] = @interpreter_context[:current_object] = object
+ ti[:current_object] = object
  end
  if field
- @context[:current_field] = @interpreter_context[:current_field] = field
+ ti[:current_field] = field
  end
  if arguments
- @context[:current_arguments] = @interpreter_context[:current_arguments] = arguments
+ ti[:current_arguments] = arguments
  end
  if path
- @context[:current_path] = @interpreter_context[:current_path] = path
+ ti[:current_path] = path
  end
  end

@@ -940,13 +955,11 @@ module GraphQL
  # Set this pair in the Query context, but also in the interpeter namespace,
  # for compatibility.
  def set_interpreter_context(key, value)
- @interpreter_context[key] = value
- @context[key] = value
+ thread_info[key] = value
  end

  def delete_interpreter_context(key)
- @interpreter_context.delete(key)
- @context.delete(key)
+ (ti = thread_info) && ti.delete(key)
  end

  def resolve_type(type, value, path)
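
The same runtime keys (`:current_path`, `:current_field`, `:current_object`, `:current_arguments`) are still meant to be read through the query context during execution; a hedged sketch, assuming the context keeps exposing them as before:

    # Inside a resolver method on a (hypothetical) object type:
    def debug_location
      {
        path: context[:current_path].join("/"),  # e.g. "user/posts/0/title"
        field: context[:current_field].path,     # e.g. "Post.title"
      }
    end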
data/lib/graphql/execution/interpreter.rb CHANGED
@@ -11,76 +11,202 @@ require "graphql/execution/interpreter/handles_raw_value"
  module GraphQL
  module Execution
  class Interpreter
- def self.begin_multiplex(multiplex)
- # Since this is basically the batching context,
- # share it for a whole multiplex
- multiplex.context[:interpreter_instance] ||= self.new
- end
+ class << self
+ # Used internally to signal that the query shouldn't be executed
+ # @api private
+ NO_OPERATION = {}.freeze

- def self.begin_query(query, multiplex)
- # The batching context is shared by the multiplex,
- # so fetch it out and use that instance.
- interpreter =
- query.context.namespace(:interpreter)[:interpreter_instance] =
- multiplex.context[:interpreter_instance]
- interpreter.evaluate(query)
- query
- end
+ # @param schema [GraphQL::Schema]
+ # @param queries [Array<GraphQL::Query, Hash>]
+ # @param context [Hash]
+ # @param max_complexity [Integer, nil]
+ # @return [Array<Hash>] One result per query
+ def run_all(schema, query_options, context: {}, max_complexity: schema.max_complexity)
+ queries = query_options.map do |opts|
+ case opts
+ when Hash
+ GraphQL::Query.new(schema, nil, **opts)
+ when GraphQL::Query
+ opts
+ else
+ raise "Expected Hash or GraphQL::Query, not #{opts.class} (#{opts.inspect})"
+ end
+ end

- def self.finish_multiplex(_results, multiplex)
- interpreter = multiplex.context[:interpreter_instance]
- interpreter.sync_lazies(multiplex: multiplex)
- end
+ multiplex = Execution::Multiplex.new(schema: schema, queries: queries, context: context, max_complexity: max_complexity)
+ multiplex.trace("execute_multiplex", { multiplex: multiplex }) do
+ schema = multiplex.schema
+ queries = multiplex.queries
+ query_instrumenters = schema.instrumenters[:query]
+ multiplex_instrumenters = schema.instrumenters[:multiplex]

- def self.finish_query(query, _multiplex)
- {
- "data" => query.context.namespace(:interpreter)[:runtime].final_result
- }
- end
+ # First, run multiplex instrumentation, then query instrumentation for each query
+ call_hooks(multiplex_instrumenters, multiplex, :before_multiplex, :after_multiplex) do
+ each_query_call_hooks(query_instrumenters, queries) do
+ schema = multiplex.schema
+ multiplex_analyzers = schema.multiplex_analyzers
+ queries = multiplex.queries
+ if multiplex.max_complexity
+ multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+ end

- # Run the eager part of `query`
- # @return {Interpreter::Runtime}
- def evaluate(query)
- # Although queries in a multiplex _share_ an Interpreter instance,
- # they also have another item of state, which is private to that query
- # in particular, assign it here:
- runtime = Runtime.new(query: query)
- query.context.namespace(:interpreter)[:runtime] = runtime
-
- query.trace("execute_query", {query: query}) do
- runtime.run_eager
- end
+ schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
+ begin
+ # Since this is basically the batching context,
+ # share it for a whole multiplex
+ multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy.new
+ # Do as much eager evaluation of the query as possible
+ results = []
+ queries.each_with_index do |query, idx|
+ multiplex.dataloader.append_job {
+ operation = query.selected_operation
+ result = if operation.nil? || !query.valid? || query.context.errors.any?
+ NO_OPERATION
+ else
+ begin
+ # Although queries in a multiplex _share_ an Interpreter instance,
+ # they also have another item of state, which is private to that query
+ # in particular, assign it here:
+ runtime = Runtime.new(query: query)
+ query.context.namespace(:interpreter_runtime)[:runtime] = runtime

- runtime
- end
+ query.trace("execute_query", {query: query}) do
+ runtime.run_eager
+ end
+ rescue GraphQL::ExecutionError => err
+ query.context.errors << err
+ NO_OPERATION
+ end
+ end
+ results[idx] = result
+ }
+ end
+
+ multiplex.dataloader.run
+
+ # Then, work through lazy results in a breadth-first way
+ multiplex.dataloader.append_job {
+ tracer = multiplex
+ query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
+ queries = multiplex ? multiplex.queries : [query]
+ final_values = queries.map do |query|
+ runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+ # it might not be present if the query has an error
+ runtime ? runtime.final_result : nil
+ end
+ final_values.compact!
+ tracer.trace("execute_query_lazy", {multiplex: multiplex, query: query}) do
+ Interpreter::Resolve.resolve_all(final_values, multiplex.dataloader)
+ end
+ queries.each do |query|
+ runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+ if runtime
+ runtime.delete_interpreter_context(:current_path)
+ runtime.delete_interpreter_context(:current_field)
+ runtime.delete_interpreter_context(:current_object)
+ runtime.delete_interpreter_context(:current_arguments)
+ end
+ end
+ }
+ multiplex.dataloader.run
+
+ # Then, find all errors and assign the result to the query object
+ results.each_with_index do |data_result, idx|
+ query = queries[idx]
+ # Assign the result so that it can be accessed in instrumentation
+ query.result_values = if data_result.equal?(NO_OPERATION)
+ if !query.valid? || query.context.errors.any?
+ # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
+ { "errors" => query.static_errors.map(&:to_h) }
+ else
+ data_result
+ end
+ else
+ result = {
+ "data" => query.context.namespace(:interpreter_runtime)[:runtime].final_result
+ }
+
+ if query.context.errors.any?
+ error_result = query.context.errors.map(&:to_h)
+ result["errors"] = error_result
+ end

- # Run the lazy part of `query` or `multiplex`.
- # @return [void]
- def sync_lazies(query: nil, multiplex: nil)
- tracer = query || multiplex
- if query.nil? && multiplex.queries.length == 1
- query = multiplex.queries[0]
+ result
+ end
+ if query.context.namespace?(:__query_result_extensions__)
+ query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
+ end
+ # Get the Query::Result, not the Hash
+ results[idx] = query.result
+ end
+
+ results
+ rescue Exception
+ # TODO rescue at a higher level so it will catch errors in analysis, too
+ # Assign values here so that the query's `@executed` becomes true
+ queries.map { |q| q.result_values ||= {} }
+ raise
+ end
+ end
+ end
+ end
  end
- queries = multiplex ? multiplex.queries : [query]
- final_values = queries.map do |query|
- runtime = query.context.namespace(:interpreter)[:runtime]
- # it might not be present if the query has an error
- runtime ? runtime.final_result : nil
+
+ private
+
+ # Call the before_ hooks of each query,
+ # Then yield if no errors.
+ # `call_hooks` takes care of appropriate cleanup.
+ def each_query_call_hooks(instrumenters, queries, i = 0)
+ if i >= queries.length
+ yield
+ else
+ query = queries[i]
+ call_hooks(instrumenters, query, :before_query, :after_query) {
+ each_query_call_hooks(instrumenters, queries, i + 1) {
+ yield
+ }
+ }
+ end
  end
- final_values.compact!
- tracer.trace("execute_query_lazy", {multiplex: multiplex, query: query}) do
- Interpreter::Resolve.resolve_all(final_values, multiplex.dataloader)
+
+ # Call each before hook, and if they all succeed, yield.
+ # If they don't all succeed, call after_ for each one that succeeded.
+ def call_hooks(instrumenters, object, before_hook_name, after_hook_name)
+ begin
+ successful = []
+ instrumenters.each do |instrumenter|
+ instrumenter.public_send(before_hook_name, object)
+ successful << instrumenter
+ end
+
+ # if any before hooks raise an exception, quit calling before hooks,
+ # but call the after hooks on anything that succeeded but also
+ # raise the exception that came from the before hook.
+ rescue GraphQL::ExecutionError => err
+ object.context.errors << err
+ rescue => e
+ raise call_after_hooks(successful, object, after_hook_name, e)
+ end
+
+ begin
+ yield # Call the user code
+ ensure
+ ex = call_after_hooks(successful, object, after_hook_name, nil)
+ raise ex if ex
+ end
  end
- queries.each do |query|
- runtime = query.context.namespace(:interpreter)[:runtime]
- if runtime
- runtime.delete_interpreter_context(:current_path)
- runtime.delete_interpreter_context(:current_field)
- runtime.delete_interpreter_context(:current_object)
- runtime.delete_interpreter_context(:current_arguments)
+
+ def call_after_hooks(instrumenters, object, after_hook_name, ex)
+ instrumenters.reverse_each do |instrumenter|
+ begin
+ instrumenter.public_send(after_hook_name, object)
+ rescue => e
+ ex = e
+ end
  end
+ ex
  end
- nil
  end

  class ListResultFailedError < GraphQL::Error
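
The public entry point for this path is unchanged: `Schema.multiplex` (or `Schema.execute` for a single document) builds the `Multiplex` and lands in `run_all`. A hedged usage sketch (schema and documents are illustrative):

    results = MyAppSchema.multiplex([
      { query: "{ viewer { name } }" },
      { query: "query($id: ID!) { user(id: $id) { name } }", variables: { "id" => "1" } },
    ])
    results.each { |res| puts res["data"].inspect }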
data/lib/graphql/execution/lookahead.rb CHANGED
@@ -76,8 +76,8 @@ module GraphQL
  # @param field_name [String, Symbol]
  # @param arguments [Hash] Arguments which must match in the selection
  # @return [Boolean]
- def selects?(field_name, arguments: nil)
- selection(field_name, arguments: arguments).selected?
+ def selects?(field_name, selected_type: @selected_type, arguments: nil)
+ selection(field_name, selected_type: selected_type, arguments: arguments).selected?
  end

  # @return [Boolean] True if this lookahead represents a field that was requested
@@ -87,16 +87,39 @@ module GraphQL

  # Like {#selects?}, but can be used for chaining.
  # It returns a null object (check with {#selected?})
+ # @param field_name [String, Symbol]
  # @return [GraphQL::Execution::Lookahead]
  def selection(field_name, selected_type: @selected_type, arguments: nil)
- next_field_name = normalize_name(field_name)
+ next_field_defn = case field_name
+ when String
+ @query.get_field(selected_type, field_name)
+ when Symbol
+ # Try to avoid the `.to_s` below, if possible
+ all_fields = if selected_type.kind.fields?
+ @query.warden.fields(selected_type)
+ else
+ # Handle unions by checking possible
+ @query.warden
+ .possible_types(selected_type)
+ .map { |t| @query.warden.fields(t) }
+ .flatten
+ end
+
+ if (match_by_orig_name = all_fields.find { |f| f.original_name == field_name })
+ match_by_orig_name
+ else
+ # Symbol#name is only present on 3.0+
+ sym_s = field_name.respond_to?(:name) ? field_name.name : field_name.to_s
+ guessed_name = Schema::Member::BuildType.camelize(sym_s)
+ @query.get_field(selected_type, guessed_name)
+ end
+ end

- next_field_defn = @query.get_field(selected_type, next_field_name)
  if next_field_defn
  next_nodes = []
  @ast_nodes.each do |ast_node|
  ast_node.selections.each do |selection|
- find_selected_nodes(selection, next_field_name, next_field_defn, arguments: arguments, matches: next_nodes)
+ find_selected_nodes(selection, next_field_defn, arguments: arguments, matches: next_nodes)
  end
  end

@@ -196,23 +219,6 @@ module GraphQL

  private

- # If it's a symbol, stringify and camelize it
- def normalize_name(name)
- if name.is_a?(Symbol)
- Schema::Member::BuildType.camelize(name.to_s)
- else
- name
- end
- end
-
- def normalize_keyword(keyword)
- if keyword.is_a?(String)
- Schema::Member::BuildType.underscore(keyword).to_sym
- else
- keyword
- end
- end
-
  def skipped_by_directive?(ast_selection)
  ast_selection.directives.each do |directive|
  dir_defn = @query.schema.directives.fetch(directive.name)
@@ -265,11 +271,11 @@ module GraphQL

  # If a selection on `node` matches `field_name` (which is backed by `field_defn`)
  # and matches the `arguments:` constraints, then add that node to `matches`
- def find_selected_nodes(node, field_name, field_defn, arguments:, matches:)
+ def find_selected_nodes(node, field_defn, arguments:, matches:)
  return if skipped_by_directive?(node)
  case node
  when GraphQL::Language::Nodes::Field
- if node.name == field_name
+ if node.name == field_defn.graphql_name
  if arguments.nil? || arguments.empty?
  # No constraint applied
  matches << node
@@ -278,10 +284,10 @@ module GraphQL
  end
  end
  when GraphQL::Language::Nodes::InlineFragment
- node.selections.each { |s| find_selected_nodes(s, field_name, field_defn, arguments: arguments, matches: matches) }
+ node.selections.each { |s| find_selected_nodes(s, field_defn, arguments: arguments, matches: matches) }
  when GraphQL::Language::Nodes::FragmentSpread
  frag_defn = @query.fragments[node.name] || raise("Invariant: Can't look ahead to nonexistent fragment #{node.name} (found: #{@query.fragments.keys})")
- frag_defn.selections.each { |s| find_selected_nodes(s, field_name, field_defn, arguments: arguments, matches: matches) }
+ frag_defn.selections.each { |s| find_selected_nodes(s, field_defn, arguments: arguments, matches: matches) }
  else
  raise "Unexpected selection comparison on #{node.class.name} (#{node})"
  end
@@ -290,9 +296,14 @@ module GraphQL
  def arguments_match?(arguments, field_defn, field_node)
  query_kwargs = @query.arguments_for(field_node, field_defn)
  arguments.all? do |arg_name, arg_value|
- arg_name = normalize_keyword(arg_name)
+ arg_name_sym = if arg_name.is_a?(String)
+ Schema::Member::BuildType.underscore(arg_name).to_sym
+ else
+ arg_name
+ end
+
  # Make sure the constraint is present with a matching value
- query_kwargs.key?(arg_name) && query_kwargs[arg_name] == arg_value
+ query_kwargs.key?(arg_name_sym) && query_kwargs[arg_name_sym] == arg_value
  end
  end
  end
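
In practice this means `Lookahead#selection` and `#selects?` now match a Symbol against the field's original underscored Ruby name first and only fall back to a camelized guess, so fields defined with `camelize: false` resolve correctly. A hedged sketch of typical lookahead use (field and type names are hypothetical):

    field :posts, [Types::PostType], null: false, extras: [:lookahead]

    def posts(lookahead:)
      scope = object.posts
      # Symbols are matched against original field names, e.g. :total_count
      scope = scope.includes(:comments) if lookahead.selects?(:comments)
      scope
    end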
data/lib/graphql/execution/multiplex.rb CHANGED
@@ -23,13 +23,10 @@ module GraphQL
  # @see {Schema#multiplex} for public API
  # @api private
  class Multiplex
- # Used internally to signal that the query shouldn't be executed
- # @api private
- NO_OPERATION = {}.freeze
-
  include Tracing::Traceable

  attr_reader :context, :queries, :schema, :max_complexity, :dataloader
+
  def initialize(schema:, queries:, context:, max_complexity:)
  @schema = schema
  @queries = queries
@@ -43,118 +40,6 @@ module GraphQL
  end
  @max_complexity = max_complexity
  end
-
- class << self
- # @param schema [GraphQL::Schema]
- # @param queries [Array<GraphQL::Query, Hash>]
- # @param context [Hash]
- # @param max_complexity [Integer, nil]
- # @return [Array<Hash>] One result per query
- def run_all(schema, query_options, context: {}, max_complexity: schema.max_complexity)
- queries = query_options.map do |opts|
- case opts
- when Hash
- GraphQL::Query.new(schema, nil, **opts)
- when GraphQL::Query
- opts
- else
- raise "Expected Hash or GraphQL::Query, not #{opts.class} (#{opts.inspect})"
- end
- end
-
- multiplex = self.new(schema: schema, queries: queries, context: context, max_complexity: max_complexity)
- multiplex.trace("execute_multiplex", { multiplex: multiplex }) do
- GraphQL::Execution::Instrumentation.apply_instrumenters(multiplex) do
- schema = multiplex.schema
- multiplex_analyzers = schema.multiplex_analyzers
- if multiplex.max_complexity
- multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
- end
-
- schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
-
- begin
- multiplex.schema.query_execution_strategy.begin_multiplex(multiplex)
- # Do as much eager evaluation of the query as possible
- results = []
- queries.each_with_index do |query, idx|
- multiplex.dataloader.append_job { begin_query(results, idx, query, multiplex) }
- end
-
- multiplex.dataloader.run
-
- # Then, work through lazy results in a breadth-first way
- multiplex.dataloader.append_job {
- multiplex.schema.query_execution_strategy.finish_multiplex(results, multiplex)
- }
- multiplex.dataloader.run
-
- # Then, find all errors and assign the result to the query object
- results.each_with_index do |data_result, idx|
- query = queries[idx]
- finish_query(data_result, query, multiplex)
- # Get the Query::Result, not the Hash
- results[idx] = query.result
- end
-
- results
- rescue Exception
- # TODO rescue at a higher level so it will catch errors in analysis, too
- # Assign values here so that the query's `@executed` becomes true
- queries.map { |q| q.result_values ||= {} }
- raise
- end
- end
- end
- end
-
- # @param query [GraphQL::Query]
- def begin_query(results, idx, query, multiplex)
- operation = query.selected_operation
- result = if operation.nil? || !query.valid? || query.context.errors.any?
- NO_OPERATION
- else
- begin
- query.schema.query_execution_strategy.begin_query(query, multiplex)
- rescue GraphQL::ExecutionError => err
- query.context.errors << err
- NO_OPERATION
- end
- end
- results[idx] = result
- nil
- end
-
- private
-
- # @param data_result [Hash] The result for the "data" key, if any
- # @param query [GraphQL::Query] The query which was run
- # @return [Hash] final result of this query, including all values and errors
- def finish_query(data_result, query, multiplex)
- # Assign the result so that it can be accessed in instrumentation
- query.result_values = if data_result.equal?(NO_OPERATION)
- if !query.valid? || query.context.errors.any?
- # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
- { "errors" => query.static_errors.map(&:to_h) }
- else
- data_result
- end
- else
- # Use `context.value` which was assigned during execution
- result = query.schema.query_execution_strategy.finish_query(query, multiplex)
-
- if query.context.errors.any?
- error_result = query.context.errors.map(&:to_h)
- result["errors"] = error_result
- end
-
- result
- end
- if query.context.namespace?(:__query_result_extensions__)
- query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
- end
- end
- end
  end
  end
  end