graphql 2.2.5 → 2.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of graphql might be problematic.

Files changed (55)
  1. checksums.yaml +4 -4
  2. data/lib/generators/graphql/templates/schema.erb +3 -0
  3. data/lib/graphql/analysis/ast/field_usage.rb +36 -9
  4. data/lib/graphql/analysis/ast/visitor.rb +8 -0
  5. data/lib/graphql/analysis/ast.rb +10 -1
  6. data/lib/graphql/backtrace/inspect_result.rb +0 -12
  7. data/lib/graphql/coercion_error.rb +1 -9
  8. data/lib/graphql/dataloader/request.rb +5 -0
  9. data/lib/graphql/execution/interpreter/argument_value.rb +5 -1
  10. data/lib/graphql/execution/interpreter/runtime.rb +9 -0
  11. data/lib/graphql/execution/interpreter.rb +90 -150
  12. data/lib/graphql/introspection/entry_points.rb +9 -3
  13. data/lib/graphql/introspection/schema_type.rb +3 -1
  14. data/lib/graphql/language/document_from_schema_definition.rb +2 -3
  15. data/lib/graphql/language/lexer.rb +48 -30
  16. data/lib/graphql/language/nodes.rb +2 -1
  17. data/lib/graphql/language/parser.rb +25 -11
  18. data/lib/graphql/language/printer.rb +4 -0
  19. data/lib/graphql/language.rb +60 -0
  20. data/lib/graphql/pagination/array_connection.rb +6 -6
  21. data/lib/graphql/query/context.rb +30 -33
  22. data/lib/graphql/query/validation_pipeline.rb +2 -2
  23. data/lib/graphql/query/variables.rb +3 -3
  24. data/lib/graphql/query.rb +3 -3
  25. data/lib/graphql/schema/argument.rb +18 -2
  26. data/lib/graphql/schema/base_64_encoder.rb +3 -5
  27. data/lib/graphql/schema/build_from_definition.rb +9 -1
  28. data/lib/graphql/schema/field.rb +33 -30
  29. data/lib/graphql/schema/input_object.rb +1 -2
  30. data/lib/graphql/schema/interface.rb +5 -1
  31. data/lib/graphql/schema/loader.rb +2 -1
  32. data/lib/graphql/schema/member/has_arguments.rb +2 -2
  33. data/lib/graphql/schema/mutation.rb +7 -0
  34. data/lib/graphql/schema/resolver.rb +19 -10
  35. data/lib/graphql/schema/unique_within_type.rb +1 -1
  36. data/lib/graphql/schema.rb +119 -28
  37. data/lib/graphql/static_validation/literal_validator.rb +1 -2
  38. data/lib/graphql/static_validation/rules/required_input_object_attributes_are_present.rb +1 -1
  39. data/lib/graphql/static_validation/validator.rb +3 -0
  40. data/lib/graphql/subscriptions/serialize.rb +2 -0
  41. data/lib/graphql/subscriptions.rb +0 -3
  42. data/lib/graphql/testing/helpers.rb +32 -6
  43. data/lib/graphql/tracing/data_dog_trace.rb +21 -34
  44. data/lib/graphql/tracing/data_dog_tracing.rb +7 -21
  45. data/lib/graphql/tracing/legacy_hooks_trace.rb +74 -0
  46. data/lib/graphql/tracing/platform_tracing.rb +3 -1
  47. data/lib/graphql/tracing/{prometheus_tracing → prometheus_trace}/graphql_collector.rb +3 -1
  48. data/lib/graphql/tracing/prometheus_trace.rb +2 -2
  49. data/lib/graphql/tracing/sentry_trace.rb +112 -0
  50. data/lib/graphql/tracing.rb +3 -1
  51. data/lib/graphql/version.rb +1 -1
  52. data/lib/graphql.rb +10 -2
  53. metadata +38 -23
  54. data/lib/graphql/schema/base_64_bp.rb +0 -26
  55. data/lib/graphql/subscriptions/instrumentation.rb +0 -28
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 49aca16aba9ee96f9aa4229e07e5493b1e8c676a892a1fa1e9828323d44dc073
-  data.tar.gz: 95eec1c3808a12b28bcc2732bb13baf9d52ddf3f34c4971abf92f6cff3333e93
+  metadata.gz: 573ce6f5423ca2d9eb0ca3dab40f01869501f834c3b48d5cac7f7c06fbf36da9
+  data.tar.gz: c3b1f0393d2838a2478a36a43cec88560832c220a3896f2ee4c715d013d1758c
 SHA512:
-  metadata.gz: 287acc2969a3b181d2d53dc94562335fac43de4cb8873844f063b63b604948e3edb7df318e53ec42906f678f7329efc5aa8d28bdbbfd4bc7f9d504a96f745df9
-  data.tar.gz: 3ef6c000a5eeaddd295786e9baf147663556cc19c1bc46facf57b67233bf40780e42b4781f39ebd3966ca29bc6bab6b682e9bd475b4c2474f471841454cb3d3c
+  metadata.gz: 14d6c96c20c8fdb56ecddf0279e24e83c862d4133c72b3cebc44dca9e40e1c684b4f4a4c45fca81451d48d1bb97a4e6c3ae13499a40e12a76926e679a12a13f2
+  data.tar.gz: 44157aaa761da8c9f82afa9e31533ee3c87cdf4257256ec84f70f0e30dff8dc1830486c2ff02b4464a67e8815518466451ae598eabc12249cf2edaad6a9371f1
data/lib/generators/graphql/templates/schema.erb CHANGED
@@ -26,6 +26,9 @@ class <%= schema_name %> < GraphQL::Schema
     raise(GraphQL::RequiredImplementationMissingError)
   end
 
+  # Limit the size of incoming queries:
+  max_query_string_tokens(5000)
+
   # Stop validating when it encounters this many errors:
   validate_max_errors(100)
 end
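The generated schema now ships with a token limit. As a rough usage sketch, the same setting can be added to an existing schema that upgrades without re-running the generator (the schema class name below is an assumption, not part of the diff):

    class MyAppSchema < GraphQL::Schema
      query Types::QueryType

      # Reject documents that lex to more than 5,000 tokens before any
      # parsing, validation, or analysis work is done.
      max_query_string_tokens(5000)
    end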
data/lib/graphql/analysis/ast/field_usage.rb CHANGED
@@ -8,6 +8,7 @@ module GraphQL
         @used_fields = Set.new
         @used_deprecated_fields = Set.new
         @used_deprecated_arguments = Set.new
+        @used_deprecated_enum_values = Set.new
       end
 
       def on_leave_field(node, parent, visitor)
@@ -15,7 +16,7 @@ module GraphQL
         field = "#{visitor.parent_type_definition.graphql_name}.#{field_defn.graphql_name}"
         @used_fields << field
         @used_deprecated_fields << field if field_defn.deprecation_reason
-        arguments = visitor.query.arguments_for(node, visitor.field_definition)
+        arguments = visitor.query.arguments_for(node, field_defn)
         # If there was an error when preparing this argument object,
         # then this might be an error or something:
         if arguments.respond_to?(:argument_values)
@@ -28,6 +29,7 @@ module GraphQL
           used_fields: @used_fields.to_a,
           used_deprecated_fields: @used_deprecated_fields.to_a,
           used_deprecated_arguments: @used_deprecated_arguments.to_a,
+          used_deprecated_enum_values: @used_deprecated_enum_values.to_a,
         }
       end
 
@@ -39,18 +41,43 @@ module GraphQL
             @used_deprecated_arguments << argument.definition.path
           end
 
-          next if argument.value.nil?
+          arg_val = argument.value
 
-          if argument.definition.type.kind.input_object?
-            extract_deprecated_arguments(argument.value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
-          elsif argument.definition.type.list?
-            argument
-              .value
-              .select { |value| value.respond_to?(:arguments) }
-              .each { |value| extract_deprecated_arguments(value.arguments.argument_values) } # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
+          next if arg_val.nil?
+
+          argument_type = argument.definition.type
+          if argument_type.non_null?
+            argument_type = argument_type.of_type
+          end
+
+          if argument_type.kind.input_object?
+            extract_deprecated_arguments(argument.original_value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
+          elsif argument_type.kind.enum?
+            extract_deprecated_enum_value(argument_type, arg_val)
+          elsif argument_type.list?
+            inner_type = argument_type.unwrap
+            case inner_type.kind
+            when TypeKinds::INPUT_OBJECT
+              argument.original_value.each do |value|
+                extract_deprecated_arguments(value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
+              end
+            when TypeKinds::ENUM
+              arg_val.each do |value|
+                extract_deprecated_enum_value(inner_type, value)
+              end
+            else
+              # Not a kind of input that we track
+            end
          end
        end
      end
+
+      def extract_deprecated_enum_value(enum_type, value)
+        enum_value = @query.warden.enum_values(enum_type).find { |ev| ev.value == value }
+        if enum_value&.deprecation_reason
+          @used_deprecated_enum_values << enum_value.path
+        end
+      end
    end
  end
 end
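The FieldUsage result hash gains a `used_deprecated_enum_values` key. A minimal sketch of reading it, assuming a schema and query of your own (the class name, query string, and returned path are illustrative):

    query = GraphQL::Query.new(MyAppSchema, "{ items(sort: LEGACY_SORT) { id } }")
    usage = GraphQL::Analysis::AST.analyze_query(query, [GraphQL::Analysis::AST::FieldUsage]).first
    usage[:used_deprecated_enum_values] # => e.g. ["ItemSort.LEGACY_SORT"]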
data/lib/graphql/analysis/ast/visitor.rb CHANGED
@@ -118,8 +118,12 @@ module GraphQL
       def on_inline_fragment(node, parent)
         on_fragment_with_type(node) do
           @path.push("...#{node.type ? " on #{node.type.name}" : ""}")
+          @skipping = @skip_stack.last || skip?(node)
+          @skip_stack << @skipping
+
           call_on_enter_inline_fragment(node, parent)
           super
+          @skipping = @skip_stack.pop
           call_on_leave_inline_fragment(node, parent)
         end
       end
@@ -187,9 +191,13 @@ module GraphQL
 
       def on_fragment_spread(node, parent)
         @path.push("... #{node.name}")
+        @skipping = @skip_stack.last || skip?(node)
+        @skip_stack << @skipping
+
         call_on_enter_fragment_spread(node, parent)
         enter_fragment_spread_inline(node)
         super
+        @skipping = @skip_stack.pop
         leave_fragment_spread_inline(node)
         call_on_leave_fragment_spread(node, parent)
         @path.pop
data/lib/graphql/analysis/ast.rb CHANGED
@@ -6,6 +6,7 @@ require "graphql/analysis/ast/query_complexity"
 require "graphql/analysis/ast/max_query_complexity"
 require "graphql/analysis/ast/query_depth"
 require "graphql/analysis/ast/max_query_depth"
+require "timeout"
 
 module GraphQL
   module Analysis
@@ -63,7 +64,10 @@ module GraphQL
              analyzers: analyzers_to_run
            )
 
-            visitor.visit
+            # `nil` or `0` causes no timeout
+            Timeout::timeout(query.validate_timeout_remaining) do
+              visitor.visit
+            end
 
            if visitor.rescued_errors.any?
              return visitor.rescued_errors
@@ -75,6 +79,11 @@ module GraphQL
            []
          end
        end
+      rescue Timeout::Error
+        [GraphQL::AnalysisError.new("Timeout on validation of query")]
+      rescue GraphQL::UnauthorizedError
+        # This error was raised during analysis and will be returned the client before execution
+        []
      end
 
      def analysis_errors(results)
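The `Timeout` wrapper above is driven by the query's remaining validation budget. A sketch of the schema-level setting that feeds it (the schema class name is an assumption):

    class MyAppSchema < GraphQL::Schema
      # Time budget shared by static validation and AST analysis; when it runs
      # out, analysis returns the "Timeout on validation of query" error.
      validate_timeout 3 # seconds
    end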
data/lib/graphql/backtrace/inspect_result.rb CHANGED
@@ -16,12 +16,6 @@ module GraphQL
         "[" +
           obj.map { |v| inspect_truncated(v) }.join(", ") +
         "]"
-      when Query::Context::SharedMethods
-        if obj.invalid_null?
-          "nil"
-        else
-          inspect_truncated(obj.value)
-        end
       else
         inspect_truncated(obj)
       end
@@ -33,12 +27,6 @@ module GraphQL
         "{...}"
       when Array
         "[...]"
-      when Query::Context::SharedMethods
-        if obj.invalid_null?
-          "nil"
-        else
-          inspect_truncated(obj.value)
-        end
       when GraphQL::Execution::Lazy
         "(unresolved)"
       else
data/lib/graphql/coercion_error.rb CHANGED
@@ -1,13 +1,5 @@
 # frozen_string_literal: true
 module GraphQL
-  class CoercionError < GraphQL::Error
-    # @return [Hash] Optional custom data for error objects which will be added
-    # under the `extensions` key.
-    attr_accessor :extensions
-
-    def initialize(message, extensions: nil)
-      @extensions = extensions
-      super(message)
-    end
+  class CoercionError < GraphQL::ExecutionError
   end
 end
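Because `CoercionError` now inherits from `GraphQL::ExecutionError`, raising it from a custom scalar surfaces the message under the response's "errors" key without extra handling. A hedged sketch (the `Upc` scalar is an invented example, not part of the gem):

    module Types
      class Upc < GraphQL::Schema::Scalar
        def self.coerce_input(value, _context)
          return value.to_s if value.to_s.match?(/\A\d{12}\z/)

          # Delivered to the client as an execution error in the response.
          raise GraphQL::CoercionError, "#{value.inspect} is not a valid 12-digit UPC"
        end
      end
    end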
data/lib/graphql/dataloader/request.rb CHANGED
@@ -14,6 +14,11 @@ module GraphQL
       def load
         @source.load(@key)
       end
+
+      def load_with_deprecation_warning
+        warn("Returning `.request(...)` from GraphQL::Dataloader is deprecated, use `.load(...)` instead. (See usage of #{@source} with #{@key.inspect}).")
+        load
+      end
     end
   end
 end
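The new `load_with_deprecation_warning` backs a deprecation of returning `.request(...)` objects from dataloader blocks. The non-deprecated call path looks roughly like this (the source class, model, and key names are assumptions):

    # Inside a resolver: fetch through the source directly.
    record = context.dataloader.with(Sources::RecordById, ::Record).load(id)

    # Batch form, also without an intermediate request object:
    records = context.dataloader.with(Sources::RecordById, ::Record).load_all(ids)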
data/lib/graphql/execution/interpreter/argument_value.rb CHANGED
@@ -6,15 +6,19 @@ module GraphQL
       # A container for metadata regarding arguments present in a GraphQL query.
       # @see Interpreter::Arguments#argument_values for a hash of these objects.
       class ArgumentValue
-        def initialize(definition:, value:, default_used:)
+        def initialize(definition:, value:, original_value:, default_used:)
           @definition = definition
           @value = value
+          @original_value = original_value
           @default_used = default_used
         end
 
         # @return [Object] The Ruby-ready value for this Argument
         attr_reader :value
 
+        # @return [Object] The value of this argument _before_ `prepare` is applied.
+        attr_reader :original_value
+
         # @return [GraphQL::Schema::Argument] The definition instance for this argument
         attr_reader :definition
 
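A short sketch of the new reader, assuming a field whose argument uses `prepare:` (the `query`, `ast_node`, `field_defn`, and `:ids` names below are placeholders):

    arg = query.arguments_for(ast_node, field_defn).argument_values[:ids]
    arg.value          # the prepared value (for example, decoded database IDs)
    arg.original_value # the value exactly as it appeared in the query document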
data/lib/graphql/execution/interpreter/runtime.rb CHANGED
@@ -352,6 +352,15 @@ module GraphQL
        end
 
        field_result = call_method_on_directives(:resolve, object, directives) do
+          if directives.any?
+            # This might be executed in a different context; reset this info
+            runtime_state = get_current_runtime_state
+            runtime_state.current_field = field_defn
+            runtime_state.current_object = object
+            runtime_state.current_arguments = resolved_arguments
+            runtime_state.current_result_name = result_name
+            runtime_state.current_result = selection_result
+          end
          # Actually call the field resolver and capture the result
          app_result = begin
            @current_trace.execute_field(field: field_defn, ast_node: ast_node, query: query, object: object, arguments: kwarg_arguments) do
data/lib/graphql/execution/interpreter.rb CHANGED
@@ -37,173 +37,113 @@ module GraphQL
           multiplex.current_trace.execute_multiplex(multiplex: multiplex) do
             schema = multiplex.schema
             queries = multiplex.queries
-            query_instrumenters = schema.instrumenters[:query]
-            multiplex_instrumenters = schema.instrumenters[:multiplex]
             lazies_at_depth = Hash.new { |h, k| h[k] = [] }
+            multiplex_analyzers = schema.multiplex_analyzers
+            if multiplex.max_complexity
+              multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+            end
 
-            # First, run multiplex instrumentation, then query instrumentation for each query
-            call_hooks(multiplex_instrumenters, multiplex, :before_multiplex, :after_multiplex) do
-              each_query_call_hooks(query_instrumenters, queries) do
-                schema = multiplex.schema
-                multiplex_analyzers = schema.multiplex_analyzers
-                queries = multiplex.queries
-                if multiplex.max_complexity
-                  multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+            schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
+            begin
+              # Since this is basically the batching context,
+              # share it for a whole multiplex
+              multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy(deprecation_warning: false).new
+              # Do as much eager evaluation of the query as possible
+              results = []
+              queries.each_with_index do |query, idx|
+                if query.subscription? && !query.subscription_update?
+                  query.context.namespace(:subscriptions)[:events] = []
                 end
-
-                schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
-                begin
-                  # Since this is basically the batching context,
-                  # share it for a whole multiplex
-                  multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy.new
-                  # Do as much eager evaluation of the query as possible
-                  results = []
-                  queries.each_with_index do |query, idx|
-                    multiplex.dataloader.append_job {
-                      operation = query.selected_operation
-                      result = if operation.nil? || !query.valid? || query.context.errors.any?
-                        NO_OPERATION
-                      else
-                        begin
-                          # Although queries in a multiplex _share_ an Interpreter instance,
-                          # they also have another item of state, which is private to that query
-                          # in particular, assign it here:
-                          runtime = Runtime.new(query: query, lazies_at_depth: lazies_at_depth)
-                          query.context.namespace(:interpreter_runtime)[:runtime] = runtime
-
-                          query.current_trace.execute_query(query: query) do
-                            runtime.run_eager
-                          end
-                        rescue GraphQL::ExecutionError => err
-                          query.context.errors << err
-                          NO_OPERATION
-                        end
+                multiplex.dataloader.append_job {
+                  operation = query.selected_operation
+                  result = if operation.nil? || !query.valid? || query.context.errors.any?
+                    NO_OPERATION
+                  else
+                    begin
+                      # Although queries in a multiplex _share_ an Interpreter instance,
+                      # they also have another item of state, which is private to that query
+                      # in particular, assign it here:
+                      runtime = Runtime.new(query: query, lazies_at_depth: lazies_at_depth)
+                      query.context.namespace(:interpreter_runtime)[:runtime] = runtime
+
+                      query.current_trace.execute_query(query: query) do
+                        runtime.run_eager
                       end
-                      results[idx] = result
-                    }
+                    rescue GraphQL::ExecutionError => err
+                      query.context.errors << err
+                      NO_OPERATION
+                    end
                   end
+                  results[idx] = result
+                }
+              end
 
-                  multiplex.dataloader.run
+              multiplex.dataloader.run
 
-                  # Then, work through lazy results in a breadth-first way
-                  multiplex.dataloader.append_job {
-                    query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
-                    queries = multiplex ? multiplex.queries : [query]
-                    final_values = queries.map do |query|
-                      runtime = query.context.namespace(:interpreter_runtime)[:runtime]
-                      # it might not be present if the query has an error
-                      runtime ? runtime.final_result : nil
-                    end
-                    final_values.compact!
-                    multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
-                      Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
-                    end
-                  }
-                  multiplex.dataloader.run
-
-                  # Then, find all errors and assign the result to the query object
-                  results.each_with_index do |data_result, idx|
-                    query = queries[idx]
-                    # Assign the result so that it can be accessed in instrumentation
-                    query.result_values = if data_result.equal?(NO_OPERATION)
-                      if !query.valid? || query.context.errors.any?
-                        # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
-                        { "errors" => query.static_errors.map(&:to_h) }
-                      else
-                        data_result
-                      end
-                    else
-                      result = {
-                        "data" => query.context.namespace(:interpreter_runtime)[:runtime].final_result
-                      }
+              # Then, work through lazy results in a breadth-first way
+              multiplex.dataloader.append_job {
+                query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
+                queries = multiplex ? multiplex.queries : [query]
+                final_values = queries.map do |query|
+                  runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+                  # it might not be present if the query has an error
+                  runtime ? runtime.final_result : nil
+                end
+                final_values.compact!
+                multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
+                  Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
+                end
+              }
+              multiplex.dataloader.run
 
-                      if query.context.errors.any?
-                        error_result = query.context.errors.map(&:to_h)
-                        result["errors"] = error_result
-                      end
+              # Then, find all errors and assign the result to the query object
+              results.each_with_index do |data_result, idx|
+                query = queries[idx]
+                if (events = query.context.namespace(:subscriptions)[:events]) && events.any?
+                  schema.subscriptions.write_subscription(query, events)
+                end
+                # Assign the result so that it can be accessed in instrumentation
+                query.result_values = if data_result.equal?(NO_OPERATION)
+                  if !query.valid? || query.context.errors.any?
+                    # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
+                    { "errors" => query.static_errors.map(&:to_h) }
+                  else
+                    data_result
+                  end
+                else
+                  result = {}
 
-                      result
-                    end
-                    if query.context.namespace?(:__query_result_extensions__)
-                      query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
-                    end
-                    # Get the Query::Result, not the Hash
-                    results[idx] = query.result
+                  if query.context.errors.any?
+                    error_result = query.context.errors.map(&:to_h)
+                    result["errors"] = error_result
                   end
 
-                  results
-                rescue Exception
-                  # TODO rescue at a higher level so it will catch errors in analysis, too
-                  # Assign values here so that the query's `@executed` becomes true
-                  queries.map { |q| q.result_values ||= {} }
-                  raise
-                ensure
-                  queries.map { |query|
-                    runtime = query.context.namespace(:interpreter_runtime)[:runtime]
-                    if runtime
-                      runtime.delete_all_interpreter_context
-                    end
-                  }
+                  result["data"] = query.context.namespace(:interpreter_runtime)[:runtime].final_result
+
+                  result
+                end
+                if query.context.namespace?(:__query_result_extensions__)
+                  query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
                 end
+                # Get the Query::Result, not the Hash
+                results[idx] = query.result
              end
-                  end
-                end
-              end
 
-        private
-
-        # Call the before_ hooks of each query,
-        # Then yield if no errors.
-        # `call_hooks` takes care of appropriate cleanup.
-        def each_query_call_hooks(instrumenters, queries, i = 0)
-          if i >= queries.length
-            yield
-          else
-            query = queries[i]
-            call_hooks(instrumenters, query, :before_query, :after_query) {
-              each_query_call_hooks(instrumenters, queries, i + 1) {
-                yield
+              results
+            rescue Exception
+              # TODO rescue at a higher level so it will catch errors in analysis, too
+              # Assign values here so that the query's `@executed` becomes true
+              queries.map { |q| q.result_values ||= {} }
+              raise
+            ensure
+              queries.map { |query|
+                runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+                if runtime
+                  runtime.delete_all_interpreter_context
+                end
              }
-            }
-          end
-        end
-
-        # Call each before hook, and if they all succeed, yield.
-        # If they don't all succeed, call after_ for each one that succeeded.
-        def call_hooks(instrumenters, object, before_hook_name, after_hook_name)
-          begin
-            successful = []
-            instrumenters.each do |instrumenter|
-              instrumenter.public_send(before_hook_name, object)
-              successful << instrumenter
-            end
-
-            # if any before hooks raise an exception, quit calling before hooks,
-            # but call the after hooks on anything that succeeded but also
-            # raise the exception that came from the before hook.
-          rescue GraphQL::ExecutionError => err
-            object.context.errors << err
-          rescue => e
-            raise call_after_hooks(successful, object, after_hook_name, e)
-          end
-
-          begin
-            yield # Call the user code
-          ensure
-            ex = call_after_hooks(successful, object, after_hook_name, nil)
-            raise ex if ex
-          end
-        end
-
-        def call_after_hooks(instrumenters, object, after_hook_name, ex)
-          instrumenters.reverse_each do |instrumenter|
-            begin
-              instrumenter.public_send(after_hook_name, object)
-            rescue => e
-              ex = e
            end
          end
-          ex
        end
      end
 
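This hunk removes the interpreter's `call_hooks` instrumentation plumbing; per-query work that used to live in `before_query`/`after_query` instrumenters belongs in a trace module instead. A rough sketch of that replacement (the module and schema names are assumptions):

    module TimingTrace
      def execute_query(query:)
        started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        super
      ensure
        elapsed_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at) * 1000
        query.context[:runtime_ms] = elapsed_ms.round
      end
    end

    class MyAppSchema < GraphQL::Schema
      trace_with(TimingTrace)
    end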
data/lib/graphql/introspection/entry_points.rb CHANGED
@@ -9,13 +9,19 @@ module GraphQL
 
       def __schema
         # Apply wrapping manually since this field isn't wrapped by instrumentation
-        schema = @context.query.schema
+        schema = context.schema
         schema_type = schema.introspection_system.types["__Schema"]
-        schema_type.wrap(schema, @context)
+        schema_type.wrap(schema, context)
       end
 
       def __type(name:)
-        context.warden.reachable_type?(name) ? context.warden.get_type(name) : nil
+        if context.warden.reachable_type?(name)
+          context.warden.get_type(name)
+        elsif (type = context.schema.extra_types.find { |t| t.graphql_name == name })
+          type
+        else
+          nil
+        end
       end
     end
   end
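The new `context.schema.extra_types` lookup means types registered via `extra_types` become introspectable even when nothing in the schema references them. A sketch (the type and schema names are assumptions):

    class MyAppSchema < GraphQL::Schema
      # Expose an otherwise-unreferenced type to `__type(name:)` and `__schema { types }`.
      extra_types(Types::LegacyMoney)
    end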
data/lib/graphql/introspection/schema_type.rb CHANGED
@@ -20,7 +20,9 @@ module GraphQL
       end
 
       def types
-        @context.warden.reachable_types.sort_by(&:graphql_name)
+        types = context.warden.reachable_types + context.schema.extra_types
+        types.sort_by!(&:graphql_name)
+        types
       end
 
       def query_type
data/lib/graphql/language/document_from_schema_definition.rb CHANGED
@@ -24,7 +24,7 @@ module GraphQL
       @include_built_in_directives = include_built_in_directives
       @include_one_of = false
 
-      schema_context = schema.context_class.new(query: nil, object: nil, schema: schema, values: context)
+      schema_context = schema.context_class.new(query: nil, schema: schema, values: context)
 
 
       @warden = @schema.warden_class.new(
@@ -266,8 +266,7 @@ module GraphQL
       end
       definitions = build_directive_nodes(dirs_to_build)
 
-      type_nodes = build_type_definition_nodes(warden.reachable_types)
-
+      type_nodes = build_type_definition_nodes(warden.reachable_types + schema.extra_types)
       if @include_one_of
         # This may have been set to true when iterating over all types
         definitions.concat(build_directive_nodes([GraphQL::Schema::Directive::OneOf]))
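With the change above, those extra types also appear when the schema is dumped to SDL, for example:

    # Assumes the `extra_types` registration sketched earlier.
    puts MyAppSchema.to_definition # printed SDL now includes Types::LegacyMoney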