graphql 2.4.9 → 2.4.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. checksums.yaml +4 -4
  2. data/lib/graphql/current.rb +5 -0
  3. data/lib/graphql/dashboard/statics/bootstrap-5.3.3.min.css +6 -0
  4. data/lib/graphql/dashboard/statics/bootstrap-5.3.3.min.js +7 -0
  5. data/lib/graphql/dashboard/statics/dashboard.css +3 -0
  6. data/lib/graphql/dashboard/statics/dashboard.js +78 -0
  7. data/lib/graphql/dashboard/statics/header-icon.png +0 -0
  8. data/lib/graphql/dashboard/statics/icon.png +0 -0
  9. data/lib/graphql/dashboard/views/graphql/dashboard/landings/show.html.erb +18 -0
  10. data/lib/graphql/dashboard/views/graphql/dashboard/traces/index.html.erb +63 -0
  11. data/lib/graphql/dashboard/views/layouts/graphql/dashboard/application.html.erb +60 -0
  12. data/lib/graphql/dashboard.rb +142 -0
  13. data/lib/graphql/dataloader/active_record_association_source.rb +64 -0
  14. data/lib/graphql/dataloader/active_record_source.rb +26 -0
  15. data/lib/graphql/dataloader/async_dataloader.rb +17 -5
  16. data/lib/graphql/dataloader/null_dataloader.rb +1 -1
  17. data/lib/graphql/dataloader/source.rb +2 -2
  18. data/lib/graphql/dataloader.rb +37 -5
  19. data/lib/graphql/execution/interpreter/runtime.rb +26 -7
  20. data/lib/graphql/execution/interpreter.rb +9 -1
  21. data/lib/graphql/invalid_name_error.rb +1 -1
  22. data/lib/graphql/invalid_null_error.rb +6 -12
  23. data/lib/graphql/language/parser.rb +1 -1
  24. data/lib/graphql/query.rb +8 -4
  25. data/lib/graphql/schema/build_from_definition.rb +0 -1
  26. data/lib/graphql/schema/enum.rb +17 -2
  27. data/lib/graphql/schema/input_object.rb +1 -1
  28. data/lib/graphql/schema/interface.rb +1 -0
  29. data/lib/graphql/schema/member/has_dataloader.rb +60 -0
  30. data/lib/graphql/schema/member.rb +1 -0
  31. data/lib/graphql/schema/object.rb +17 -8
  32. data/lib/graphql/schema/resolver.rb +1 -5
  33. data/lib/graphql/schema/visibility/profile.rb +4 -4
  34. data/lib/graphql/schema/visibility.rb +14 -9
  35. data/lib/graphql/schema.rb +52 -10
  36. data/lib/graphql/static_validation/validator.rb +6 -1
  37. data/lib/graphql/tracing/active_support_notifications_trace.rb +6 -2
  38. data/lib/graphql/tracing/appoptics_trace.rb +3 -1
  39. data/lib/graphql/tracing/appsignal_trace.rb +6 -0
  40. data/lib/graphql/tracing/data_dog_trace.rb +5 -0
  41. data/lib/graphql/tracing/detailed_trace/memory_backend.rb +60 -0
  42. data/lib/graphql/tracing/detailed_trace/redis_backend.rb +72 -0
  43. data/lib/graphql/tracing/detailed_trace.rb +93 -0
  44. data/lib/graphql/tracing/new_relic_trace.rb +147 -41
  45. data/lib/graphql/tracing/perfetto_trace/trace.proto +141 -0
  46. data/lib/graphql/tracing/perfetto_trace/trace_pb.rb +33 -0
  47. data/lib/graphql/tracing/perfetto_trace.rb +737 -0
  48. data/lib/graphql/tracing/prometheus_trace.rb +22 -0
  49. data/lib/graphql/tracing/scout_trace.rb +6 -0
  50. data/lib/graphql/tracing/sentry_trace.rb +5 -0
  51. data/lib/graphql/tracing/statsd_trace.rb +9 -0
  52. data/lib/graphql/tracing/trace.rb +124 -0
  53. data/lib/graphql/tracing.rb +2 -0
  54. data/lib/graphql/version.rb +1 -1
  55. data/lib/graphql.rb +3 -0
  56. metadata +49 -3
  57. data/lib/graphql/schema/null_mask.rb +0 -11
data/lib/graphql/dataloader/active_record_association_source.rb ADDED
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+require "graphql/dataloader/source"
+require "graphql/dataloader/active_record_source"
+
+module GraphQL
+  class Dataloader
+    class ActiveRecordAssociationSource < GraphQL::Dataloader::Source
+      RECORD_SOURCE_CLASS = ActiveRecordSource
+
+      def initialize(association, scope = nil)
+        @association = association
+        @scope = scope
+      end
+
+      def load(record)
+        if (assoc = record.association(@association)).loaded?
+          assoc.target
+        else
+          super
+        end
+      end
+
+      def fetch(records)
+        record_classes = Set.new.compare_by_identity
+        associated_classes = Set.new.compare_by_identity
+        records.each do |record|
+          if record_classes.add?(record.class)
+            reflection = record.class.reflect_on_association(@association)
+            if !reflection.polymorphic? && reflection.klass
+              associated_classes.add(reflection.klass)
+            end
+          end
+        end
+
+        available_records = []
+        associated_classes.each do |assoc_class|
+          already_loaded_records = dataloader.with(RECORD_SOURCE_CLASS, assoc_class).results.values
+          available_records.concat(already_loaded_records)
+        end
+
+        ::ActiveRecord::Associations::Preloader.new(records: records, associations: @association, available_records: available_records, scope: @scope).call
+
+        loaded_associated_records = records.map { |r| r.public_send(@association) }
+        records_by_model = {}
+        loaded_associated_records.each do |record|
+          if record
+            updates = records_by_model[record.class] ||= {}
+            updates[record.id] = record
+          end
+        end
+
+        if @scope.nil?
+          # Don't cache records loaded via scope because they might have reduced `SELECT`s
+          # Could check .select_values here?
+          records_by_model.each do |model_class, updates|
+            dataloader.with(RECORD_SOURCE_CLASS, model_class).merge(updates)
+          end
+        end
+
+        loaded_associated_records
+      end
+    end
+  end
+end
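A minimal sketch of how this new source might be used from a resolver. The `:author` association and the `dataloader` helper usage below are illustrative assumptions, not part of this diff:

```ruby
# Hypothetical resolver on an object type; assumes the schema uses `use GraphQL::Dataloader`
# and `dataloader` returns the current GraphQL::Dataloader instance.
def author
  # Preloads `association(:author)` for every Post in the current batch in one pass
  # instead of issuing one query per record:
  dataloader.with(GraphQL::Dataloader::ActiveRecordAssociationSource, :author).load(object)
end
```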
data/lib/graphql/dataloader/active_record_source.rb ADDED
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+require "graphql/dataloader/source"
+
+module GraphQL
+  class Dataloader
+    class ActiveRecordSource < GraphQL::Dataloader::Source
+      def initialize(model_class, find_by: model_class.primary_key)
+        @model_class = model_class
+        @find_by = find_by
+        @type_for_column = @model_class.type_for_attribute(@find_by)
+      end
+
+      def load(requested_key)
+        casted_key = @type_for_column.cast(requested_key)
+        super(casted_key)
+      end
+
+      def fetch(record_ids)
+        records = @model_class.where(@find_by => record_ids)
+        record_lookup = {}
+        records.each { |r| record_lookup[r.public_send(@find_by)] = r }
+        record_ids.map { |id| record_lookup[id] }
+      end
+    end
+  end
+end
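A hedged usage sketch for this source; the `User` model, `id`, and `:handle` column are placeholders:

```ruby
# Batch-load records by primary key; requested keys are cast via the column type
# (see `#load` above), so "1" and 1 resolve to the same cache entry.
user = dataloader.with(GraphQL::Dataloader::ActiveRecordSource, User).load(id)

# `find_by:` may name another unique column; the cache is then keyed by that column.
user_by_handle = dataloader.with(GraphQL::Dataloader::ActiveRecordSource, User, find_by: :handle).load("rmosolgo")
```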
data/lib/graphql/dataloader/async_dataloader.rb CHANGED
@@ -2,16 +2,20 @@
 module GraphQL
   class Dataloader
     class AsyncDataloader < Dataloader
-      def yield
+      def yield(source = Fiber[:__graphql_current_dataloader_source])
+        trace = Fiber[:__graphql_current_multiplex]&.current_trace
+        trace&.dataloader_fiber_yield(source)
         if (condition = Fiber[:graphql_dataloader_next_tick])
           condition.wait
         else
           Fiber.yield
         end
+        trace&.dataloader_fiber_resume(source)
         nil
       end
 
       def run
+        trace = Fiber[:__graphql_current_multiplex]&.current_trace
         jobs_fiber_limit, total_fiber_limit = calculate_fiber_limit
         job_fibers = []
         next_job_fibers = []
@@ -20,11 +24,12 @@ module GraphQL
         first_pass = true
         sources_condition = Async::Condition.new
         manager = spawn_fiber do
+          trace&.begin_dataloader(self)
           while first_pass || !job_fibers.empty?
             first_pass = false
             fiber_vars = get_fiber_variables
 
-            while (f = (job_fibers.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size) < jobs_fiber_limit) && spawn_job_fiber)))
+            while (f = (job_fibers.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size) < jobs_fiber_limit) && spawn_job_fiber(trace))))
               if f.alive?
                 finished = run_fiber(f)
                 if !finished
@@ -38,7 +43,7 @@ module GraphQL
             Sync do |root_task|
               set_fiber_variables(fiber_vars)
               while !source_tasks.empty? || @source_cache.each_value.any? { |group_sources| group_sources.each_value.any?(&:pending?) }
-                while (task = (source_tasks.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size + next_source_tasks.size) < total_fiber_limit) && spawn_source_task(root_task, sources_condition))))
+                while (task = (source_tasks.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size + next_source_tasks.size) < total_fiber_limit) && spawn_source_task(root_task, sources_condition, trace))))
                   if task.alive?
                     root_task.yield # give the source task a chance to run
                     next_source_tasks << task
@@ -50,6 +55,7 @@ module GraphQL
               end
             end
           end
+          trace&.end_dataloader(self)
         end
 
         manager.resume
@@ -63,7 +69,7 @@
 
       private
 
-      def spawn_source_task(parent_task, condition)
+      def spawn_source_task(parent_task, condition, trace)
         pending_sources = nil
         @source_cache.each_value do |source_by_batch_params|
           source_by_batch_params.each_value do |source|
@@ -77,10 +83,16 @@
         if pending_sources
           fiber_vars = get_fiber_variables
           parent_task.async do
+            trace&.dataloader_spawn_source_fiber(pending_sources)
             set_fiber_variables(fiber_vars)
             Fiber[:graphql_dataloader_next_tick] = condition
-            pending_sources.each(&:run_pending_keys)
+            pending_sources.each do |s|
+              trace&.begin_dataloader_source(s)
+              s.run_pending_keys
+              trace&.end_dataloader_source(s)
+            end
             cleanup_fiber
+            trace&.dataloader_fiber_exit
          end
        end
      end
data/lib/graphql/dataloader/null_dataloader.rb CHANGED
@@ -11,7 +11,7 @@
       # executed synchronously.
       def run; end
       def run_isolated; yield; end
-      def yield
+      def yield(_source)
         raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources."
       end
 
data/lib/graphql/dataloader/source.rb CHANGED
@@ -93,14 +93,14 @@
       # Then run the batch and update the cache.
       # @return [void]
       def sync(pending_result_keys)
-        @dataloader.yield
+        @dataloader.yield(self)
         iterations = 0
         while pending_result_keys.any? { |key| !@results.key?(key) }
           iterations += 1
           if iterations > MAX_ITERATIONS
             raise "#{self.class}#sync tried #{MAX_ITERATIONS} times to load pending keys (#{pending_result_keys}), but they still weren't loaded. There is likely a circular dependency#{@dataloader.fiber_limit ? " or `fiber_limit: #{@dataloader.fiber_limit}` is set too low" : ""}."
           end
-          @dataloader.yield
+          @dataloader.yield(self)
         end
         nil
       end
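For context on the `sync`/`yield` handshake above: a custom source only implements `#fetch`, and callers of `#load` are suspended in `sync` until another fiber runs the batch. A minimal hypothetical source (the HTTP client and endpoint are placeholders):

```ruby
# Minimal custom source sketch: `load(id)` registers a pending key and suspends
# (via `sync` -> `dataloader.yield(self)` above) until `fetch` runs the whole batch.
class PriceSource < GraphQL::Dataloader::Source
  def fetch(product_ids)
    # One request for the whole batch; results must be returned in the same order as the keys.
    response = SomeHttpClient.get("/prices", ids: product_ids) # placeholder client
    product_ids.map { |id| response[id] }
  end
end

# In a resolver: dataloader.with(PriceSource).load(object.id)
```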
data/lib/graphql/dataloader.rb CHANGED
@@ -4,6 +4,8 @@ require "graphql/dataloader/null_dataloader"
 require "graphql/dataloader/request"
 require "graphql/dataloader/request_all"
 require "graphql/dataloader/source"
+require "graphql/dataloader/active_record_association_source"
+require "graphql/dataloader/active_record_source"
 
 module GraphQL
   # This plugin supports Fiber-based concurrency, along with {GraphQL::Dataloader::Source}.
@@ -129,8 +131,11 @@ module GraphQL
     # Dataloader will resume the fiber after the requested data has been loaded (by another Fiber).
     #
     # @return [void]
-    def yield
+    def yield(source = Fiber[:__graphql_current_dataloader_source])
+      trace = Fiber[:__graphql_current_multiplex]&.current_trace
+      trace&.dataloader_fiber_yield(source)
       Fiber.yield
+      trace&.dataloader_fiber_resume(source)
       nil
     end
 
@@ -184,6 +189,7 @@ module GraphQL
     end
 
     def run
+      trace = Fiber[:__graphql_current_multiplex]&.current_trace
       jobs_fiber_limit, total_fiber_limit = calculate_fiber_limit
       job_fibers = []
       next_job_fibers = []
@@ -191,10 +197,11 @@ module GraphQL
       next_source_fibers = []
       first_pass = true
       manager = spawn_fiber do
+        trace&.begin_dataloader(self)
        while first_pass || !job_fibers.empty?
          first_pass = false
 
-          while (f = (job_fibers.shift || (((next_job_fibers.size + job_fibers.size) < jobs_fiber_limit) && spawn_job_fiber)))
+          while (f = (job_fibers.shift || (((next_job_fibers.size + job_fibers.size) < jobs_fiber_limit) && spawn_job_fiber(trace))))
            if f.alive?
              finished = run_fiber(f)
              if !finished
@@ -205,7 +212,7 @@ module GraphQL
          join_queues(job_fibers, next_job_fibers)
 
          while (!source_fibers.empty? || @source_cache.each_value.any? { |group_sources| group_sources.each_value.any?(&:pending?) })
-            while (f = source_fibers.shift || (((job_fibers.size + source_fibers.size + next_source_fibers.size + next_job_fibers.size) < total_fiber_limit) && spawn_source_fiber))
+            while (f = source_fibers.shift || (((job_fibers.size + source_fibers.size + next_source_fibers.size + next_job_fibers.size) < total_fiber_limit) && spawn_source_fiber(trace)))
              if f.alive?
                finished = run_fiber(f)
                if !finished
@@ -216,6 +223,8 @@ module GraphQL
            join_queues(source_fibers, next_source_fibers)
          end
        end
+
+        trace&.end_dataloader(self)
      end
 
      run_fiber(manager)
@@ -230,6 +239,7 @@ module GraphQL
      if !source_fibers.empty?
        raise "Invariant: source fibers should have exited but #{source_fibers.size} remained"
      end
+
    rescue UncaughtThrowError => e
      throw e.tag, e.value
    end
@@ -247,6 +257,22 @@ module GraphQL
      }
    end
 
+    # Pre-warm the Dataloader cache with ActiveRecord objects which were loaded elsewhere.
+    # These will be used by {Dataloader::ActiveRecordSource}, {Dataloader::ActiveRecordAssociationSource} and their helper
+    # methods, `dataload_record` and `dataload_association`.
+    # @param records [Array<ActiveRecord::Base>] Already-loaded records to warm the cache with
+    # @param index_by [Symbol] The attribute to use as the cache key. (Should match `find_by:` when using {ActiveRecordSource})
+    # @return [void]
+    def merge_records(records, index_by: :id)
+      records_by_class = Hash.new { |h, k| h[k] = {} }
+      records.each do |r|
+        records_by_class[r.class][r.public_send(index_by)] = r
+      end
+      records_by_class.each do |r_class, records|
+        with(ActiveRecordSource, r_class).merge(records)
+      end
+    end
+
    private
 
    def calculate_fiber_limit
@@ -266,17 +292,19 @@ module GraphQL
      new_queue.clear
    end
 
-    def spawn_job_fiber
+    def spawn_job_fiber(trace)
      if !@pending_jobs.empty?
        spawn_fiber do
+          trace&.dataloader_spawn_execution_fiber(@pending_jobs)
          while job = @pending_jobs.shift
            job.call
          end
+          trace&.dataloader_fiber_exit
        end
      end
    end
 
-    def spawn_source_fiber
+    def spawn_source_fiber(trace)
      pending_sources = nil
      @source_cache.each_value do |source_by_batch_params|
        source_by_batch_params.each_value do |source|
@@ -289,10 +317,14 @@
 
      if pending_sources
        spawn_fiber do
+          trace&.dataloader_spawn_source_fiber(pending_sources)
          pending_sources.each do |source|
            Fiber[:__graphql_current_dataloader_source] = source
+            trace&.begin_dataloader_source(source)
            source.run_pending_keys
+            trace&.end_dataloader_source(source)
          end
+          trace&.dataloader_fiber_exit
        end
      end
    end
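The new `merge_records` method above pre-warms the per-query cache. A short usage sketch, assuming `dataloader` is the current `GraphQL::Dataloader` (for example `context.dataloader`) and the records are placeholders:

```ruby
# Records fetched outside GraphQL (e.g. in a controller) become cache hits for
# ActiveRecordSource / dataload_record during execution.
dataloader.merge_records([current_user])           # keyed by :id by default
dataloader.merge_records(posts, index_by: :slug)   # key must match the source's `find_by:`
```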
data/lib/graphql/execution/interpreter/runtime.rb CHANGED
@@ -218,8 +218,10 @@ module GraphQL
            result_name, field_ast_nodes_or_ast_node, selections_result
          )
          finished_jobs += 1
-          if target_result && finished_jobs == enqueued_jobs
-            selections_result.merge_into(target_result)
+          if finished_jobs == enqueued_jobs
+            if target_result
+              selections_result.merge_into(target_result)
+            end
          end
          @dataloader.clear_cache
        }
@@ -229,8 +231,10 @@
            result_name, field_ast_nodes_or_ast_node, selections_result
          )
          finished_jobs += 1
-          if target_result && finished_jobs == enqueued_jobs
-            selections_result.merge_into(target_result)
+          if finished_jobs == enqueued_jobs
+            if target_result
+              selections_result.merge_into(target_result)
+            end
          end
        }
      end
@@ -371,6 +375,7 @@ module GraphQL
        end
        # Actually call the field resolver and capture the result
        app_result = begin
+          @current_trace.begin_execute_field(field_defn, object, kwarg_arguments, query)
          @current_trace.execute_field(field: field_defn, ast_node: ast_node, query: query, object: object, arguments: kwarg_arguments) do
            field_defn.resolve(object, kwarg_arguments, context)
          end
@@ -383,6 +388,7 @@ module GraphQL
          ex_err
        end
      end
+      @current_trace.end_execute_field(field_defn, object, kwarg_arguments, query, app_result)
      after_lazy(app_result, field: field_defn, ast_node: ast_node, owner_object: object, arguments: resolved_arguments, result_name: result_name, result: selection_result, runtime_state: runtime_state) do |inner_result, runtime_state|
        owner_type = selection_result.graphql_result_type
        return_type = field_defn.type
@@ -391,6 +397,8 @@
          was_scoped = runtime_state.was_authorized_by_scope_items
          runtime_state.was_authorized_by_scope_items = nil
          continue_field(continue_value, owner_type, field_defn, return_type, ast_node, next_selections, false, object, resolved_arguments, result_name, selection_result, was_scoped, runtime_state)
+        else
+          nil
        end
      end
    end
@@ -465,7 +473,7 @@
          # When this comes from a list item, use the parent object:
          parent_type = selection_result.is_a?(GraphQLResultArray) ? selection_result.graphql_parent.graphql_result_type : selection_result.graphql_result_type
          # This block is called if `result_name` is not dead. (Maybe a previous invalid nil caused it be marked dead.)
-          err = parent_type::InvalidNullError.new(parent_type, field, value)
+          err = parent_type::InvalidNullError.new(parent_type, field, value, ast_node)
          schema.type_error(err, context)
        end
      else
@@ -781,8 +789,10 @@
        runtime_state.was_authorized_by_scope_items = was_authorized_by_scope_items
        # Wrap the execution of _this_ method with tracing,
        # but don't wrap the continuation below
+        result = nil
        inner_obj = begin
-          if trace
+          result = if trace
+            @current_trace.begin_execute_field(field, owner_object, arguments, query)
            @current_trace.execute_field_lazy(field: field, query: query, object: owner_object, arguments: arguments, ast_node: ast_node) do
              schema.sync_lazy(lazy_obj)
            end
@@ -797,6 +807,10 @@
        rescue GraphQL::ExecutionError => ex_err
          ex_err
        end
+        ensure
+          if trace
+            @current_trace.end_execute_field(field, owner_object, arguments, query, result)
+          end
        end
        yield(inner_obj, runtime_state)
      end
@@ -840,14 +854,19 @@
      end
 
      def resolve_type(type, value)
+        @current_trace.begin_resolve_type(type, value, context)
        resolved_type, resolved_value = @current_trace.resolve_type(query: query, type: type, object: value) do
          query.resolve_type(type, value)
        end
+        @current_trace.end_resolve_type(type, value, context, resolved_type)
 
        if lazy?(resolved_type)
          GraphQL::Execution::Lazy.new do
+            @current_trace.begin_resolve_type(type, value, context)
            @current_trace.resolve_type_lazy(query: query, type: type, object: value) do
-              schema.sync_lazy(resolved_type)
+              rt = schema.sync_lazy(resolved_type)
+              @current_trace.end_resolve_type(type, value, context, rt)
+              rt
            end
          end
        else
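These hunks wrap each traced operation with explicit `begin_…`/`end_…` hooks. A custom trace module could observe them roughly as below; the hook names and signatures are taken from this diff, while the timing logic and `trace_with` wiring are an illustrative sketch (not fiber-safe):

```ruby
# Sketch: log how long each field resolver takes, using the new hooks.
module FieldTimingTrace
  def begin_execute_field(field, object, arguments, query)
    @field_started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    super
  end

  def end_execute_field(field, object, arguments, query, result)
    elapsed_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - @field_started_at) * 1000
    puts "#{field.path}: #{elapsed_ms.round(2)}ms"
    super
  end
end

class MySchema < GraphQL::Schema
  trace_with(FieldTimingTrace)
end
```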
data/lib/graphql/execution/interpreter.rb CHANGED
@@ -33,9 +33,12 @@ module GraphQL
          end
        end
 
+
        multiplex = Execution::Multiplex.new(schema: schema, queries: queries, context: context, max_complexity: max_complexity)
        Fiber[:__graphql_current_multiplex] = multiplex
-        multiplex.current_trace.execute_multiplex(multiplex: multiplex) do
+        trace = multiplex.current_trace
+        trace.begin_execute_multiplex(multiplex)
+        trace.execute_multiplex(multiplex: multiplex) do
          schema = multiplex.schema
          queries = multiplex.queries
          lazies_at_depth = Hash.new { |h, k| h[k] = [] }
@@ -44,7 +47,10 @@
            multiplex_analyzers += [GraphQL::Analysis::MaxQueryComplexity]
          end
 
+          trace.begin_analyze_multiplex(multiplex, multiplex_analyzers)
          schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
+          trace.end_analyze_multiplex(multiplex, multiplex_analyzers)
+
          begin
            # Since this is basically the batching context,
            # share it for a whole multiplex
@@ -148,6 +154,8 @@
          }
        end
      end
+    ensure
+      trace&.end_execute_multiplex(multiplex)
    end
  end
 
data/lib/graphql/invalid_name_error.rb CHANGED
@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 module GraphQL
-  class InvalidNameError < GraphQL::ExecutionError
+  class InvalidNameError < GraphQL::Error
     attr_reader :name, :valid_regex
     def initialize(name, valid_regex)
       @name = name
data/lib/graphql/invalid_null_error.rb CHANGED
@@ -2,7 +2,7 @@
 module GraphQL
   # Raised automatically when a field's resolve function returns `nil`
   # for a non-null field.
-  class InvalidNullError < GraphQL::RuntimeTypeError
+  class InvalidNullError < GraphQL::Error
     # @return [GraphQL::BaseType] The owner of {#field}
     attr_reader :parent_type
 
@@ -12,23 +12,17 @@ module GraphQL
     # @return [nil, GraphQL::ExecutionError] The invalid value for this field
     attr_reader :value
 
-    def initialize(parent_type, field, value)
+    # @return [GraphQL::Language::Nodes::Field] the field where the error occurred
+    attr_reader :ast_node
+
+    def initialize(parent_type, field, value, ast_node)
       @parent_type = parent_type
       @field = field
       @value = value
+      @ast_node = ast_node
       super("Cannot return null for non-nullable field #{@parent_type.graphql_name}.#{@field.graphql_name}")
     end
 
-    # @return [Hash] An entry for the response's "errors" key
-    def to_h
-      { "message" => message }
-    end
-
-    # @deprecated always false
-    def parent_error?
-      false
-    end
-
     class << self
       attr_accessor :parent_class
 
data/lib/graphql/language/parser.rb CHANGED
@@ -161,7 +161,7 @@ module GraphQL
        expect_token(:VAR_SIGN)
        var_name = parse_name
        expect_token(:COLON)
-        var_type = self.type
+        var_type = self.type || raise_parse_error("Missing type definition for variable: $#{var_name}")
        default_value = if at?(:EQUALS)
          advance_token
          value
data/lib/graphql/query.rb CHANGED
@@ -97,21 +97,22 @@ module GraphQL
   # @param root_value [Object] the object used to resolve fields on the root type
   # @param max_depth [Numeric] the maximum number of nested selections allowed for this query (falls back to schema-level value)
   # @param max_complexity [Numeric] the maximum field complexity for this query (falls back to schema-level value)
-  # @param visibility_profile [Symbol]
+  # @param visibility_profile [Symbol] Another way to assign `context[:visibility_profile]`
   def initialize(schema, query_string = nil, query: nil, document: nil, context: nil, variables: nil, validate: true, static_validator: nil, visibility_profile: nil, subscription_topic: nil, operation_name: nil, root_value: nil, max_depth: schema.max_depth, max_complexity: schema.max_complexity, warden: nil, use_visibility_profile: nil)
     # Even if `variables: nil` is passed, use an empty hash for simpler logic
     variables ||= {}
     @schema = schema
     @context = schema.context_class.new(query: self, values: context)
+    if visibility_profile
+      @context[:visibility_profile] ||= visibility_profile
+    end
 
     if use_visibility_profile.nil?
       use_visibility_profile = warden ? false : schema.use_visibility_profile?
     end
 
-    @visibility_profile = visibility_profile
-
     if use_visibility_profile
-      @visibility_profile = @schema.visibility.profile_for(@context, visibility_profile)
+      @visibility_profile = @schema.visibility.profile_for(@context)
       @warden = Schema::Warden::NullWarden.new(context: @context, schema: @schema)
     else
       @visibility_profile = nil
@@ -440,6 +441,7 @@
     @warden ||= @schema.warden_class.new(schema: @schema, context: @context)
     parse_error = nil
     @document ||= begin
+      current_trace.begin_parse(query_string)
       if query_string
         GraphQL.parse(query_string, trace: self.current_trace, max_tokens: @schema.max_query_string_tokens)
       end
@@ -447,6 +449,8 @@
       parse_error = err
       @schema.parse_error(err, @context)
       nil
+    ensure
+      current_trace.end_parse(query_string)
     end
 
     @fragments = {}
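With this change, the `visibility_profile:` keyword is just another way to set `context[:visibility_profile]`. Assuming the keyword is forwarded from `Schema.execute` to `Query#initialize` (as in prior releases) and a profile named `:admin` is defined on the schema, these two calls should be equivalent:

```ruby
MySchema.execute(query_string, visibility_profile: :admin)
MySchema.execute(query_string, context: { visibility_profile: :admin })
```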
data/lib/graphql/schema/build_from_definition.rb CHANGED
@@ -298,7 +298,6 @@ module GraphQL
            description: enum_value_definition.description,
            directives: builder.prepare_directives(enum_value_definition, type_resolver),
            ast_node: enum_value_definition,
-            value_method: GraphQL::Schema::Enum.respond_to?(enum_value_definition.name.downcase) ? false : nil,
          )
        end
      end
data/lib/graphql/schema/enum.rb CHANGED
@@ -70,7 +70,9 @@ module GraphQL
        kwargs[:owner] = self
        value = enum_value_class.new(*args, **kwargs, &block)
 
-        generate_value_method(value, value_method)
+        if value_method || (value_methods && value_method != false)
+          generate_value_method(value, value_method)
+        end
 
        key = value.graphql_name
        prev_value = own_values[key]
@@ -159,6 +161,18 @@
        end
      end
 
+      def value_methods(new_value = NOT_CONFIGURED)
+        if NOT_CONFIGURED.equal?(new_value)
+          if @value_methods != nil
+            @value_methods
+          else
+            find_inherited_value(:value_methods, false)
+          end
+        else
+          @value_methods = new_value
+        end
+      end
+
      def kind
        GraphQL::TypeKinds::ENUM
      end
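The new `value_methods` class setting above turns generated value helper methods on for an enum and its subclasses, while `value_method:` still controls a single value. A configuration sketch; the enum names are placeholders and the generated method name is derived from the value's GraphQL name:

```ruby
class Types::BaseEnum < GraphQL::Schema::Enum
  value_methods(true)  # opt this enum hierarchy into generated value helpers
end

class Types::OrderStatus < Types::BaseEnum
  value "PENDING"
  value "SHIPPED", value_method: false  # opt a single value back out
end

Types::OrderStatus.pending # => "PENDING" (generated helper)
```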
@@ -220,6 +234,7 @@
        # because they would end up with names like `#<Class0x1234>::UnresolvedValueError` which messes up bug trackers
        child_class.const_set(:UnresolvedValueError, Class.new(Schema::Enum::UnresolvedValueError))
      end
+      child_class.class_eval { @value_methods = nil }
      super
    end
 
@@ -241,7 +256,7 @@
          return
        end
 
-        instance_eval("def #{value_method_name}; #{value.graphql_name.inspect}; end;")
+        instance_eval("def #{value_method_name}; #{value.graphql_name.inspect}; end;", __FILE__, __LINE__)
      end
    end
 
data/lib/graphql/schema/input_object.rb CHANGED
@@ -156,7 +156,7 @@ module GraphQL
          def #{method_name}
            self[#{method_name.inspect}]
          end
-          alias_method :#{method_name}, :#{method_name}
+          alias_method #{method_name.inspect}, #{method_name.inspect}
        RUBY
      end
      argument_defn
data/lib/graphql/schema/interface.rb CHANGED
@@ -13,6 +13,7 @@ module GraphQL
      include GraphQL::Schema::Member::Scoped
      include GraphQL::Schema::Member::HasAstNode
      include GraphQL::Schema::Member::HasUnresolvedTypeError
+      include GraphQL::Schema::Member::HasDataloader
      include GraphQL::Schema::Member::HasDirectives
      include GraphQL::Schema::Member::HasInterfaces
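`HasDataloader` (also mixed into objects and resolvers elsewhere in this release) is what exposes the `dataload_record` and `dataload_association` helpers mentioned in the `merge_records` docs above. A rough usage sketch; the type, model, field names, and exact helper signatures are assumptions, so check `GraphQL::Schema::Member::HasDataloader` for the real arguments:

```ruby
# Hypothetical object type using the convenience helpers instead of
# calling `dataloader.with(...)` directly.
class Types::PostType < Types::BaseObject
  field :author, Types::UserType, null: true
  field :reviewer, Types::UserType, null: true

  def author
    dataload_association(:author)                # batched association preload for `object`
  end

  def reviewer
    dataload_record(::User, object.reviewer_id)  # batched find-by-primary-key
  end
end
```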