graphql 2.4.8 → 2.4.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/lib/graphql/backtrace/table.rb +95 -55
  3. data/lib/graphql/backtrace.rb +1 -19
  4. data/lib/graphql/current.rb +5 -0
  5. data/lib/graphql/dataloader/active_record_association_source.rb +64 -0
  6. data/lib/graphql/dataloader/active_record_source.rb +26 -0
  7. data/lib/graphql/dataloader/async_dataloader.rb +17 -5
  8. data/lib/graphql/dataloader/null_dataloader.rb +1 -1
  9. data/lib/graphql/dataloader/source.rb +2 -2
  10. data/lib/graphql/dataloader.rb +37 -5
  11. data/lib/graphql/execution/interpreter/runtime/graphql_result.rb +11 -4
  12. data/lib/graphql/execution/interpreter/runtime.rb +59 -32
  13. data/lib/graphql/execution/interpreter.rb +9 -1
  14. data/lib/graphql/execution/multiplex.rb +0 -4
  15. data/lib/graphql/introspection/directive_location_enum.rb +1 -1
  16. data/lib/graphql/language/parser.rb +1 -1
  17. data/lib/graphql/query.rb +8 -12
  18. data/lib/graphql/schema/build_from_definition.rb +1 -0
  19. data/lib/graphql/schema/enum.rb +21 -1
  20. data/lib/graphql/schema/interface.rb +1 -0
  21. data/lib/graphql/schema/loader.rb +1 -0
  22. data/lib/graphql/schema/member/has_dataloader.rb +56 -0
  23. data/lib/graphql/schema/member.rb +1 -0
  24. data/lib/graphql/schema/object.rb +17 -8
  25. data/lib/graphql/schema/resolver.rb +2 -5
  26. data/lib/graphql/schema/validator/required_validator.rb +23 -6
  27. data/lib/graphql/schema/visibility/profile.rb +5 -5
  28. data/lib/graphql/schema/visibility.rb +14 -9
  29. data/lib/graphql/schema.rb +9 -25
  30. data/lib/graphql/static_validation/validator.rb +6 -1
  31. data/lib/graphql/subscriptions/serialize.rb +1 -3
  32. data/lib/graphql/tracing/appoptics_trace.rb +1 -1
  33. data/lib/graphql/tracing/new_relic_trace.rb +138 -41
  34. data/lib/graphql/tracing/perfetto_trace/trace.proto +141 -0
  35. data/lib/graphql/tracing/perfetto_trace/trace_pb.rb +33 -0
  36. data/lib/graphql/tracing/perfetto_trace.rb +726 -0
  37. data/lib/graphql/tracing/trace.rb +125 -1
  38. data/lib/graphql/tracing.rb +1 -0
  39. data/lib/graphql/version.rb +1 -1
  40. metadata +135 -10
  41. data/lib/graphql/backtrace/inspect_result.rb +0 -38
  42. data/lib/graphql/backtrace/trace.rb +0 -93
  43. data/lib/graphql/backtrace/tracer.rb +0 -80
  44. data/lib/graphql/schema/null_mask.rb +0 -11
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 386222d0dc13d35729460ee8f3cd9fa30f90db9cb7dc2b2392489c9e0bc8939b
-  data.tar.gz: 5229e7ee84d4c4e0a0c8afd10ef2b0185b0eedfc611a8db38047171d9c04a6e2
+  metadata.gz: e8174a7314a4dc73aa720915044a1b068a39eccb981ca7553b7f894b60e94bdb
+  data.tar.gz: 2bfb0e581036b944d2bd0f42b9ae580d990a848506b6ea20c9192c71c46e009d
 SHA512:
-  metadata.gz: 4cb91383b995e8714915d2c99d8c2d4fa15eaad91f0069766d7e8d379cae15c2aae6eb7a3961e440d62a961837a044aed604802960f199e8670ecf973c991fb2
-  data.tar.gz: 8cd21fd942892100a0647bf9897e6e7d8651d184aee76a2988633fa3e9dc505163f330bb1f0ca3f4819cbb3248cd88e9ac252f516449ae169c1a3f47a982e11c
+  metadata.gz: 5277ca8f41431b3854bbf85e64aa6c045701c803c18c927d21e2a89cdc2130a1c6bcb7c0e166fb3c7fcbdaac553f1bbf83bc00043ce8efd27b93dd8810b206a3
+  data.tar.gz: 0564e407251fad40e9a3cec275a172980407b97659a8db85cb177ab2b03c5244b320661096eb6e49b3df87897b39c66f727312bb310516341d962013d5899d02
data/lib/graphql/backtrace/table.rb CHANGED
@@ -36,7 +36,70 @@ module GraphQL
       private
 
       def rows
-        @rows ||= build_rows(@context, rows: [HEADERS], top: true)
+        @rows ||= begin
+          query = @context.query
+          query_ctx = @context
+          runtime_inst = query_ctx.namespace(:interpreter_runtime)[:runtime]
+          result = runtime_inst.instance_variable_get(:@response)
+          rows = []
+          result_path = []
+          last_part = nil
+          path = @context.current_path
+          path.each do |path_part|
+            value = value_at(runtime_inst, result_path)
+
+            if result_path.empty?
+              name = query.selected_operation.operation_type || "query"
+              if (n = query.selected_operation_name)
+                name += " #{n}"
+              end
+              args = query.variables
+            else
+              name = result.graphql_field.path
+              args = result.graphql_arguments
+            end
+
+            object = result.graphql_parent ? result.graphql_parent.graphql_application_value : result.graphql_application_value
+            object = object.object.inspect
+
+            rows << [
+              result.ast_node.position.join(":"),
+              name,
+              "#{object}",
+              args.to_h.inspect,
+              inspect_result(value),
+            ]
+
+            result_path << path_part
+            if path_part == path.last
+              last_part = path_part
+            else
+              result = result[path_part]
+            end
+          end
+
+
+          object = result.graphql_application_value.object.inspect
+          ast_node = result.graphql_selections.find { |s| s.alias == last_part || s.name == last_part }
+          field_defn = query.get_field(result.graphql_result_type, ast_node.name)
+          args = query.arguments_for(ast_node, field_defn).to_h
+          field_path = field_defn.path
+          if ast_node.alias
+            field_path += " as #{ast_node.alias}"
+          end
+
+          rows << [
+            ast_node.position.join(":"),
+            field_path,
+            "#{object}",
+            args.inspect,
+            inspect_result(@override_value)
+          ]
+
+          rows << HEADERS
+          rows.reverse!
+          rows
+        end
       end
 
       # @return [String]
@@ -75,67 +138,44 @@ module GraphQL
         table
       end
 
-      # @return [Array] 5 items for a backtrace table (not `key`)
-      def build_rows(context_entry, rows:, top: false)
-        case context_entry
-        when Backtrace::Frame
-          field_alias = context_entry.ast_node.respond_to?(:alias) && context_entry.ast_node.alias
-          value = if top && @override_value
-            @override_value
-          else
-            value_at(@context.query.context.namespace(:interpreter_runtime)[:runtime], context_entry.path)
-          end
-          rows << [
-            "#{context_entry.ast_node ? context_entry.ast_node.position.join(":") : ""}",
-            "#{context_entry.field.path}#{field_alias ? " as #{field_alias}" : ""}",
-            "#{context_entry.object.object.inspect}",
-            context_entry.arguments.to_h.inspect, # rubocop:disable Development/ContextIsPassedCop -- unrelated method
-            Backtrace::InspectResult.inspect_result(value),
-          ]
-          if (parent = context_entry.parent_frame)
-            build_rows(parent, rows: rows)
-          else
-            rows
-          end
-        when GraphQL::Query::Context
-          query = context_entry.query
-          op = query.selected_operation
-          if op
-            op_type = op.operation_type
-            position = "#{op.line}:#{op.col}"
-          else
-            op_type = "query"
-            position = "?:?"
-          end
-          op_name = query.selected_operation_name
-          object = query.root_value
-          if object.is_a?(GraphQL::Schema::Object)
-            object = object.object
-          end
-          value = value_at(context_entry.namespace(:interpreter_runtime)[:runtime], [])
-          rows << [
-            "#{position}",
-            "#{op_type}#{op_name ? " #{op_name}" : ""}",
-            "#{object.inspect}",
-            query.variables.to_h.inspect,
-            Backtrace::InspectResult.inspect_result(value),
-          ]
-        else
-          raise "Unexpected get_rows subject #{context_entry.class} (#{context_entry.inspect})"
-        end
-      end
 
       def value_at(runtime, path)
         response = runtime.final_result
         path.each do |key|
-          if response && (response = response[key])
-            next
-          else
-            break
-          end
+          response && (response = response[key])
         end
         response
       end
+
+      def inspect_result(obj)
+        case obj
+        when Hash
+          "{" +
+            obj.map do |key, val|
+              "#{key}: #{inspect_truncated(val)}"
+            end.join(", ") +
+          "}"
+        when Array
+          "[" +
+            obj.map { |v| inspect_truncated(v) }.join(", ") +
+          "]"
+        else
+          inspect_truncated(obj)
+        end
+      end
+
+      def inspect_truncated(obj)
+        case obj
+        when Hash
+          "{...}"
+        when Array
+          "[...]"
+        when GraphQL::Execution::Lazy
+          "(unresolved)"
+        else
+          "#{obj.inspect}"
+        end
+      end
     end
   end
 end
data/lib/graphql/backtrace.rb CHANGED
@@ -1,9 +1,6 @@
 # frozen_string_literal: true
-require "graphql/backtrace/inspect_result"
 require "graphql/backtrace/table"
 require "graphql/backtrace/traced_error"
-require "graphql/backtrace/tracer"
-require "graphql/backtrace/trace"
 module GraphQL
   # Wrap unhandled errors with {TracedError}.
   #
@@ -24,7 +21,7 @@ module GraphQL
     def_delegators :to_a, :each, :[]
 
     def self.use(schema_defn)
-      schema_defn.trace_with(self::Trace)
+      schema_defn.using_backtrace = true
     end
 
     def initialize(context, value: nil)
@@ -40,20 +37,5 @@ module GraphQL
     def to_a
       @table.to_backtrace
     end
-
-    # Used for internal bookkeeping
-    # @api private
-    class Frame
-      attr_reader :path, :query, :ast_node, :object, :field, :arguments, :parent_frame
-      def initialize(path:, query:, ast_node:, object:, field:, arguments:, parent_frame:)
-        @path = path
-        @query = query
-        @ast_node = ast_node
-        @field = field
-        @object = object
-        @arguments = arguments
-        @parent_frame = parent_frame
-      end
-    end
   end
 end
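
Note on the `Backtrace.use` change above: the plugin no longer installs a `Trace` module, it only flips a schema flag, and enabling it is unchanged. A minimal sketch, assuming a hypothetical schema and query type:

```ruby
# Hypothetical schema: `use GraphQL::Backtrace` still opts in to annotated
# error backtraces; internally it now just sets `schema.using_backtrace = true`.
class MySchema < GraphQL::Schema
  query Types::QueryType   # assumed to be defined elsewhere
  use GraphQL::Backtrace   # unhandled errors are wrapped in GraphQL::Backtrace::TracedError
end
```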
data/lib/graphql/current.rb CHANGED
@@ -48,5 +48,10 @@ module GraphQL
     def self.dataloader_source_class
       Fiber[:__graphql_current_dataloader_source]&.class
     end
+
+    # @return [GraphQL::Dataloader::Source, nil] The currently-running source, if there is one
+    def self.dataloader_source
+      Fiber[:__graphql_current_dataloader_source]
+    end
   end
 end
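
The new `GraphQL::Current.dataloader_source` pairs with the existing `dataloader_source_class`, returning the running source instance instead of just its class. A hedged sketch of reading it from batch-loading or instrumentation code; the logging itself is illustrative:

```ruby
# Illustrative only: returns the currently-running Dataloader source, or nil
# when called outside of a source's `fetch`.
if (source = GraphQL::Current.dataloader_source)
  warn "Batch-loading via #{source.class} during #{GraphQL::Current.operation_name.inspect}"
end
```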
data/lib/graphql/dataloader/active_record_association_source.rb ADDED
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+require "graphql/dataloader/source"
+require "graphql/dataloader/active_record_source"
+
+module GraphQL
+  class Dataloader
+    class ActiveRecordAssociationSource < GraphQL::Dataloader::Source
+      RECORD_SOURCE_CLASS = ActiveRecordSource
+
+      def initialize(association, scope = nil)
+        @association = association
+        @scope = scope
+      end
+
+      def load(record)
+        if (assoc = record.association(@association)).loaded?
+          assoc.target
+        else
+          super
+        end
+      end
+
+      def fetch(records)
+        record_classes = Set.new.compare_by_identity
+        associated_classes = Set.new.compare_by_identity
+        records.each do |record|
+          if record_classes.add?(record.class)
+            reflection = record.class.reflect_on_association(@association)
+            if !reflection.polymorphic? && reflection.klass
+              associated_classes.add(reflection.klass)
+            end
+          end
+        end
+
+        available_records = []
+        associated_classes.each do |assoc_class|
+          already_loaded_records = dataloader.with(RECORD_SOURCE_CLASS, assoc_class).results.values
+          available_records.concat(already_loaded_records)
+        end
+
+        ::ActiveRecord::Associations::Preloader.new(records: records, associations: @association, available_records: available_records, scope: @scope).call
+
+        loaded_associated_records = records.map { |r| r.public_send(@association) }
+        records_by_model = {}
+        loaded_associated_records.each do |record|
+          if record
+            updates = records_by_model[record.class] ||= {}
+            updates[record.id] = record
+          end
+        end
+
+        if @scope.nil?
+          # Don't cache records loaded via scope because they might have reduced `SELECT`s
+          # Could check .select_values here?
+          records_by_model.each do |model_class, updates|
+            dataloader.with(RECORD_SOURCE_CLASS, model_class).merge(updates)
+          end
+        end
+
+        loaded_associated_records
+      end
+    end
+  end
+end
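
A hedged usage sketch for the new association source; `Post` and its `author` association are hypothetical models and the resolver method is illustrative:

```ruby
# Inside a field resolver: batch-load the `author` association for each Post,
# reusing any association ActiveRecord has already loaded on the record.
def author
  context.dataloader
    .with(GraphQL::Dataloader::ActiveRecordAssociationSource, :author)
    .load(object)
end
```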
data/lib/graphql/dataloader/active_record_source.rb ADDED
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+require "graphql/dataloader/source"
+
+module GraphQL
+  class Dataloader
+    class ActiveRecordSource < GraphQL::Dataloader::Source
+      def initialize(model_class, find_by: model_class.primary_key)
+        @model_class = model_class
+        @find_by = find_by
+        @type_for_column = @model_class.type_for_attribute(@find_by)
+      end
+
+      def load(requested_key)
+        casted_key = @type_for_column.cast(requested_key)
+        super(casted_key)
+      end
+
+      def fetch(record_ids)
+        records = @model_class.where(@find_by => record_ids)
+        record_lookup = {}
+        records.each { |r| record_lookup[r.public_send(@find_by)] = r }
+        record_ids.map { |id| record_lookup[id] }
+      end
+    end
+  end
+end
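
A hedged usage sketch for `ActiveRecordSource`; `Post` and `User` are hypothetical models:

```ruby
# Batch-load by primary key (the default), or by another column via `find_by:`.
post = context.dataloader.with(GraphQL::Dataloader::ActiveRecordSource, Post).load(5)
user = context.dataloader.with(GraphQL::Dataloader::ActiveRecordSource, User, find_by: :handle).load("rmosolgo")
```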
data/lib/graphql/dataloader/async_dataloader.rb CHANGED
@@ -2,16 +2,20 @@
 module GraphQL
   class Dataloader
     class AsyncDataloader < Dataloader
-      def yield
+      def yield(source = Fiber[:__graphql_current_dataloader_source])
+        trace = Fiber[:__graphql_current_multiplex]&.current_trace
+        trace&.dataloader_fiber_yield(source)
         if (condition = Fiber[:graphql_dataloader_next_tick])
           condition.wait
         else
           Fiber.yield
         end
+        trace&.dataloader_fiber_resume(source)
         nil
       end
 
       def run
+        trace = Fiber[:__graphql_current_multiplex]&.current_trace
         jobs_fiber_limit, total_fiber_limit = calculate_fiber_limit
         job_fibers = []
         next_job_fibers = []
@@ -20,11 +24,12 @@ module GraphQL
         first_pass = true
         sources_condition = Async::Condition.new
         manager = spawn_fiber do
+          trace&.begin_dataloader(self)
           while first_pass || !job_fibers.empty?
             first_pass = false
             fiber_vars = get_fiber_variables
 
-            while (f = (job_fibers.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size) < jobs_fiber_limit) && spawn_job_fiber)))
+            while (f = (job_fibers.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size) < jobs_fiber_limit) && spawn_job_fiber(trace))))
               if f.alive?
                 finished = run_fiber(f)
                 if !finished
@@ -38,7 +43,7 @@ module GraphQL
             Sync do |root_task|
               set_fiber_variables(fiber_vars)
               while !source_tasks.empty? || @source_cache.each_value.any? { |group_sources| group_sources.each_value.any?(&:pending?) }
-                while (task = (source_tasks.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size + next_source_tasks.size) < total_fiber_limit) && spawn_source_task(root_task, sources_condition))))
+                while (task = (source_tasks.shift || (((job_fibers.size + next_job_fibers.size + source_tasks.size + next_source_tasks.size) < total_fiber_limit) && spawn_source_task(root_task, sources_condition, trace))))
                   if task.alive?
                     root_task.yield # give the source task a chance to run
                     next_source_tasks << task
@@ -50,6 +55,7 @@ module GraphQL
              end
            end
          end
+          trace&.end_dataloader(self)
        end
 
        manager.resume
@@ -63,7 +69,7 @@ module GraphQL
 
      private
 
-      def spawn_source_task(parent_task, condition)
+      def spawn_source_task(parent_task, condition, trace)
        pending_sources = nil
        @source_cache.each_value do |source_by_batch_params|
          source_by_batch_params.each_value do |source|
@@ -77,10 +83,16 @@ module GraphQL
        if pending_sources
          fiber_vars = get_fiber_variables
          parent_task.async do
+            trace&.dataloader_spawn_source_fiber(pending_sources)
            set_fiber_variables(fiber_vars)
            Fiber[:graphql_dataloader_next_tick] = condition
-            pending_sources.each(&:run_pending_keys)
+            pending_sources.each do |s|
+              trace&.begin_dataloader_source(s)
+              s.run_pending_keys
+              trace&.end_dataloader_source(s)
+            end
            cleanup_fiber
+            trace&.dataloader_fiber_exit
          end
        end
      end
data/lib/graphql/dataloader/null_dataloader.rb CHANGED
@@ -11,7 +11,7 @@ module GraphQL
     # executed synchronously.
     def run; end
     def run_isolated; yield; end
-    def yield
+    def yield(_source)
       raise GraphQL::Error, "GraphQL::Dataloader is not running -- add `use GraphQL::Dataloader` to your schema to use Dataloader sources."
     end
 
data/lib/graphql/dataloader/source.rb CHANGED
@@ -93,14 +93,14 @@ module GraphQL
       # Then run the batch and update the cache.
       # @return [void]
       def sync(pending_result_keys)
-        @dataloader.yield
+        @dataloader.yield(self)
         iterations = 0
         while pending_result_keys.any? { |key| !@results.key?(key) }
           iterations += 1
           if iterations > MAX_ITERATIONS
             raise "#{self.class}#sync tried #{MAX_ITERATIONS} times to load pending keys (#{pending_result_keys}), but they still weren't loaded. There is likely a circular dependency#{@dataloader.fiber_limit ? " or `fiber_limit: #{@dataloader.fiber_limit}` is set too low" : ""}."
           end
-          @dataloader.yield
+          @dataloader.yield(self)
         end
         nil
       end
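
For context, the `@dataloader.yield(self)` calls above are internal to `Source#sync`; a custom source only needs to implement `fetch`. A minimal sketch, with a hypothetical model and source name:

```ruby
# Hypothetical custom source: keys are slugs, and fetch must return one value
# (record or nil) per requested key, in order.
class UserBySlugSource < GraphQL::Dataloader::Source
  def fetch(slugs)
    users = User.where(slug: slugs)
    slugs.map { |slug| users.find { |u| u.slug == slug } }
  end
end

# In a resolver: context.dataloader.with(UserBySlugSource).load("rmosolgo")
```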
data/lib/graphql/dataloader.rb CHANGED
@@ -4,6 +4,8 @@ require "graphql/dataloader/null_dataloader"
 require "graphql/dataloader/request"
 require "graphql/dataloader/request_all"
 require "graphql/dataloader/source"
+require "graphql/dataloader/active_record_association_source"
+require "graphql/dataloader/active_record_source"
 
 module GraphQL
   # This plugin supports Fiber-based concurrency, along with {GraphQL::Dataloader::Source}.
@@ -129,8 +131,11 @@ module GraphQL
     # Dataloader will resume the fiber after the requested data has been loaded (by another Fiber).
     #
     # @return [void]
-    def yield
+    def yield(source = Fiber[:__graphql_current_dataloader_source])
+      trace = Fiber[:__graphql_current_multiplex]&.current_trace
+      trace&.dataloader_fiber_yield(source)
       Fiber.yield
+      trace&.dataloader_fiber_resume(source)
       nil
     end
 
@@ -184,6 +189,7 @@ module GraphQL
     end
 
     def run
+      trace = Fiber[:__graphql_current_multiplex]&.current_trace
       jobs_fiber_limit, total_fiber_limit = calculate_fiber_limit
       job_fibers = []
       next_job_fibers = []
@@ -191,10 +197,11 @@ module GraphQL
       next_source_fibers = []
       first_pass = true
       manager = spawn_fiber do
+        trace&.begin_dataloader(self)
        while first_pass || !job_fibers.empty?
          first_pass = false
 
-          while (f = (job_fibers.shift || (((next_job_fibers.size + job_fibers.size) < jobs_fiber_limit) && spawn_job_fiber)))
+          while (f = (job_fibers.shift || (((next_job_fibers.size + job_fibers.size) < jobs_fiber_limit) && spawn_job_fiber(trace))))
            if f.alive?
              finished = run_fiber(f)
              if !finished
@@ -205,7 +212,7 @@ module GraphQL
          join_queues(job_fibers, next_job_fibers)
 
          while (!source_fibers.empty? || @source_cache.each_value.any? { |group_sources| group_sources.each_value.any?(&:pending?) })
-            while (f = source_fibers.shift || (((job_fibers.size + source_fibers.size + next_source_fibers.size + next_job_fibers.size) < total_fiber_limit) && spawn_source_fiber))
+            while (f = source_fibers.shift || (((job_fibers.size + source_fibers.size + next_source_fibers.size + next_job_fibers.size) < total_fiber_limit) && spawn_source_fiber(trace)))
              if f.alive?
                finished = run_fiber(f)
                if !finished
@@ -216,6 +223,8 @@ module GraphQL
            join_queues(source_fibers, next_source_fibers)
          end
        end
+
+        trace&.end_dataloader(self)
      end
 
      run_fiber(manager)
@@ -230,6 +239,7 @@ module GraphQL
      if !source_fibers.empty?
        raise "Invariant: source fibers should have exited but #{source_fibers.size} remained"
      end
+
    rescue UncaughtThrowError => e
      throw e.tag, e.value
    end
@@ -247,6 +257,22 @@ module GraphQL
      }
    end
 
+    # Pre-warm the Dataloader cache with ActiveRecord objects which were loaded elsewhere.
+    # These will be used by {Dataloader::ActiveRecordSource}, {Dataloader::ActiveRecordAssociationSource} and their helper
+    # methods, `dataload_record` and `dataload_association`.
+    # @param records [Array<ActiveRecord::Base>] Already-loaded records to warm the cache with
+    # @param index_by [Symbol] The attribute to use as the cache key. (Should match `find_by:` when using {ActiveRecordSource})
+    # @return [void]
+    def merge_records(records, index_by: :id)
+      records_by_class = Hash.new { |h, k| h[k] = {} }
+      records.each do |r|
+        records_by_class[r.class][r.public_send(index_by)] = r
+      end
+      records_by_class.each do |r_class, records|
+        with(ActiveRecordSource, r_class).merge(records)
+      end
+    end
+
    private
 
    def calculate_fiber_limit
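
A hedged usage sketch for the new `merge_records`; `Post` and the surrounding setup are hypothetical:

```ruby
# Records loaded outside GraphQL can pre-warm the cache so ActiveRecordSource
# (and dataload_record) won't query for them again.
posts = Post.where(published: true).to_a
context.dataloader.merge_records(posts)                   # cached by :id (the default)
context.dataloader.merge_records(posts, index_by: :slug)  # to match `find_by: :slug` lookups
```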
@@ -266,17 +292,19 @@ module GraphQL
      new_queue.clear
    end
 
-    def spawn_job_fiber
+    def spawn_job_fiber(trace)
      if !@pending_jobs.empty?
        spawn_fiber do
+          trace&.dataloader_spawn_execution_fiber(@pending_jobs)
          while job = @pending_jobs.shift
            job.call
          end
+          trace&.dataloader_fiber_exit
        end
      end
    end
 
-    def spawn_source_fiber
+    def spawn_source_fiber(trace)
      pending_sources = nil
      @source_cache.each_value do |source_by_batch_params|
        source_by_batch_params.each_value do |source|
@@ -289,10 +317,14 @@ module GraphQL
 
      if pending_sources
        spawn_fiber do
+          trace&.dataloader_spawn_source_fiber(pending_sources)
          pending_sources.each do |source|
            Fiber[:__graphql_current_dataloader_source] = source
+            trace&.begin_dataloader_source(source)
            source.run_pending_keys
+            trace&.end_dataloader_source(source)
          end
+          trace&.dataloader_fiber_exit
        end
      end
    end
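
The `trace&....` calls added throughout `run`, `spawn_job_fiber`, and `spawn_source_fiber` correspond to new dataloader hooks on `GraphQL::Tracing::Trace` (see `tracing/trace.rb` in the file list above). A speculative sketch of a custom trace observing two of them; the hook names and arities are inferred from the call sites, and the timing logic is illustrative:

```ruby
# Sketch only: assumes `begin_dataloader(dl)` / `end_dataloader(dl)` exist as
# base no-op hooks on Trace, as the call sites above suggest.
module DataloaderTimingTrace
  def begin_dataloader(dataloader)
    @dataloader_started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    super
  end

  def end_dataloader(dataloader)
    ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - @dataloader_started_at) * 1000
    warn "Dataloader ran for #{ms.round(1)}ms"
    super
  end
end

# MySchema.trace_with(DataloaderTimingTrace)  # hypothetical schema
```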
data/lib/graphql/execution/interpreter/runtime/graphql_result.rb CHANGED
@@ -5,7 +5,10 @@ module GraphQL
     class Interpreter
       class Runtime
         module GraphQLResult
-          def initialize(result_name, result_type, application_value, parent_result, is_non_null_in_parent, selections, is_eager)
+          def initialize(result_name, result_type, application_value, parent_result, is_non_null_in_parent, selections, is_eager, ast_node, graphql_arguments, graphql_field) # rubocop:disable Metrics/ParameterLists
+            @ast_node = ast_node
+            @graphql_arguments = graphql_arguments
+            @graphql_field = graphql_field
             @graphql_parent = parent_result
             @graphql_application_value = application_value
             @graphql_result_type = result_type
@@ -31,14 +34,14 @@ module GraphQL
 
           attr_accessor :graphql_dead
           attr_reader :graphql_parent, :graphql_result_name, :graphql_is_non_null_in_parent,
-            :graphql_application_value, :graphql_result_type, :graphql_selections, :graphql_is_eager
+            :graphql_application_value, :graphql_result_type, :graphql_selections, :graphql_is_eager, :ast_node, :graphql_arguments, :graphql_field
 
           # @return [Hash] Plain-Ruby result data (`@graphql_metadata` contains Result wrapper objects)
           attr_accessor :graphql_result_data
         end
 
         class GraphQLResultHash
-          def initialize(_result_name, _result_type, _application_value, _parent_result, _is_non_null_in_parent, _selections, _is_eager)
+          def initialize(_result_name, _result_type, _application_value, _parent_result, _is_non_null_in_parent, _selections, _is_eager, _ast_node, _graphql_arguments, graphql_field) # rubocop:disable Metrics/ParameterLists
             super
             @graphql_result_data = {}
           end
@@ -126,7 +129,7 @@ module GraphQL
         class GraphQLResultArray
           include GraphQLResult
 
-          def initialize(_result_name, _result_type, _application_value, _parent_result, _is_non_null_in_parent, _selections, _is_eager)
+          def initialize(_result_name, _result_type, _application_value, _parent_result, _is_non_null_in_parent, _selections, _is_eager, _ast_node, _graphql_arguments, graphql_field) # rubocop:disable Metrics/ParameterLists
             super
             @graphql_result_data = []
           end
@@ -168,6 +171,10 @@ module GraphQL
           def values
             (@graphql_metadata || @graphql_result_data)
           end
+
+          def [](idx)
+            (@graphql_metadata || @graphql_result_data)[idx]
+          end
         end
       end
     end