graphql 1.11.8 → 1.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of graphql might be problematic.

Files changed (117)
  1. checksums.yaml +4 -4
  2. data/lib/generators/graphql/install_generator.rb +5 -5
  3. data/lib/generators/graphql/relay_generator.rb +63 -0
  4. data/lib/generators/graphql/templates/base_connection.erb +8 -0
  5. data/lib/generators/graphql/templates/base_edge.erb +8 -0
  6. data/lib/generators/graphql/templates/node_type.erb +9 -0
  7. data/lib/generators/graphql/templates/object.erb +1 -1
  8. data/lib/generators/graphql/templates/query_type.erb +1 -3
  9. data/lib/generators/graphql/templates/schema.erb +8 -35
  10. data/lib/graphql.rb +38 -4
  11. data/lib/graphql/analysis/analyze_query.rb +7 -0
  12. data/lib/graphql/analysis/ast.rb +11 -2
  13. data/lib/graphql/analysis/ast/visitor.rb +9 -1
  14. data/lib/graphql/backtrace.rb +28 -19
  15. data/lib/graphql/backtrace/legacy_tracer.rb +56 -0
  16. data/lib/graphql/backtrace/table.rb +22 -2
  17. data/lib/graphql/backtrace/tracer.rb +40 -9
  18. data/lib/graphql/backwards_compatibility.rb +1 -0
  19. data/lib/graphql/compatibility/execution_specification.rb +1 -0
  20. data/lib/graphql/compatibility/lazy_execution_specification.rb +2 -0
  21. data/lib/graphql/compatibility/query_parser_specification.rb +2 -0
  22. data/lib/graphql/compatibility/schema_parser_specification.rb +2 -0
  23. data/lib/graphql/dataloader.rb +197 -0
  24. data/lib/graphql/dataloader/null_dataloader.rb +21 -0
  25. data/lib/graphql/dataloader/request.rb +24 -0
  26. data/lib/graphql/dataloader/request_all.rb +22 -0
  27. data/lib/graphql/dataloader/source.rb +93 -0
  28. data/lib/graphql/define/assign_global_id_field.rb +1 -1
  29. data/lib/graphql/define/instance_definable.rb +32 -2
  30. data/lib/graphql/define/type_definer.rb +5 -5
  31. data/lib/graphql/deprecated_dsl.rb +5 -0
  32. data/lib/graphql/enum_type.rb +2 -0
  33. data/lib/graphql/execution/errors.rb +4 -0
  34. data/lib/graphql/execution/execute.rb +7 -0
  35. data/lib/graphql/execution/interpreter.rb +10 -6
  36. data/lib/graphql/execution/interpreter/arguments.rb +51 -14
  37. data/lib/graphql/execution/interpreter/handles_raw_value.rb +0 -7
  38. data/lib/graphql/execution/interpreter/runtime.rb +210 -124
  39. data/lib/graphql/execution/multiplex.rb +20 -6
  40. data/lib/graphql/function.rb +4 -0
  41. data/lib/graphql/input_object_type.rb +2 -0
  42. data/lib/graphql/interface_type.rb +3 -1
  43. data/lib/graphql/language/document_from_schema_definition.rb +50 -23
  44. data/lib/graphql/object_type.rb +2 -0
  45. data/lib/graphql/pagination/connection.rb +5 -1
  46. data/lib/graphql/pagination/connections.rb +6 -16
  47. data/lib/graphql/query.rb +2 -0
  48. data/lib/graphql/query/context.rb +4 -0
  49. data/lib/graphql/query/serial_execution.rb +1 -0
  50. data/lib/graphql/relay/base_connection.rb +7 -0
  51. data/lib/graphql/relay/connection_instrumentation.rb +4 -4
  52. data/lib/graphql/relay/connection_type.rb +1 -1
  53. data/lib/graphql/relay/mutation.rb +1 -0
  54. data/lib/graphql/relay/node.rb +3 -0
  55. data/lib/graphql/relay/type_extensions.rb +2 -0
  56. data/lib/graphql/scalar_type.rb +2 -0
  57. data/lib/graphql/schema.rb +69 -32
  58. data/lib/graphql/schema/argument.rb +25 -7
  59. data/lib/graphql/schema/build_from_definition.rb +139 -51
  60. data/lib/graphql/schema/directive.rb +76 -0
  61. data/lib/graphql/schema/directive/flagged.rb +57 -0
  62. data/lib/graphql/schema/enum.rb +3 -0
  63. data/lib/graphql/schema/enum_value.rb +12 -6
  64. data/lib/graphql/schema/field.rb +28 -9
  65. data/lib/graphql/schema/field/connection_extension.rb +3 -2
  66. data/lib/graphql/schema/input_object.rb +33 -22
  67. data/lib/graphql/schema/interface.rb +1 -0
  68. data/lib/graphql/schema/member.rb +4 -0
  69. data/lib/graphql/schema/member/base_dsl_methods.rb +1 -0
  70. data/lib/graphql/schema/member/build_type.rb +3 -3
  71. data/lib/graphql/schema/member/has_arguments.rb +24 -6
  72. data/lib/graphql/schema/member/has_deprecation_reason.rb +25 -0
  73. data/lib/graphql/schema/member/has_directives.rb +98 -0
  74. data/lib/graphql/schema/member/has_validators.rb +31 -0
  75. data/lib/graphql/schema/member/type_system_helpers.rb +1 -1
  76. data/lib/graphql/schema/object.rb +11 -0
  77. data/lib/graphql/schema/printer.rb +5 -4
  78. data/lib/graphql/schema/resolver.rb +7 -0
  79. data/lib/graphql/schema/resolver/has_payload_type.rb +2 -0
  80. data/lib/graphql/schema/subscription.rb +19 -1
  81. data/lib/graphql/schema/timeout_middleware.rb +2 -0
  82. data/lib/graphql/schema/validation.rb +2 -0
  83. data/lib/graphql/schema/validator.rb +163 -0
  84. data/lib/graphql/schema/validator/exclusion_validator.rb +31 -0
  85. data/lib/graphql/schema/validator/format_validator.rb +49 -0
  86. data/lib/graphql/schema/validator/inclusion_validator.rb +33 -0
  87. data/lib/graphql/schema/validator/length_validator.rb +57 -0
  88. data/lib/graphql/schema/validator/numericality_validator.rb +71 -0
  89. data/lib/graphql/schema/validator/required_validator.rb +68 -0
  90. data/lib/graphql/static_validation/validator.rb +2 -0
  91. data/lib/graphql/subscriptions.rb +17 -20
  92. data/lib/graphql/tracing.rb +2 -2
  93. data/lib/graphql/tracing/appoptics_tracing.rb +3 -1
  94. data/lib/graphql/tracing/platform_tracing.rb +3 -1
  95. data/lib/graphql/tracing/skylight_tracing.rb +1 -1
  96. data/lib/graphql/types/relay.rb +11 -3
  97. data/lib/graphql/types/relay/base_connection.rb +2 -92
  98. data/lib/graphql/types/relay/base_edge.rb +2 -35
  99. data/lib/graphql/types/relay/connection_behaviors.rb +123 -0
  100. data/lib/graphql/types/relay/default_relay.rb +27 -0
  101. data/lib/graphql/types/relay/edge_behaviors.rb +42 -0
  102. data/lib/graphql/types/relay/has_node_field.rb +41 -0
  103. data/lib/graphql/types/relay/has_nodes_field.rb +41 -0
  104. data/lib/graphql/types/relay/node.rb +2 -4
  105. data/lib/graphql/types/relay/node_behaviors.rb +15 -0
  106. data/lib/graphql/types/relay/node_field.rb +1 -19
  107. data/lib/graphql/types/relay/nodes_field.rb +1 -19
  108. data/lib/graphql/types/relay/page_info.rb +2 -14
  109. data/lib/graphql/types/relay/page_info_behaviors.rb +25 -0
  110. data/lib/graphql/union_type.rb +2 -0
  111. data/lib/graphql/upgrader/member.rb +1 -0
  112. data/lib/graphql/upgrader/schema.rb +1 -0
  113. data/lib/graphql/version.rb +1 -1
  114. metadata +34 -9
  115. data/lib/graphql/types/relay/base_field.rb +0 -22
  116. data/lib/graphql/types/relay/base_interface.rb +0 -29
  117. data/lib/graphql/types/relay/base_object.rb +0 -26
data/lib/graphql/backtrace/table.rb
@@ -79,6 +79,25 @@ module GraphQL
       # @return [Array] 5 items for a backtrace table (not `key`)
       def build_rows(context_entry, rows:, top: false)
         case context_entry
+        when Backtrace::Frame
+          field_alias = context_entry.ast_node.respond_to?(:alias) && context_entry.ast_node.alias
+          value = if top && @override_value
+            @override_value
+          else
+            @context.query.context.namespace(:interpreter)[:runtime].value_at(context_entry.path)
+          end
+          rows << [
+            "#{context_entry.ast_node ? context_entry.ast_node.position.join(":") : ""}",
+            "#{context_entry.field.path}#{field_alias ? " as #{field_alias}" : ""}",
+            "#{context_entry.object.object.inspect}",
+            context_entry.arguments.to_h.inspect,
+            Backtrace::InspectResult.inspect_result(value),
+          ]
+          if (parent = context_entry.parent_frame)
+            build_rows(parent, rows: rows)
+          else
+            rows
+          end
         when GraphQL::Query::Context::FieldResolutionContext
           ctx = context_entry
           field_name = "#{ctx.irep_node.owner_type.name}.#{ctx.field.name}"
@@ -112,15 +131,16 @@ module GraphQL
           if object.is_a?(GraphQL::Schema::Object)
             object = object.object
           end
+          value = context_entry.namespace(:interpreter)[:runtime].value_at([])
          rows << [
            "#{position}",
            "#{op_type}#{op_name ? " #{op_name}" : ""}",
            "#{object.inspect}",
            query.variables.to_h.inspect,
-           Backtrace::InspectResult.inspect_result(query.context.value),
+           Backtrace::InspectResult.inspect_result(value),
          ]
        else
-         raise "Unexpected get_rows subject #{context_entry.inspect}"
+         raise "Unexpected get_rows subject #{context_entry.class} (#{context_entry.inspect})"
        end
      end
    end
data/lib/graphql/backtrace/tracer.rb
@@ -1,46 +1,77 @@
 # frozen_string_literal: true
 module GraphQL
   class Backtrace
+    # TODO this is not fiber-friendly
     module Tracer
       module_function

       # Implement the {GraphQL::Tracing} API.
       def trace(key, metadata)
-        push_data = case key
+        case key
         when "lex", "parse"
           # No context here, don't have a query yet
           nil
         when "execute_multiplex", "analyze_multiplex"
-          metadata[:multiplex].queries
+          # No query context yet
+          nil
         when "validate", "analyze_query", "execute_query", "execute_query_lazy"
-          metadata[:query] || metadata[:queries]
+          query = metadata[:query] || metadata[:queries].first
+          push_key = []
+          push_data = query
+          multiplex = query.multiplex
         when "execute_field", "execute_field_lazy"
-          # The interpreter passes `query:`, legacy passes `context:`
-          metadata[:context] || ((q = metadata[:query]) && q.context)
+          query = metadata[:query] || raise(ArgumentError, "Add `legacy: true` to use GraphQL::Backtrace without the interpreter runtime.")
+          context = query.context
+          multiplex = query.multiplex
+          push_key = metadata[:path].reject { |i| i.is_a?(Integer) }
+          parent_frame = multiplex.context[:graphql_backtrace_contexts][push_key[0..-2]]
+          if parent_frame.nil?
+            p push_key
+            binding.pry
+          end
+          if parent_frame.is_a?(GraphQL::Query)
+            parent_frame = parent_frame.context
+          end
+
+          push_data = Frame.new(
+            query: query,
+            path: push_key,
+            ast_node: metadata[:ast_node],
+            field: metadata[:field],
+            object: metadata[:object],
+            arguments: metadata[:arguments],
+            parent_frame: parent_frame,
+          )
         else
           # Custom key, no backtrace data for this
           nil
         end

         if push_data
-          Thread.current[:last_graphql_backtrace_context] = push_data
+          multiplex.context[:graphql_backtrace_contexts][push_key] = push_data
+          multiplex.context[:last_graphql_backtrace_context] = push_data
         end

         if key == "execute_multiplex"
+          multiplex_context = metadata[:multiplex].context
+          multiplex_context[:graphql_backtrace_contexts] = {}
           begin
             yield
           rescue StandardError => err
             # This is an unhandled error from execution,
             # Re-raise it with a GraphQL trace.
-            potential_context = Thread.current[:last_graphql_backtrace_context]
+            potential_context = multiplex_context[:last_graphql_backtrace_context]

-            if potential_context.is_a?(GraphQL::Query::Context) || potential_context.is_a?(GraphQL::Query::Context::FieldResolutionContext)
+            if potential_context.is_a?(GraphQL::Query::Context) ||
+                potential_context.is_a?(GraphQL::Query::Context::FieldResolutionContext) ||
+                potential_context.is_a?(Backtrace::Frame)
               raise TracedError.new(err, potential_context)
             else
               raise
             end
           ensure
-            Thread.current[:last_graphql_backtrace_context] = nil
+            multiplex_context.delete(:graphql_backtrace_contexts)
+            multiplex_context.delete(:last_graphql_backtrace_context)
           end
         else
           yield
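
The two hunks above move GraphQL::Backtrace's bookkeeping from `Thread.current` into the multiplex context and add `Backtrace::Frame` entries for the interpreter runtime. For orientation, a minimal sketch of how this feature is typically switched on; `MySchema` and `query_string` are placeholders, not part of this diff:

# Annotate unhandled errors with a GraphQL backtrace table (schema-wide):
class MySchema < GraphQL::Schema
  use GraphQL::Backtrace
end

# Or opt in per query via the query context:
MySchema.execute(query_string, context: { backtrace: true })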
data/lib/graphql/backwards_compatibility.rb
@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 module GraphQL
   # Helpers for migrating in a backwards-compatible way
+  # Remove this in GraphQL-Ruby 2.0, when all users of it will be gone.
   # @api private
   module BackwardsCompatibility
     module_function
data/lib/graphql/compatibility/execution_specification.rb
@@ -32,6 +32,7 @@ module GraphQL
     # @param execution_strategy [<#new, #execute>] An execution strategy class
     # @return [Class<Minitest::Test>] A test suite for this execution strategy
     def self.build_suite(execution_strategy)
+      warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
       Class.new(Minitest::Test) do
         class << self
           attr_accessor :counter_schema, :specification_schema
data/lib/graphql/compatibility/lazy_execution_specification.rb
@@ -7,6 +7,8 @@ module GraphQL
     # @param execution_strategy [<#new, #execute>] An execution strategy class
     # @return [Class<Minitest::Test>] A test suite for this execution strategy
     def self.build_suite(execution_strategy)
+      warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
+
       Class.new(Minitest::Test) do
         class << self
           attr_accessor :lazy_schema
data/lib/graphql/compatibility/query_parser_specification.rb
@@ -11,6 +11,8 @@ module GraphQL
     # @yieldreturn [GraphQL::Language::Nodes::Document]
     # @return [Class<Minitest::Test>] A test suite for this parse function
     def self.build_suite(&block)
+      warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
+
       Class.new(Minitest::Test) do
         include QueryAssertions
         include ParseErrorSpecification
data/lib/graphql/compatibility/schema_parser_specification.rb
@@ -8,6 +8,8 @@ module GraphQL
     # @yieldreturn [GraphQL::Language::Nodes::Document]
     # @return [Class<Minitest::Test>] A test suite for this parse function
     def self.build_suite(&block)
+      warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
+
       Class.new(Minitest::Test) do
         @@parse_fn = block

data/lib/graphql/dataloader.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require "graphql/dataloader/null_dataloader"
+require "graphql/dataloader/request"
+require "graphql/dataloader/request_all"
+require "graphql/dataloader/source"
+
+module GraphQL
+  # This plugin supports Fiber-based concurrency, along with {GraphQL::Dataloader::Source}.
+  #
+  # @example Installing Dataloader
+  #
+  #   class MySchema < GraphQL::Schema
+  #     use GraphQL::Dataloader
+  #   end
+  #
+  # @example Waiting for batch-loaded data in a GraphQL field
+  #
+  #   field :team, Types::Team, null: true
+  #
+  #   def team
+  #     dataloader.with(Sources::Record, Team).load(object.team_id)
+  #   end
+  #
+  class Dataloader
+    def self.use(schema)
+      schema.dataloader_class = self
+    end
+
+    def initialize(multiplex_context)
+      @context = multiplex_context
+      @source_cache = Hash.new { |h, source_class| h[source_class] = Hash.new { |h2, batch_parameters|
+        source = source_class.new(*batch_parameters)
+        source.setup(self)
+        h2[batch_parameters] = source
+      }
+      }
+      @waiting_fibers = []
+      @yielded_fibers = Set.new
+    end
+
+    # @return [Hash] the {Multiplex} context
+    attr_reader :context
+
+    # @api private
+    attr_reader :yielded_fibers
+
+    # Add some work to this dataloader to be scheduled later.
+    # @param block Some work to enqueue
+    # @return [void]
+    def enqueue(&block)
+      @waiting_fibers << Fiber.new {
+        begin
+          yield
+        rescue StandardError => exception
+          exception
+        end
+      }
+      nil
+    end
+
+    # Tell the dataloader that this fiber is waiting for data.
+    #
+    # Dataloader will resume the fiber after the requested data has been loaded (by another Fiber).
+    #
+    # @return [void]
+    def yield
+      Fiber.yield
+      nil
+    end
+
+    # @return [Boolean] Returns true if the current Fiber has yielded once via Dataloader
+    def yielded?
+      @yielded_fibers.include?(Fiber.current)
+    end
+
+    # Run all Fibers until they're all done
+    #
+    # Each cycle works like this:
+    #
+    # - Run each pending execution fiber (`@waiting_fibers`),
+    # - Then run each pending Source, preparing more data for those fibers.
+    # - Run each pending Source _again_ (if one Source requested more data from another Source)
+    # - Continue until there are no pending sources
+    # - Repeat: run execution fibers again ...
+    #
+    # @return [void]
+    def run
+      # Start executing Fibers. This will run until all the Fibers are done.
+      already_run_fibers = []
+      while (current_fiber = @waiting_fibers.pop)
+        # Run each execution fiber, enqueuing it in `already_run_fibers`
+        # if it's still `.alive?`.
+        # Any spin-off continuations will be enqueued in `@waiting_fibers` (via {#enqueue})
+        resume_fiber_and_enqueue_continuation(current_fiber, already_run_fibers)
+
+        if @waiting_fibers.empty?
+          # Now, run all Sources which have become pending _before_ resuming GraphQL execution.
+          # Sources might queue up other Sources, which is fine -- those will also run before resuming execution.
+          #
+          # This is where an evented approach would be even better -- can we tell which
+          # fibers are ready to continue, and continue execution there?
+          #
+          source_fiber_stack = if (first_source_fiber = create_source_fiber)
+            [first_source_fiber]
+          else
+            nil
+          end
+
+          if source_fiber_stack
+            while (outer_source_fiber = source_fiber_stack.pop)
+              resume_fiber_and_enqueue_continuation(outer_source_fiber, source_fiber_stack)
+
+              # If this source caused more sources to become pending, run those before running this one again:
+              next_source_fiber = create_source_fiber
+              if next_source_fiber
+                source_fiber_stack << next_source_fiber
+              end
+            end
+          end
+
+          # We ran all the first round of execution fibers,
+          # and we ran all the pending sources.
+          # So pick up any paused execution fibers and repeat.
+          @waiting_fibers.concat(already_run_fibers)
+          already_run_fibers.clear
+        end
+      end
+      nil
+    end
+
+    # Get a Source instance from this dataloader, for calling `.load(...)` or `.request(...)` on.
+    #
+    # @param source_class [Class<GraphQL::Dataloader::Source]
+    # @param batch_parameters [Array<Object>]
+    # @return [GraphQL::Dataloader::Source] An instance of {source_class}, initialized with `self, *batch_parameters`,
+    #   and cached for the lifetime of this {Multiplex}.
+    def with(source_class, *batch_parameters)
+      @source_cache[source_class][batch_parameters]
+    end
+
+    # @api private
+    attr_accessor :current_runtime
+
+    private
+
+    # Check if this fiber is still alive.
+    # If it is, and it should continue, then enqueue a continuation.
+    # If it is, re-enqueue it in `fiber_queue`.
+    # Otherwise, clean it up from @yielded_fibers.
+    # @return [void]
+    def resume_fiber_and_enqueue_continuation(fiber, fiber_stack)
+      result = fiber.resume
+      if result.is_a?(StandardError)
+        raise result
+      end
+
+      # This fiber yielded; there's more to do here.
+      # (If `#alive?` is false, then the fiber concluded without yielding.)
+      if fiber.alive?
+        if !@yielded_fibers.include?(fiber)
+          # This fiber hasn't yielded yet, we should enqueue a continuation fiber
+          @yielded_fibers.add(fiber)
+          current_runtime.enqueue_selections_fiber
+        end
+        fiber_stack << fiber
+      else
+        # Keep this set clean so that fibers can be GC'ed during execution
+        @yielded_fibers.delete(fiber)
+      end
+    end
+
+    # If there are pending sources, return a fiber for running them.
+    # Otherwise, return `nil`.
+    #
+    # @return [Fiber, nil]
+    def create_source_fiber
+      pending_sources = nil
+      @source_cache.each_value do |source_by_batch_params|
+        source_by_batch_params.each_value do |source|
+          if source.pending?
+            pending_sources ||= []
+            pending_sources << source
+          end
+        end
+      end
+
+      if pending_sources
+        source_fiber = Fiber.new do
+          pending_sources.each(&:run_pending_keys)
+        end
+      end
+
+      source_fiber
+    end
+  end
+end
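
The new `GraphQL::Dataloader` above alternates between execution fibers and pending `Source`s: execution runs until every fiber is waiting on data, pending sources fetch their keys in batches, then execution resumes. As a hedged sketch of the intended usage (following the `@example` comments in this file): the class name, `Team`, and the assumption that `Source#fetch(keys)` returns one value per key, in order, come from graphql-ruby's Dataloader documentation, not from this hunk.

module Sources
  # A batch-loading source: keys accumulate while fibers are paused,
  # then `fetch` runs once per batch.
  class ActiveRecordObject < GraphQL::Dataloader::Source
    def initialize(model_class)
      @model_class = model_class # batch parameter passed via `dataloader.with(...)`
    end

    def fetch(ids)
      records = @model_class.where(id: ids)
      # Return one value per requested id, in the same order as `ids`
      ids.map { |id| records.find { |record| record.id == id } }
    end
  end
end

# In a field resolver, as in the @example above:
#   dataloader.with(Sources::ActiveRecordObject, Team).load(object.team_id)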
data/lib/graphql/dataloader/null_dataloader.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module GraphQL
+  class Dataloader
+    # The default implementation of dataloading -- all no-ops.
+    #
+    # The Dataloader interface isn't public, but it enables
+    # simple internal code while adding the option to add Dataloader.
+    class NullDataloader < Dataloader
+      def enqueue
+        yield
+      end
+
+      # These are all no-ops because code was
+      # executed sychronously.
+      def run; end
+      def yield; end
+      def yielded?; false; end
+    end
+  end
+end
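
`NullDataloader` is the no-op default, presumably used when `GraphQL::Dataloader` is not installed on a schema: `enqueue` simply yields, so enqueued work runs immediately and synchronously, and `run`/`yield` have nothing to do. A tiny sketch under that assumption:

require "graphql"

# With the null implementation there is no batching or fiber switching:
dataloader = GraphQL::Dataloader::NullDataloader.new({})
dataloader.enqueue { puts "runs right away" } # the block is executed immediately
dataloader.run                                # no-op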
data/lib/graphql/dataloader/request.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+module GraphQL
+  class Dataloader
+    # @see Source#request which returns an instance of this
+    class Request
+      def initialize(source, key)
+        @source = source
+        @key = key
+      end
+
+      # Call this method to cause the current Fiber to wait for the results of this request.
+      #
+      # @return [Object] the object loaded for `key`
+      def load
+        if @source.results.key?(@key)
+          @source.results[@key]
+        else
+          @source.sync
+          @source.results[@key]
+        end
+      end
+    end
+  end
+end
data/lib/graphql/dataloader/request_all.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module GraphQL
+  class Dataloader
+    # @see Source#request_all which returns an instance of this.
+    class RequestAll < Request
+      def initialize(source, keys)
+        @source = source
+        @keys = keys
+      end
+
+      # Call this method to cause the current Fiber to wait for the results of this request.
+      #
+      # @return [Array<Object>] One object for each of `keys`
+      def load
+        if @keys.any? { |k| !@source.results.key?(k) }
+          @source.sync
+        end
+        @keys.map { |k| @source.results[k] }
+      end
+    end
+  end
+end
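
`Request#load` and `RequestAll#load` above pause the current fiber only when the source has not yet fetched the requested key(s). A hedged sketch contrasting the eager `.load` call with `.request` (which the `@see Source#request` comments above refer to) deferring the wait until the value is actually needed; the source class, `User`, and the ids are illustrative placeholders:

# Inside a resolver: register two keys first, block only when reading the values.
author_req = dataloader.with(Sources::ActiveRecordObject, User).request(object.author_id)
editor_req = dataloader.with(Sources::ActiveRecordObject, User).request(object.editor_id)

author = author_req.load # the fiber waits here until the batch has been fetched
editor = editor_req.load # same batch, already fetched, returns immediately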