graphql 1.12.3 → 1.12.8

Sign up to get free protection for your applications and to get access to all the features.
Files changed (70) hide show
  1. checksums.yaml +4 -4
  2. data/lib/generators/graphql/install_generator.rb +4 -1
  3. data/lib/generators/graphql/loader_generator.rb +1 -0
  4. data/lib/generators/graphql/mutation_generator.rb +1 -0
  5. data/lib/generators/graphql/relay.rb +55 -0
  6. data/lib/generators/graphql/relay_generator.rb +4 -46
  7. data/lib/generators/graphql/type_generator.rb +1 -0
  8. data/lib/graphql.rb +4 -2
  9. data/lib/graphql/backtrace/inspect_result.rb +0 -1
  10. data/lib/graphql/backtrace/table.rb +0 -1
  11. data/lib/graphql/backtrace/traced_error.rb +0 -1
  12. data/lib/graphql/backtrace/tracer.rb +4 -8
  13. data/lib/graphql/dataloader.rb +102 -92
  14. data/lib/graphql/dataloader/null_dataloader.rb +5 -5
  15. data/lib/graphql/dataloader/request.rb +1 -6
  16. data/lib/graphql/dataloader/request_all.rb +1 -4
  17. data/lib/graphql/dataloader/source.rb +20 -6
  18. data/lib/graphql/execution/errors.rb +109 -11
  19. data/lib/graphql/execution/interpreter.rb +2 -2
  20. data/lib/graphql/execution/interpreter/arguments_cache.rb +37 -14
  21. data/lib/graphql/execution/interpreter/resolve.rb +33 -25
  22. data/lib/graphql/execution/interpreter/runtime.rb +41 -78
  23. data/lib/graphql/execution/multiplex.rb +21 -22
  24. data/lib/graphql/introspection.rb +1 -1
  25. data/lib/graphql/introspection/directive_type.rb +7 -3
  26. data/lib/graphql/language.rb +1 -0
  27. data/lib/graphql/language/cache.rb +37 -0
  28. data/lib/graphql/language/parser.rb +15 -5
  29. data/lib/graphql/language/parser.y +15 -5
  30. data/lib/graphql/object_type.rb +0 -2
  31. data/lib/graphql/pagination/active_record_relation_connection.rb +7 -0
  32. data/lib/graphql/pagination/connection.rb +15 -1
  33. data/lib/graphql/pagination/connections.rb +1 -0
  34. data/lib/graphql/pagination/relation_connection.rb +12 -1
  35. data/lib/graphql/parse_error.rb +0 -1
  36. data/lib/graphql/query.rb +9 -5
  37. data/lib/graphql/query/arguments_cache.rb +0 -1
  38. data/lib/graphql/query/context.rb +1 -3
  39. data/lib/graphql/query/executor.rb +0 -1
  40. data/lib/graphql/query/null_context.rb +3 -2
  41. data/lib/graphql/query/validation_pipeline.rb +1 -1
  42. data/lib/graphql/query/variable_validation_error.rb +1 -1
  43. data/lib/graphql/railtie.rb +9 -1
  44. data/lib/graphql/relay/range_add.rb +10 -5
  45. data/lib/graphql/schema.rb +14 -27
  46. data/lib/graphql/schema/argument.rb +61 -0
  47. data/lib/graphql/schema/field.rb +10 -5
  48. data/lib/graphql/schema/field/connection_extension.rb +1 -0
  49. data/lib/graphql/schema/find_inherited_value.rb +3 -1
  50. data/lib/graphql/schema/input_object.rb +6 -2
  51. data/lib/graphql/schema/member/has_arguments.rb +43 -56
  52. data/lib/graphql/schema/member/has_fields.rb +1 -4
  53. data/lib/graphql/schema/member/instrumentation.rb +0 -1
  54. data/lib/graphql/schema/resolver.rb +28 -1
  55. data/lib/graphql/static_validation/rules/argument_literals_are_compatible.rb +3 -1
  56. data/lib/graphql/static_validation/rules/argument_literals_are_compatible_error.rb +6 -2
  57. data/lib/graphql/static_validation/rules/arguments_are_defined.rb +2 -1
  58. data/lib/graphql/static_validation/rules/arguments_are_defined_error.rb +4 -2
  59. data/lib/graphql/static_validation/rules/variable_usages_are_allowed.rb +2 -2
  60. data/lib/graphql/subscriptions/broadcast_analyzer.rb +0 -3
  61. data/lib/graphql/subscriptions/event.rb +0 -1
  62. data/lib/graphql/subscriptions/instrumentation.rb +0 -1
  63. data/lib/graphql/subscriptions/serialize.rb +3 -1
  64. data/lib/graphql/tracing/active_support_notifications_tracing.rb +2 -1
  65. data/lib/graphql/types/relay/base_connection.rb +4 -0
  66. data/lib/graphql/types/relay/connection_behaviors.rb +38 -5
  67. data/lib/graphql/types/relay/edge_behaviors.rb +12 -1
  68. data/lib/graphql/version.rb +1 -1
  69. data/readme.md +1 -1
  70. metadata +8 -90
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 62556f55d1ce711780044cff00313dedc21f9c07ff69538127f3f96c52c1921c
4
- data.tar.gz: 66f0329f3a61cdddd758159e344d1cf9781b5e21e6adf1c37380188c60c73025
3
+ metadata.gz: 10ea46e2edf136b445de343ee1a58dd98b0a6c5800ef31923d7d184198ed58fd
4
+ data.tar.gz: f2f6dff0bf020bfef04769deb3fd548317121f0e636d7a47a94baea862f341ac
5
5
  SHA512:
6
- metadata.gz: f5365accd8e35b1b016de61d9e5c2cf5605d0cb26c0d4baa4d31a4f479b6e91e7d49875fd9837bd426236a23ac52eb3dbb6a57f392ae0ebf25dc191c0e36786f
7
- data.tar.gz: 228843044ed696075f3daa6e43a51a518607007417c47f1da3690d2d8e5c5dce0233c702c4101f9d7b94159499d0251fc27a164dc0f5105d6e9674dd4c1565f0
6
+ metadata.gz: a79fde55cedcd391b7dfe7c141e22c51563aeefceb0fda494dd50f4281fefb0104f8663e688395268c95361ee10e01c7bfb69250368441ec8b8c9ad0d00ac279
7
+ data.tar.gz: 49a7b0f5cdf9d3a64ff05b0898c7ef93c6cdfd210baf93affa25afe3f3d7b59187fe3e7727f467af9411a085c5f6e8e281fafd27352fe97128f98bd82ffde330
@@ -1,6 +1,8 @@
1
1
  # frozen_string_literal: true
2
+ require 'rails/generators'
2
3
  require 'rails/generators/base'
3
4
  require_relative 'core'
5
+ require_relative 'relay'
4
6
 
5
7
  module Graphql
6
8
  module Generators
@@ -50,6 +52,7 @@ module Graphql
50
52
  # TODO: also add base classes
51
53
  class InstallGenerator < Rails::Generators::Base
52
54
  include Core
55
+ include Relay
53
56
 
54
57
  desc "Install GraphQL folder structure and boilerplate code"
55
58
  source_root File.expand_path('../templates', __FILE__)
@@ -164,7 +167,7 @@ RUBY
164
167
  end
165
168
 
166
169
  if options[:relay]
167
- generate("graphql:relay")
170
+ install_relay
168
171
  end
169
172
 
170
173
  if gemfile_modified?
@@ -1,4 +1,5 @@
1
1
  # frozen_string_literal: true
2
+ require 'rails/generators'
2
3
  require "rails/generators/named_base"
3
4
  require_relative "core"
4
5
 
@@ -1,4 +1,5 @@
1
1
  # frozen_string_literal: true
2
+ require 'rails/generators'
2
3
  require 'rails/generators/named_base'
3
4
  require_relative 'core'
4
5
 
@@ -0,0 +1,55 @@
1
+ # frozen_string_literal: true
2
+ module Graphql
3
+ module Generators
4
+ module Relay
5
+ def install_relay
6
+ # Add Node, `node(id:)`, and `nodes(ids:)`
7
+ template("node_type.erb", "#{options[:directory]}/types/node_type.rb")
8
+ in_root do
9
+ fields = " # Add `node(id: ID!) and `nodes(ids: [ID!]!)`\n include GraphQL::Types::Relay::HasNodeField\n include GraphQL::Types::Relay::HasNodesField\n\n"
10
+ inject_into_file "#{options[:directory]}/types/query_type.rb", fields, after: /class .*QueryType\s*<\s*[^\s]+?\n/m, force: false
11
+ end
12
+
13
+ # Add connections and edges
14
+ template("base_connection.erb", "#{options[:directory]}/types/base_connection.rb")
15
+ template("base_edge.erb", "#{options[:directory]}/types/base_edge.rb")
16
+ connectionable_type_files = {
17
+ "#{options[:directory]}/types/base_object.rb" => /class .*BaseObject\s*<\s*[^\s]+?\n/m,
18
+ "#{options[:directory]}/types/base_union.rb" => /class .*BaseUnion\s*<\s*[^\s]+?\n/m,
19
+ "#{options[:directory]}/types/base_interface.rb" => /include GraphQL::Schema::Interface\n/m,
20
+ }
21
+ in_root do
22
+ connectionable_type_files.each do |type_class_file, sentinel|
23
+ inject_into_file type_class_file, " connection_type_class(Types::BaseConnection)\n", after: sentinel, force: false
24
+ inject_into_file type_class_file, " edge_type_class(Types::BaseEdge)\n", after: sentinel, force: false
25
+ end
26
+ end
27
+
28
+ # Add object ID hooks & connection plugin
29
+ schema_code = <<-RUBY
30
+
31
+ # Relay-style Object Identification:
32
+
33
+ # Return a string UUID for `object`
34
+ def self.id_from_object(object, type_definition, query_ctx)
35
+ # Here's a simple implementation which:
36
+ # - joins the type name & object.id
37
+ # - encodes it with base64:
38
+ # GraphQL::Schema::UniqueWithinType.encode(type_definition.name, object.id)
39
+ end
40
+
41
+ # Given a string UUID, find the object
42
+ def self.object_from_id(id, query_ctx)
43
+ # For example, to decode the UUIDs generated above:
44
+ # type_name, item_id = GraphQL::Schema::UniqueWithinType.decode(id)
45
+ #
46
+ # Then, based on `type_name` and `id`
47
+ # find an object in your application
48
+ # ...
49
+ end
50
+ RUBY
51
+ inject_into_file schema_file_path, schema_code, before: /^end\n/m, force: false
52
+ end
53
+ end
54
+ end
55
+ end
@@ -1,62 +1,20 @@
1
1
  # frozen_string_literal: true
2
+ require 'rails/generators'
2
3
  require 'rails/generators/base'
3
4
  require_relative 'core'
5
+ require_relative 'relay'
4
6
 
5
7
  module Graphql
6
8
  module Generators
7
9
  class RelayGenerator < Rails::Generators::Base
8
10
  include Core
11
+ include Relay
9
12
 
10
13
  desc "Add base types and fields for Relay-style nodes and connections"
11
14
  source_root File.expand_path('../templates', __FILE__)
12
15
 
13
16
  def install_relay
14
- # Add Node, `node(id:)`, and `nodes(ids:)`
15
- template("node_type.erb", "#{options[:directory]}/types/node_type.rb")
16
- in_root do
17
- fields = " # Add `node(id: ID!) and `nodes(ids: [ID!]!)`\n include GraphQL::Types::Relay::HasNodeField\n include GraphQL::Types::Relay::HasNodesField\n\n"
18
- inject_into_file "#{options[:directory]}/types/query_type.rb", fields, after: /class .*QueryType\s*<\s*[^\s]+?\n/m, force: false
19
- end
20
-
21
- # Add connections and edges
22
- template("base_connection.erb", "#{options[:directory]}/types/base_connection.rb")
23
- template("base_edge.erb", "#{options[:directory]}/types/base_edge.rb")
24
- connectionable_type_files = {
25
- "#{options[:directory]}/types/base_object.rb" => /class .*BaseObject\s*<\s*[^\s]+?\n/m,
26
- "#{options[:directory]}/types/base_union.rb" => /class .*BaseUnion\s*<\s*[^\s]+?\n/m,
27
- "#{options[:directory]}/types/base_interface.rb" => /include GraphQL::Schema::Interface\n/m,
28
- }
29
- in_root do
30
- connectionable_type_files.each do |type_class_file, sentinel|
31
- inject_into_file type_class_file, " connection_type_class(Types::BaseConnection)\n", after: sentinel, force: false
32
- inject_into_file type_class_file, " edge_type_class(Types::BaseEdge)\n", after: sentinel, force: false
33
- end
34
- end
35
-
36
- # Add object ID hooks & connection plugin
37
- schema_code = <<-RUBY
38
-
39
- # Relay-style Object Identification:
40
-
41
- # Return a string UUID for `object`
42
- def self.id_from_object(object, type_definition, query_ctx)
43
- # Here's a simple implementation which:
44
- # - joins the type name & object.id
45
- # - encodes it with base64:
46
- # GraphQL::Schema::UniqueWithinType.encode(type_definition.name, object.id)
47
- end
48
-
49
- # Given a string UUID, find the object
50
- def self.object_from_id(id, query_ctx)
51
- # For example, to decode the UUIDs generated above:
52
- # type_name, item_id = GraphQL::Schema::UniqueWithinType.decode(id)
53
- #
54
- # Then, based on `type_name` and `id`
55
- # find an object in your application
56
- # ...
57
- end
58
- RUBY
59
- inject_into_file schema_file_path, schema_code, before: /^end\n/m, force: false
17
+ super
60
18
  end
61
19
  end
62
20
  end
@@ -1,4 +1,5 @@
1
1
  # frozen_string_literal: true
2
+ require 'rails/generators'
2
3
  require 'rails/generators/base'
3
4
  require 'graphql'
4
5
  require 'active_support'
data/lib/graphql.rb CHANGED
@@ -4,7 +4,6 @@ require "json"
4
4
  require "set"
5
5
  require "singleton"
6
6
  require "forwardable"
7
- require_relative "./graphql/railtie" if defined? Rails::Railtie
8
7
 
9
8
  module GraphQL
10
9
  # forwards-compat for argument handling
@@ -103,6 +102,9 @@ require "graphql/scalar_type"
103
102
  require "graphql/name_validator"
104
103
 
105
104
  require "graphql/language"
105
+
106
+ require_relative "./graphql/railtie" if defined? Rails::Railtie
107
+
106
108
  require "graphql/analysis"
107
109
  require "graphql/tracing"
108
110
  require "graphql/dig"
@@ -128,6 +130,7 @@ require "graphql/schema/printer"
128
130
  require "graphql/filter"
129
131
  require "graphql/internal_representation"
130
132
  require "graphql/static_validation"
133
+ require "graphql/dataloader"
131
134
  require "graphql/introspection"
132
135
 
133
136
  require "graphql/analysis_error"
@@ -148,7 +151,6 @@ require "graphql/authorization"
148
151
  require "graphql/unauthorized_error"
149
152
  require "graphql/unauthorized_field_error"
150
153
  require "graphql/load_application_object_failed_error"
151
- require "graphql/dataloader"
152
154
  require "graphql/deprecation"
153
155
 
154
156
  module GraphQL
@@ -1,5 +1,4 @@
1
1
  # frozen_string_literal: true
2
- # test_via: ../backtrace.rb
3
2
  module GraphQL
4
3
  class Backtrace
5
4
  module InspectResult
@@ -1,5 +1,4 @@
1
1
  # frozen_string_literal: true
2
- # test_via: ../backtrace.rb
3
2
  module GraphQL
4
3
  class Backtrace
5
4
  # A class for turning a context into a human-readable table or array
@@ -1,5 +1,4 @@
1
1
  # frozen_string_literal: true
2
- # test_via: ../backtrace.rb
3
2
  module GraphQL
4
3
  class Backtrace
5
4
  # When {Backtrace} is enabled, raised errors are wrapped with {TracedError}.
@@ -21,14 +21,10 @@ module GraphQL
21
21
  multiplex = query.multiplex
22
22
  when "execute_field", "execute_field_lazy"
23
23
  query = metadata[:query] || raise(ArgumentError, "Add `legacy: true` to use GraphQL::Backtrace without the interpreter runtime.")
24
- context = query.context
25
24
  multiplex = query.multiplex
26
25
  push_key = metadata[:path].reject { |i| i.is_a?(Integer) }
27
26
  parent_frame = multiplex.context[:graphql_backtrace_contexts][push_key[0..-2]]
28
- if parent_frame.nil?
29
- p push_key
30
- binding.pry
31
- end
27
+
32
28
  if parent_frame.is_a?(GraphQL::Query)
33
29
  parent_frame = parent_frame.context
34
30
  end
@@ -47,14 +43,14 @@ module GraphQL
47
43
  nil
48
44
  end
49
45
 
50
- if push_data
51
- multiplex.context[:graphql_backtrace_contexts][push_key] = push_data
46
+ if push_data && multiplex
47
+ push_storage = multiplex.context[:graphql_backtrace_contexts] ||= {}
48
+ push_storage[push_key] = push_data
52
49
  multiplex.context[:last_graphql_backtrace_context] = push_data
53
50
  end
54
51
 
55
52
  if key == "execute_multiplex"
56
53
  multiplex_context = metadata[:multiplex].context
57
- multiplex_context[:graphql_backtrace_contexts] = {}
58
54
  begin
59
55
  yield
60
56
  rescue StandardError => err
@@ -27,36 +27,24 @@ module GraphQL
27
27
  schema.dataloader_class = self
28
28
  end
29
29
 
30
- def initialize(multiplex_context)
31
- @context = multiplex_context
30
+ def initialize
32
31
  @source_cache = Hash.new { |h, source_class| h[source_class] = Hash.new { |h2, batch_parameters|
33
32
  source = source_class.new(*batch_parameters)
34
33
  source.setup(self)
35
34
  h2[batch_parameters] = source
36
35
  }
37
36
  }
38
- @waiting_fibers = []
39
- @yielded_fibers = {}
37
+ @pending_jobs = []
40
38
  end
41
39
 
42
- # @return [Hash] the {Multiplex} context
43
- attr_reader :context
44
-
45
- # @api private
46
- attr_reader :yielded_fibers
47
-
48
- # Add some work to this dataloader to be scheduled later.
49
- # @param block Some work to enqueue
50
- # @return [void]
51
- def enqueue(&block)
52
- @waiting_fibers << Fiber.new {
53
- begin
54
- yield
55
- rescue StandardError => exception
56
- exception
57
- end
58
- }
59
- nil
40
+ # Get a Source instance from this dataloader, for calling `.load(...)` or `.request(...)` on.
41
+ #
42
+ # @param source_class [Class<GraphQL::Dataloader::Source>]
43
+ # @param batch_parameters [Array<Object>]
44
+ # @return [GraphQL::Dataloader::Source] An instance of {source_class}, initialized with `self, *batch_parameters`,
45
+ # and cached for the lifetime of this {Multiplex}.
46
+ def with(source_class, *batch_parameters)
47
+ @source_cache[source_class][batch_parameters]
60
48
  end
61
49
 
62
50
  # Tell the dataloader that this fiber is waiting for data.
@@ -69,33 +57,67 @@ module GraphQL
69
57
  nil
70
58
  end
71
59
 
72
- # @param path [Array<String, Integer>] A graphql response path
73
- # @return [Boolean] True if the current Fiber has yielded once via Dataloader at {path}
74
- def yielded?(path)
75
- @yielded_fibers[Fiber.current] == path
60
+ # @api private Nothing to see here
61
+ def append_job(&job)
62
+ # Given a block, queue it up to be worked through when `#run` is called.
63
+ # (If the dataloader is already running, then a Fiber will pick this up later.)
64
+ @pending_jobs.push(job)
65
+ nil
76
66
  end
77
67
 
78
- # Run all Fibers until they're all done
79
- #
80
- # Each cycle works like this:
81
- #
82
- # - Run each pending execution fiber (`@waiting_fibers`),
83
- # - Then run each pending Source, preparing more data for those fibers.
84
- # - Run each pending Source _again_ (if one Source requested more data from another Source)
85
- # - Continue until there are no pending sources
86
- # - Repeat: run execution fibers again ...
87
- #
88
- # @return [void]
68
+ # @api private Move along, move along
89
69
  def run
90
- # Start executing Fibers. This will run until all the Fibers are done.
91
- already_run_fibers = []
92
- while (current_fiber = @waiting_fibers.pop)
93
- # Run each execution fiber, enqueuing it in `already_run_fibers`
94
- # if it's still `.alive?`.
95
- # Any spin-off continuations will be enqueued in `@waiting_fibers` (via {#enqueue})
96
- resume_fiber_and_enqueue_continuation(current_fiber, already_run_fibers)
97
-
98
- if @waiting_fibers.empty?
70
+ # At a high level, the algorithm is:
71
+ #
72
+ # A) Inside Fibers, run jobs from the queue one-by-one
73
+ # - When one of the jobs yields to the dataloader (`Fiber.yield`), then that fiber will pause
74
+ # - In that case, if there are still pending jobs, a new Fiber will be created to run jobs
75
+ # - Continue until all jobs have been _started_ by a Fiber. (Any number of those Fibers may be waiting to be resumed, after their data is loaded)
76
+ # B) Once all known jobs have been run until they are complete or paused for data, run all pending data sources.
77
+ # - Similarly, create a Fiber to consume pending sources and tell them to load their data.
78
+ # - If one of those Fibers pauses, then create a new Fiber to continue working through remaining pending sources.
79
+ # - When a source causes another source to become pending, run the newly-pending source _first_, since it's a dependency of the previous one.
80
+ # C) After all pending sources have been completely loaded (there are no more pending sources), resume any Fibers that were waiting for data.
81
+ # - Those Fibers assume that source caches will have been populated with the data they were waiting for.
82
+ # - Those Fibers may request data from a source again, in which case they will yield and be added to a new pending fiber list.
83
+ # D) Once all pending fibers have been resumed once, return to `A` above.
84
+ #
85
+ # For whatever reason, the best implementation I could find was to order the steps `[D, A, B, C]`, with a special case for skipping `D`
86
+ # on the first pass. I just couldn't find a better way to write the loops in a way that was DRY and easy to read.
87
+ #
88
+ pending_fibers = []
89
+ next_fibers = []
90
+ first_pass = true
91
+
92
+ while first_pass || (f = pending_fibers.shift)
93
+ if first_pass
94
+ first_pass = false
95
+ else
96
+ # These fibers were previously waiting for sources to load data,
97
+ # resume them. (They might wait again, in which case, re-enqueue them.)
98
+ resume(f)
99
+ if f.alive?
100
+ next_fibers << f
101
+ end
102
+ end
103
+
104
+ while @pending_jobs.any?
105
+ # Create a Fiber to consume jobs until one of the jobs yields
106
+ # or jobs run out
107
+ f = Fiber.new {
108
+ while (job = @pending_jobs.shift)
109
+ job.call
110
+ end
111
+ }
112
+ resume(f)
113
+ # In this case, the job yielded. Queue it up to run again after
114
+ # we load whatever it's waiting for.
115
+ if f.alive?
116
+ next_fibers << f
117
+ end
118
+ end
119
+
120
+ if pending_fibers.empty?
99
121
  # Now, run all Sources which have become pending _before_ resuming GraphQL execution.
100
122
  # Sources might queue up other Sources, which is fine -- those will also run before resuming execution.
101
123
  #
@@ -109,9 +131,15 @@ module GraphQL
109
131
  end
110
132
 
111
133
  if source_fiber_stack
134
+ # Use a stack with `.pop` here so that when a source causes another source to become pending,
135
+ # that newly-pending source will run _before_ the one that depends on it.
136
+ # (See below where the old fiber is pushed to the stack, then the new fiber is pushed on the stack.)
112
137
  while (outer_source_fiber = source_fiber_stack.pop)
113
- resume_fiber_and_enqueue_continuation(outer_source_fiber, source_fiber_stack)
138
+ resume(outer_source_fiber)
114
139
 
140
+ if outer_source_fiber.alive?
141
+ source_fiber_stack << outer_source_fiber
142
+ end
115
143
  # If this source caused more sources to become pending, run those before running this one again:
116
144
  next_source_fiber = create_source_fiber
117
145
  if next_source_fiber
@@ -119,58 +147,26 @@ module GraphQL
119
147
  end
120
148
  end
121
149
  end
122
-
123
- # We ran all the first round of execution fibers,
124
- # and we ran all the pending sources.
125
- # So pick up any paused execution fibers and repeat.
126
- @waiting_fibers.concat(already_run_fibers)
127
- already_run_fibers.clear
150
+ # Move newly-enqueued Fibers on to the list to be resumed.
151
+ # Clear out the list of next-round Fibers, so that
152
+ # any Fibers that pause can be put on it.
153
+ pending_fibers.concat(next_fibers)
154
+ next_fibers.clear
128
155
  end
129
156
  end
130
- nil
131
- end
132
157
 
133
- # Get a Source instance from this dataloader, for calling `.load(...)` or `.request(...)` on.
134
- #
135
- # @param source_class [Class<GraphQL::Dataloader::Source]
136
- # @param batch_parameters [Array<Object>]
137
- # @return [GraphQL::Dataloader::Source] An instance of {source_class}, initialized with `self, *batch_parameters`,
138
- # and cached for the lifetime of this {Multiplex}.
139
- def with(source_class, *batch_parameters)
140
- @source_cache[source_class][batch_parameters]
158
+ if @pending_jobs.any?
159
+ raise "Invariant: #{@pending_jobs.size} pending jobs"
160
+ elsif pending_fibers.any?
161
+ raise "Invariant: #{pending_fibers.size} pending fibers"
162
+ elsif next_fibers.any?
163
+ raise "Invariant: #{next_fibers.size} next fibers"
164
+ end
165
+ nil
141
166
  end
142
167
 
143
- # @api private
144
- attr_accessor :current_runtime
145
-
146
168
  private
147
169
 
148
- # Check if this fiber is still alive.
149
- # If it is, and it should continue, then enqueue a continuation.
150
- # If it is, re-enqueue it in `fiber_queue`.
151
- # Otherwise, clean it up from @yielded_fibers.
152
- # @return [void]
153
- def resume_fiber_and_enqueue_continuation(fiber, fiber_stack)
154
- result = fiber.resume
155
- if result.is_a?(StandardError)
156
- raise result
157
- end
158
-
159
- # This fiber yielded; there's more to do here.
160
- # (If `#alive?` is false, then the fiber concluded without yielding.)
161
- if fiber.alive?
162
- if !@yielded_fibers.include?(fiber)
163
- # This fiber hasn't yielded yet, we should enqueue a continuation fiber
164
- @yielded_fibers[fiber] = current_runtime.progress_path
165
- current_runtime.enqueue_selections_fiber
166
- end
167
- fiber_stack << fiber
168
- else
169
- # Keep this set clean so that fibers can be GC'ed during execution
170
- @yielded_fibers.delete(fiber)
171
- end
172
- end
173
-
174
170
  # If there are pending sources, return a fiber for running them.
175
171
  # Otherwise, return `nil`.
176
172
  #
@@ -187,6 +183,14 @@ module GraphQL
187
183
  end
188
184
 
189
185
  if pending_sources
186
+ # By passing the whole array into this Fiber, it's possible that we set ourselves up for a bunch of no-ops.
187
+ # For example, if you have sources `[a, b, c]`, and `a` is loaded, then `b` yields to wait for `d`, then
188
+ # the next fiber would be dispatched with `[c, d]`. It would fulfill `c`, then `d`, then eventually
189
+ # the previous fiber would start up again. `c` would no longer be pending, but it would still receive `.run_pending_keys`.
190
+ # That method is short-circuited since it isn't pending any more, but it's still a waste.
191
+ #
192
+ # This design could probably be improved by maintaining a `@pending_sources` queue which is shared by the fibers,
193
+ # similar to `@pending_jobs`. That way, when a fiber is resumed, it would never pick up work that was finished by a different fiber.
190
194
  source_fiber = Fiber.new do
191
195
  pending_sources.each(&:run_pending_keys)
192
196
  end
@@ -194,5 +198,11 @@ module GraphQL
194
198
 
195
199
  source_fiber
196
200
  end
201
+
202
+ def resume(fiber)
203
+ fiber.resume
204
+ rescue UncaughtThrowError => e
205
+ throw e.tag, e.value
206
+ end
197
207
  end
198
208
  end