graphql 1.11.7 → 1.12.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: the registry flags this version of graphql as possibly problematic.

Files changed (140)
  1. checksums.yaml +4 -4
  2. data/lib/generators/graphql/install_generator.rb +7 -5
  3. data/lib/generators/graphql/relay.rb +55 -0
  4. data/lib/generators/graphql/relay_generator.rb +20 -0
  5. data/lib/generators/graphql/templates/base_connection.erb +8 -0
  6. data/lib/generators/graphql/templates/base_edge.erb +8 -0
  7. data/lib/generators/graphql/templates/node_type.erb +9 -0
  8. data/lib/generators/graphql/templates/object.erb +1 -1
  9. data/lib/generators/graphql/templates/query_type.erb +1 -3
  10. data/lib/generators/graphql/templates/schema.erb +8 -35
  11. data/lib/graphql.rb +38 -4
  12. data/lib/graphql/analysis/analyze_query.rb +7 -0
  13. data/lib/graphql/analysis/ast.rb +11 -2
  14. data/lib/graphql/analysis/ast/visitor.rb +9 -1
  15. data/lib/graphql/backtrace.rb +28 -19
  16. data/lib/graphql/backtrace/inspect_result.rb +0 -1
  17. data/lib/graphql/backtrace/legacy_tracer.rb +56 -0
  18. data/lib/graphql/backtrace/table.rb +22 -3
  19. data/lib/graphql/backtrace/traced_error.rb +0 -1
  20. data/lib/graphql/backtrace/tracer.rb +37 -10
  21. data/lib/graphql/backwards_compatibility.rb +2 -1
  22. data/lib/graphql/base_type.rb +1 -1
  23. data/lib/graphql/compatibility/execution_specification.rb +1 -0
  24. data/lib/graphql/compatibility/lazy_execution_specification.rb +2 -0
  25. data/lib/graphql/compatibility/query_parser_specification.rb +2 -0
  26. data/lib/graphql/compatibility/schema_parser_specification.rb +2 -0
  27. data/lib/graphql/dataloader.rb +208 -0
  28. data/lib/graphql/dataloader/null_dataloader.rb +21 -0
  29. data/lib/graphql/dataloader/request.rb +19 -0
  30. data/lib/graphql/dataloader/request_all.rb +19 -0
  31. data/lib/graphql/dataloader/source.rb +107 -0
  32. data/lib/graphql/define/assign_global_id_field.rb +1 -1
  33. data/lib/graphql/define/instance_definable.rb +32 -2
  34. data/lib/graphql/define/type_definer.rb +5 -5
  35. data/lib/graphql/deprecated_dsl.rb +7 -2
  36. data/lib/graphql/deprecation.rb +13 -0
  37. data/lib/graphql/enum_type.rb +2 -0
  38. data/lib/graphql/execution/errors.rb +4 -0
  39. data/lib/graphql/execution/execute.rb +7 -0
  40. data/lib/graphql/execution/interpreter.rb +11 -7
  41. data/lib/graphql/execution/interpreter/arguments.rb +51 -14
  42. data/lib/graphql/execution/interpreter/arguments_cache.rb +37 -14
  43. data/lib/graphql/execution/interpreter/handles_raw_value.rb +0 -7
  44. data/lib/graphql/execution/interpreter/resolve.rb +33 -25
  45. data/lib/graphql/execution/interpreter/runtime.rb +173 -123
  46. data/lib/graphql/execution/multiplex.rb +36 -23
  47. data/lib/graphql/function.rb +4 -0
  48. data/lib/graphql/input_object_type.rb +2 -0
  49. data/lib/graphql/interface_type.rb +3 -1
  50. data/lib/graphql/internal_representation/document.rb +2 -2
  51. data/lib/graphql/internal_representation/rewrite.rb +1 -1
  52. data/lib/graphql/language/document_from_schema_definition.rb +50 -23
  53. data/lib/graphql/object_type.rb +2 -2
  54. data/lib/graphql/pagination/connection.rb +5 -1
  55. data/lib/graphql/pagination/connections.rb +6 -16
  56. data/lib/graphql/parse_error.rb +0 -1
  57. data/lib/graphql/query.rb +10 -2
  58. data/lib/graphql/query/arguments.rb +1 -1
  59. data/lib/graphql/query/arguments_cache.rb +0 -1
  60. data/lib/graphql/query/context.rb +4 -2
  61. data/lib/graphql/query/executor.rb +0 -1
  62. data/lib/graphql/query/null_context.rb +3 -2
  63. data/lib/graphql/query/serial_execution.rb +1 -0
  64. data/lib/graphql/query/variable_validation_error.rb +1 -1
  65. data/lib/graphql/relay/base_connection.rb +7 -0
  66. data/lib/graphql/relay/connection_instrumentation.rb +4 -4
  67. data/lib/graphql/relay/connection_type.rb +1 -1
  68. data/lib/graphql/relay/mutation.rb +1 -0
  69. data/lib/graphql/relay/node.rb +3 -0
  70. data/lib/graphql/relay/type_extensions.rb +2 -0
  71. data/lib/graphql/scalar_type.rb +2 -0
  72. data/lib/graphql/schema.rb +64 -26
  73. data/lib/graphql/schema/argument.rb +86 -7
  74. data/lib/graphql/schema/build_from_definition.rb +139 -51
  75. data/lib/graphql/schema/directive.rb +76 -0
  76. data/lib/graphql/schema/directive/flagged.rb +57 -0
  77. data/lib/graphql/schema/enum.rb +3 -0
  78. data/lib/graphql/schema/enum_value.rb +12 -6
  79. data/lib/graphql/schema/field.rb +40 -16
  80. data/lib/graphql/schema/field/connection_extension.rb +3 -2
  81. data/lib/graphql/schema/find_inherited_value.rb +3 -1
  82. data/lib/graphql/schema/input_object.rb +39 -24
  83. data/lib/graphql/schema/interface.rb +1 -0
  84. data/lib/graphql/schema/member.rb +4 -0
  85. data/lib/graphql/schema/member/base_dsl_methods.rb +1 -0
  86. data/lib/graphql/schema/member/build_type.rb +3 -3
  87. data/lib/graphql/schema/member/has_arguments.rb +54 -49
  88. data/lib/graphql/schema/member/has_deprecation_reason.rb +25 -0
  89. data/lib/graphql/schema/member/has_directives.rb +98 -0
  90. data/lib/graphql/schema/member/has_fields.rb +1 -4
  91. data/lib/graphql/schema/member/has_validators.rb +31 -0
  92. data/lib/graphql/schema/member/instrumentation.rb +0 -1
  93. data/lib/graphql/schema/member/type_system_helpers.rb +1 -1
  94. data/lib/graphql/schema/middleware_chain.rb +1 -1
  95. data/lib/graphql/schema/object.rb +11 -0
  96. data/lib/graphql/schema/printer.rb +5 -4
  97. data/lib/graphql/schema/resolver.rb +7 -0
  98. data/lib/graphql/schema/resolver/has_payload_type.rb +2 -0
  99. data/lib/graphql/schema/subscription.rb +19 -1
  100. data/lib/graphql/schema/timeout_middleware.rb +3 -1
  101. data/lib/graphql/schema/validation.rb +4 -2
  102. data/lib/graphql/schema/validator.rb +163 -0
  103. data/lib/graphql/schema/validator/exclusion_validator.rb +31 -0
  104. data/lib/graphql/schema/validator/format_validator.rb +49 -0
  105. data/lib/graphql/schema/validator/inclusion_validator.rb +33 -0
  106. data/lib/graphql/schema/validator/length_validator.rb +57 -0
  107. data/lib/graphql/schema/validator/numericality_validator.rb +71 -0
  108. data/lib/graphql/schema/validator/required_validator.rb +68 -0
  109. data/lib/graphql/static_validation/validator.rb +4 -0
  110. data/lib/graphql/subscriptions.rb +17 -20
  111. data/lib/graphql/subscriptions/event.rb +0 -1
  112. data/lib/graphql/subscriptions/instrumentation.rb +0 -1
  113. data/lib/graphql/subscriptions/serialize.rb +0 -1
  114. data/lib/graphql/subscriptions/subscription_root.rb +1 -1
  115. data/lib/graphql/tracing.rb +2 -2
  116. data/lib/graphql/tracing/appoptics_tracing.rb +3 -1
  117. data/lib/graphql/tracing/platform_tracing.rb +3 -1
  118. data/lib/graphql/tracing/skylight_tracing.rb +1 -1
  119. data/lib/graphql/types/relay.rb +11 -3
  120. data/lib/graphql/types/relay/base_connection.rb +2 -92
  121. data/lib/graphql/types/relay/base_edge.rb +2 -35
  122. data/lib/graphql/types/relay/connection_behaviors.rb +123 -0
  123. data/lib/graphql/types/relay/default_relay.rb +27 -0
  124. data/lib/graphql/types/relay/edge_behaviors.rb +42 -0
  125. data/lib/graphql/types/relay/has_node_field.rb +41 -0
  126. data/lib/graphql/types/relay/has_nodes_field.rb +41 -0
  127. data/lib/graphql/types/relay/node.rb +2 -4
  128. data/lib/graphql/types/relay/node_behaviors.rb +15 -0
  129. data/lib/graphql/types/relay/node_field.rb +1 -19
  130. data/lib/graphql/types/relay/nodes_field.rb +1 -19
  131. data/lib/graphql/types/relay/page_info.rb +2 -14
  132. data/lib/graphql/types/relay/page_info_behaviors.rb +25 -0
  133. data/lib/graphql/union_type.rb +2 -0
  134. data/lib/graphql/upgrader/member.rb +1 -0
  135. data/lib/graphql/upgrader/schema.rb +1 -0
  136. data/lib/graphql/version.rb +1 -1
  137. metadata +50 -93
  138. data/lib/graphql/types/relay/base_field.rb +0 -22
  139. data/lib/graphql/types/relay/base_interface.rb +0 -29
  140. data/lib/graphql/types/relay/base_object.rb +0 -26
data/lib/graphql/backtrace/inspect_result.rb
@@ -1,5 +1,4 @@
  # frozen_string_literal: true
- # test_via: ../backtrace.rb
  module GraphQL
    class Backtrace
      module InspectResult
data/lib/graphql/backtrace/legacy_tracer.rb
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+ module GraphQL
+   class Backtrace
+     module LegacyTracer
+       module_function
+
+       # Implement the {GraphQL::Tracing} API.
+       def trace(key, metadata)
+         case key
+         when "lex", "parse"
+           # No context here, don't have a query yet
+           nil
+         when "execute_multiplex", "analyze_multiplex"
+           # No query context yet
+           nil
+         when "validate", "analyze_query", "execute_query", "execute_query_lazy"
+           query = metadata[:query] || metadata[:queries].first
+           push_data = query
+           multiplex = query.multiplex
+         when "execute_field", "execute_field_lazy"
+           # The interpreter passes `query:`, legacy passes `context:`
+           context = metadata[:context] || ((q = metadata[:query]) && q.context)
+           push_data = context
+           multiplex = context.query.multiplex
+         else
+           # Custom key, no backtrace data for this
+           nil
+         end
+
+         if push_data
+           multiplex.context[:last_graphql_backtrace_context] = push_data
+         end
+
+         if key == "execute_multiplex"
+           begin
+             yield
+           rescue StandardError => err
+             # This is an unhandled error from execution,
+             # Re-raise it with a GraphQL trace.
+             potential_context = metadata[:multiplex].context[:last_graphql_backtrace_context]
+
+             if potential_context.is_a?(GraphQL::Query::Context) || potential_context.is_a?(GraphQL::Query::Context::FieldResolutionContext)
+               raise TracedError.new(err, potential_context)
+             else
+               raise
+             end
+           ensure
+             metadata[:multiplex].context.delete(:last_graphql_backtrace_context)
+           end
+         else
+           yield
+         end
+       end
+     end
+   end
+ end
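The module above implements the {GraphQL::Tracing} API: a tracer receives `trace(key, metadata)` and must `yield` to let execution continue (returning the block's result). For orientation, a minimal custom tracer following that same contract might look like the sketch below; the class name, timing logic, and output are illustrative only, and `tracer(...)` on a class-based schema is the assumed attachment point.

# Sketch of a custom tracer following the trace(key, metadata) { yield } contract
# shown in LegacyTracer above. Names and output are illustrative, not part of the gem.
class TimingTracer
  def self.trace(key, metadata)
    started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    yield
  ensure
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started
    # e.g. "execute_query took 0.0042s"
    warn "#{key} took #{elapsed.round(4)}s"
  end
end

# Attached to a class-based schema (assumed API):
class MySchema < GraphQL::Schema
  tracer TimingTracer
end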
data/lib/graphql/backtrace/table.rb
@@ -1,5 +1,4 @@
  # frozen_string_literal: true
- # test_via: ../backtrace.rb
  module GraphQL
    class Backtrace
      # A class for turning a context into a human-readable table or array
@@ -79,6 +78,25 @@ module GraphQL
        # @return [Array] 5 items for a backtrace table (not `key`)
        def build_rows(context_entry, rows:, top: false)
          case context_entry
+         when Backtrace::Frame
+           field_alias = context_entry.ast_node.respond_to?(:alias) && context_entry.ast_node.alias
+           value = if top && @override_value
+             @override_value
+           else
+             @context.query.context.namespace(:interpreter)[:runtime].value_at(context_entry.path)
+           end
+           rows << [
+             "#{context_entry.ast_node ? context_entry.ast_node.position.join(":") : ""}",
+             "#{context_entry.field.path}#{field_alias ? " as #{field_alias}" : ""}",
+             "#{context_entry.object.object.inspect}",
+             context_entry.arguments.to_h.inspect,
+             Backtrace::InspectResult.inspect_result(value),
+           ]
+           if (parent = context_entry.parent_frame)
+             build_rows(parent, rows: rows)
+           else
+             rows
+           end
          when GraphQL::Query::Context::FieldResolutionContext
            ctx = context_entry
            field_name = "#{ctx.irep_node.owner_type.name}.#{ctx.field.name}"
@@ -112,15 +130,16 @@ module GraphQL
            if object.is_a?(GraphQL::Schema::Object)
              object = object.object
            end
+           value = context_entry.namespace(:interpreter)[:runtime].value_at([])
            rows << [
              "#{position}",
              "#{op_type}#{op_name ? " #{op_name}" : ""}",
              "#{object.inspect}",
              query.variables.to_h.inspect,
-             Backtrace::InspectResult.inspect_result(query.context.value),
+             Backtrace::InspectResult.inspect_result(value),
            ]
          else
-           raise "Unexpected get_rows subject #{context_entry.inspect}"
+           raise "Unexpected get_rows subject #{context_entry.class} (#{context_entry.inspect})"
          end
        end
      end
data/lib/graphql/backtrace/traced_error.rb
@@ -1,5 +1,4 @@
  # frozen_string_literal: true
- # test_via: ../backtrace.rb
  module GraphQL
    class Backtrace
      # When {Backtrace} is enabled, raised errors are wrapped with {TracedError}.
data/lib/graphql/backtrace/tracer.rb
@@ -1,46 +1,73 @@
  # frozen_string_literal: true
  module GraphQL
    class Backtrace
+     # TODO this is not fiber-friendly
      module Tracer
        module_function

        # Implement the {GraphQL::Tracing} API.
        def trace(key, metadata)
-         push_data = case key
+         case key
          when "lex", "parse"
            # No context here, don't have a query yet
            nil
          when "execute_multiplex", "analyze_multiplex"
-           metadata[:multiplex].queries
+           # No query context yet
+           nil
          when "validate", "analyze_query", "execute_query", "execute_query_lazy"
-           metadata[:query] || metadata[:queries]
+           query = metadata[:query] || metadata[:queries].first
+           push_key = []
+           push_data = query
+           multiplex = query.multiplex
          when "execute_field", "execute_field_lazy"
-           # The interpreter passes `query:`, legacy passes `context:`
-           metadata[:context] || ((q = metadata[:query]) && q.context)
+           query = metadata[:query] || raise(ArgumentError, "Add `legacy: true` to use GraphQL::Backtrace without the interpreter runtime.")
+           multiplex = query.multiplex
+           push_key = metadata[:path].reject { |i| i.is_a?(Integer) }
+           parent_frame = multiplex.context[:graphql_backtrace_contexts][push_key[0..-2]]
+
+           if parent_frame.is_a?(GraphQL::Query)
+             parent_frame = parent_frame.context
+           end
+
+           push_data = Frame.new(
+             query: query,
+             path: push_key,
+             ast_node: metadata[:ast_node],
+             field: metadata[:field],
+             object: metadata[:object],
+             arguments: metadata[:arguments],
+             parent_frame: parent_frame,
+           )
          else
            # Custom key, no backtrace data for this
            nil
          end

-         if push_data
-           Thread.current[:last_graphql_backtrace_context] = push_data
+         if push_data && multiplex
+           multiplex.context[:graphql_backtrace_contexts][push_key] = push_data
+           multiplex.context[:last_graphql_backtrace_context] = push_data
          end

          if key == "execute_multiplex"
+           multiplex_context = metadata[:multiplex].context
+           multiplex_context[:graphql_backtrace_contexts] = {}
            begin
              yield
            rescue StandardError => err
              # This is an unhandled error from execution,
              # Re-raise it with a GraphQL trace.
-             potential_context = Thread.current[:last_graphql_backtrace_context]
+             potential_context = multiplex_context[:last_graphql_backtrace_context]

-             if potential_context.is_a?(GraphQL::Query::Context) || potential_context.is_a?(GraphQL::Query::Context::FieldResolutionContext)
+             if potential_context.is_a?(GraphQL::Query::Context) ||
+                 potential_context.is_a?(GraphQL::Query::Context::FieldResolutionContext) ||
+                 potential_context.is_a?(Backtrace::Frame)
                raise TracedError.new(err, potential_context)
              else
                raise
              end
            ensure
-             Thread.current[:last_graphql_backtrace_context] = nil
+             multiplex_context.delete(:graphql_backtrace_contexts)
+             multiplex_context.delete(:last_graphql_backtrace_context)
            end
          else
            yield
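As the `ArgumentError` above indicates, this release moves backtrace state out of `Thread.current` into the multiplex context and keys each `Frame` by field path, while the pre-interpreter behavior lives on behind a `legacy: true` flag. A hedged sketch of enabling it on a schema follows; the schema and type names are placeholders.

# Sketch: enabling GraphQL::Backtrace on a class-based schema.
class MySchema < GraphQL::Schema
  query Types::QueryType

  # Interpreter-based schemas (the 1.12 default) use the Frame-based tracer above.
  use GraphQL::Backtrace

  # Schemas still on the pre-interpreter runtime would opt into the legacy,
  # Thread.current-based tracer instead, per the ArgumentError message above:
  # use GraphQL::Backtrace, legacy: true
end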
data/lib/graphql/backwards_compatibility.rb
@@ -1,6 +1,7 @@
  # frozen_string_literal: true
  module GraphQL
    # Helpers for migrating in a backwards-compatible way
+   # Remove this in GraphQL-Ruby 2.0, when all users of it will be gone.
    # @api private
    module BackwardsCompatibility
      module_function
@@ -21,7 +22,7 @@ module GraphQL
        backtrace = caller(0, 20)
        # Find the first line in the trace that isn't library internals:
        user_line = backtrace.find {|l| l !~ /lib\/graphql/ }
-       warn(message + "\n" + user_line + "\n")
+       GraphQL::Deprecation.warn(message + "\n" + user_line + "\n")
        wrapper = last ? LastArgumentsWrapper : FirstArgumentsWrapper
        wrapper.new(callable, from)
      else
data/lib/graphql/base_type.rb
@@ -224,7 +224,7 @@ module GraphQL
      private

      def warn_deprecated_coerce(alt_method_name)
-       warn("Coercing without a context is deprecated; use `#{alt_method_name}` if you don't want context-awareness")
+       GraphQL::Deprecation.warn("Coercing without a context is deprecated; use `#{alt_method_name}` if you don't want context-awareness")
      end
    end
  end
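The hunks above, and the compatibility suites below, route what used to be bare `Kernel#warn` calls through `GraphQL::Deprecation.warn` (the new `data/lib/graphql/deprecation.rb`, +13 lines in the file list). That file is not shown in this excerpt; as a rough, hypothetical sketch of what such a centralized shim usually looks like, not the gem's actual implementation:

# Hypothetical sketch of a centralized deprecation shim -- not the gem's actual file.
module GraphQL
  module Deprecation
    def self.warn(message)
      # Funneling every deprecation through one method makes it possible to
      # silence, redirect, or raise on deprecations in a single place later.
      Kernel.warn(message)
    end
  end
end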
data/lib/graphql/compatibility/execution_specification.rb
@@ -32,6 +32,7 @@ module GraphQL
        # @param execution_strategy [<#new, #execute>] An execution strategy class
        # @return [Class<Minitest::Test>] A test suite for this execution strategy
        def self.build_suite(execution_strategy)
+         GraphQL::Deprecation.warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
          Class.new(Minitest::Test) do
            class << self
              attr_accessor :counter_schema, :specification_schema
data/lib/graphql/compatibility/lazy_execution_specification.rb
@@ -7,6 +7,8 @@ module GraphQL
        # @param execution_strategy [<#new, #execute>] An execution strategy class
        # @return [Class<Minitest::Test>] A test suite for this execution strategy
        def self.build_suite(execution_strategy)
+         GraphQL::Deprecation.warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
+
          Class.new(Minitest::Test) do
            class << self
              attr_accessor :lazy_schema
data/lib/graphql/compatibility/query_parser_specification.rb
@@ -11,6 +11,8 @@ module GraphQL
        # @yieldreturn [GraphQL::Language::Nodes::Document]
        # @return [Class<Minitest::Test>] A test suite for this parse function
        def self.build_suite(&block)
+         GraphQL::Deprecation.warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
+
          Class.new(Minitest::Test) do
            include QueryAssertions
            include ParseErrorSpecification
data/lib/graphql/compatibility/schema_parser_specification.rb
@@ -8,6 +8,8 @@ module GraphQL
        # @yieldreturn [GraphQL::Language::Nodes::Document]
        # @return [Class<Minitest::Test>] A test suite for this parse function
        def self.build_suite(&block)
+         GraphQL::Deprecation.warn "#{self} will be removed from GraphQL-Ruby 2.0. There is no replacement, please open an issue on GitHub if you need support."
+
          Class.new(Minitest::Test) do
            @@parse_fn = block

data/lib/graphql/dataloader.rb
@@ -0,0 +1,208 @@
+ # frozen_string_literal: true
+
+ require "graphql/dataloader/null_dataloader"
+ require "graphql/dataloader/request"
+ require "graphql/dataloader/request_all"
+ require "graphql/dataloader/source"
+
+ module GraphQL
+   # This plugin supports Fiber-based concurrency, along with {GraphQL::Dataloader::Source}.
+   #
+   # @example Installing Dataloader
+   #
+   #   class MySchema < GraphQL::Schema
+   #     use GraphQL::Dataloader
+   #   end
+   #
+   # @example Waiting for batch-loaded data in a GraphQL field
+   #
+   #   field :team, Types::Team, null: true
+   #
+   #   def team
+   #     dataloader.with(Sources::Record, Team).load(object.team_id)
+   #   end
+   #
+   class Dataloader
+     def self.use(schema)
+       schema.dataloader_class = self
+     end
+
+     def initialize
+       @source_cache = Hash.new { |h, source_class| h[source_class] = Hash.new { |h2, batch_parameters|
+         source = source_class.new(*batch_parameters)
+         source.setup(self)
+         h2[batch_parameters] = source
+       }
+       }
+       @pending_jobs = []
+     end
+
+     # Get a Source instance from this dataloader, for calling `.load(...)` or `.request(...)` on.
+     #
+     # @param source_class [Class<GraphQL::Dataloader::Source>]
+     # @param batch_parameters [Array<Object>]
+     # @return [GraphQL::Dataloader::Source] An instance of {source_class}, initialized with `self, *batch_parameters`,
+     #   and cached for the lifetime of this {Multiplex}.
+     def with(source_class, *batch_parameters)
+       @source_cache[source_class][batch_parameters]
+     end
+
+     # Tell the dataloader that this fiber is waiting for data.
+     #
+     # Dataloader will resume the fiber after the requested data has been loaded (by another Fiber).
+     #
+     # @return [void]
+     def yield
+       Fiber.yield
+       nil
+     end
+
+     # @api private Nothing to see here
+     def append_job(&job)
+       # Given a block, queue it up to be worked through when `#run` is called.
+       # (If the dataloader is already running, then a Fiber will pick this up later.)
+       @pending_jobs.push(job)
+       nil
+     end
+
+     # @api private Move along, move along
+     def run
+       # At a high level, the algorithm is:
+       #
+       #  A) Inside Fibers, run jobs from the queue one-by-one
+       #    - When one of the jobs yields to the dataloader (`Fiber.yield`), then that fiber will pause
+       #    - In that case, if there are still pending jobs, a new Fiber will be created to run jobs
+       #    - Continue until all jobs have been _started_ by a Fiber. (Any number of those Fibers may be waiting to be resumed, after their data is loaded)
+       #  B) Once all known jobs have been run until they are complete or paused for data, run all pending data sources.
+       #    - Similarly, create a Fiber to consume pending sources and tell them to load their data.
+       #    - If one of those Fibers pauses, then create a new Fiber to continue working through remaining pending sources.
+       #    - When a source causes another source to become pending, run the newly-pending source _first_, since it's a dependency of the previous one.
+       #  C) After all pending sources have been completely loaded (there are no more pending sources), resume any Fibers that were waiting for data.
+       #    - Those Fibers assume that source caches will have been populated with the data they were waiting for.
+       #    - Those Fibers may request data from a source again, in which case they will yield and be added to a new pending fiber list.
+       #  D) Once all pending fibers have been resumed once, return to `A` above.
+       #
+       # For whatever reason, the best implementation I could find was to order the steps `[D, A, B, C]`, with a special case for skipping `D`
+       # on the first pass. I just couldn't find a better way to write the loops in a way that was DRY and easy to read.
+       #
+       pending_fibers = []
+       next_fibers = []
+       first_pass = true
+
+       while first_pass || (f = pending_fibers.shift)
+         if first_pass
+           first_pass = false
+         else
+           # These fibers were previously waiting for sources to load data,
+           # resume them. (They might wait again, in which case, re-enqueue them.)
+           f.resume
+           if f.alive?
+             next_fibers << f
+           end
+         end
+
+         while @pending_jobs.any?
+           # Create a Fiber to consume jobs until one of the jobs yields
+           # or jobs run out
+           f = Fiber.new {
+             while (job = @pending_jobs.shift)
+               job.call
+             end
+           }
+           result = f.resume
+           if result.is_a?(StandardError)
+             raise result
+           end
+           # In this case, the job yielded. Queue it up to run again after
+           # we load whatever it's waiting for.
+           if f.alive?
+             next_fibers << f
+           end
+         end
+
+         if pending_fibers.empty?
+           # Now, run all Sources which have become pending _before_ resuming GraphQL execution.
+           # Sources might queue up other Sources, which is fine -- those will also run before resuming execution.
+           #
+           # This is where an evented approach would be even better -- can we tell which
+           # fibers are ready to continue, and continue execution there?
+           #
+           source_fiber_stack = if (first_source_fiber = create_source_fiber)
+             [first_source_fiber]
+           else
+             nil
+           end
+
+           if source_fiber_stack
+             # Use a stack with `.pop` here so that when a source causes another source to become pending,
+             # that newly-pending source will run _before_ the one that depends on it.
+             # (See below where the old fiber is pushed to the stack, then the new fiber is pushed on the stack.)
+             while (outer_source_fiber = source_fiber_stack.pop)
+               result = outer_source_fiber.resume
+               if result.is_a?(StandardError)
+                 raise result
+               end
+
+               if outer_source_fiber.alive?
+                 source_fiber_stack << outer_source_fiber
+               end
+               # If this source caused more sources to become pending, run those before running this one again:
+               next_source_fiber = create_source_fiber
+               if next_source_fiber
+                 source_fiber_stack << next_source_fiber
+               end
+             end
+           end
+           # Move newly-enqueued Fibers on to the list to be resumed.
+           # Clear out the list of next-round Fibers, so that
+           # any Fibers that pause can be put on it.
+           pending_fibers.concat(next_fibers)
+           next_fibers.clear
+         end
+       end
+
+       if @pending_jobs.any?
+         raise "Invariant: #{@pending_jobs.size} pending jobs"
+       elsif pending_fibers.any?
+         raise "Invariant: #{pending_fibers.size} pending fibers"
+       elsif next_fibers.any?
+         raise "Invariant: #{next_fibers.size} next fibers"
+       end
+       nil
+     end
+
+     private
+
+     # If there are pending sources, return a fiber for running them.
+     # Otherwise, return `nil`.
+     #
+     # @return [Fiber, nil]
+     def create_source_fiber
+       pending_sources = nil
+       @source_cache.each_value do |source_by_batch_params|
+         source_by_batch_params.each_value do |source|
+           if source.pending?
+             pending_sources ||= []
+             pending_sources << source
+           end
+         end
+       end
+
+       if pending_sources
+         # By passing the whole array into this Fiber, it's possible that we set ourselves up for a bunch of no-ops.
+         # For example, if you have sources `[a, b, c]`, and `a` is loaded, then `b` yields to wait for `d`, then
+         # the next fiber would be dispatched with `[c, d]`. It would fulfill `c`, then `d`, then eventually
+         # the previous fiber would start up again. `c` would no longer be pending, but it would still receive `.run_pending_keys`.
+         # That method is short-circuited since it isn't pending any more, but it's still a waste.
+         #
+         # This design could probably be improved by maintaining a `@pending_sources` queue which is shared by the fibers,
+         # similar to `@pending_jobs`. That way, when a fiber is resumed, it would never pick up work that was finished by a different fiber.
+         source_fiber = Fiber.new do
+           pending_sources.each(&:run_pending_keys)
+         end
+       end
+
+       source_fiber
+     end
+   end
+ end
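The docstring above shows `dataloader.with(Sources::Record, Team).load(object.team_id)`, but the `Sources::Record` class it refers to is not part of this file. Below is a hedged sketch of what such a source typically looks like, assuming the `GraphQL::Dataloader::Source#fetch(keys)` override point from `dataloader/source.rb` (also added in this release) and an ActiveRecord/ActiveSupport-style model; the class and method names are illustrative, not part of the gem.

# Sketch of the Sources::Record class referenced in the docstring above (illustrative).
# Assumes Source subclasses implement fetch(keys) and return one value per key, in order.
module Sources
  class Record < GraphQL::Dataloader::Source
    def initialize(model_class)
      # Batch parameters passed to `dataloader.with(Sources::Record, Team)` arrive here.
      @model_class = model_class
    end

    def fetch(ids)
      # One query per batch of ids, instead of one query per field resolution.
      records = @model_class.where(id: ids).index_by(&:id)
      # Return values in the same order as the requested ids (nil for misses).
      ids.map { |id| records[id] }
    end
  end
end

# In a field resolver (as in the @example above), each call enqueues an id and
# the Dataloader batches them into a single fetch per model class:
#   dataloader.with(Sources::Record, Team).load(object.team_id)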