graphql 2.0.13 → 2.3.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of graphql might be problematic. See the package registry's advisory page for more details.

Files changed (228)
  1. checksums.yaml +4 -4
  2. data/lib/generators/graphql/install/mutation_root_generator.rb +2 -2
  3. data/lib/generators/graphql/install/templates/base_mutation.erb +2 -0
  4. data/lib/generators/graphql/install/templates/mutation_type.erb +2 -0
  5. data/lib/generators/graphql/install_generator.rb +3 -0
  6. data/lib/generators/graphql/mutation_delete_generator.rb +1 -1
  7. data/lib/generators/graphql/mutation_update_generator.rb +1 -1
  8. data/lib/generators/graphql/relay.rb +18 -1
  9. data/lib/generators/graphql/templates/base_argument.erb +2 -0
  10. data/lib/generators/graphql/templates/base_connection.erb +2 -0
  11. data/lib/generators/graphql/templates/base_edge.erb +2 -0
  12. data/lib/generators/graphql/templates/base_enum.erb +2 -0
  13. data/lib/generators/graphql/templates/base_field.erb +2 -0
  14. data/lib/generators/graphql/templates/base_input_object.erb +2 -0
  15. data/lib/generators/graphql/templates/base_interface.erb +2 -0
  16. data/lib/generators/graphql/templates/base_object.erb +2 -0
  17. data/lib/generators/graphql/templates/base_resolver.erb +6 -0
  18. data/lib/generators/graphql/templates/base_scalar.erb +2 -0
  19. data/lib/generators/graphql/templates/base_union.erb +2 -0
  20. data/lib/generators/graphql/templates/graphql_controller.erb +2 -0
  21. data/lib/generators/graphql/templates/loader.erb +2 -0
  22. data/lib/generators/graphql/templates/mutation.erb +2 -0
  23. data/lib/generators/graphql/templates/node_type.erb +2 -0
  24. data/lib/generators/graphql/templates/query_type.erb +2 -0
  25. data/lib/generators/graphql/templates/schema.erb +8 -0
  26. data/lib/graphql/analysis/analyzer.rb +89 -0
  27. data/lib/graphql/analysis/field_usage.rb +82 -0
  28. data/lib/graphql/analysis/max_query_complexity.rb +20 -0
  29. data/lib/graphql/analysis/max_query_depth.rb +20 -0
  30. data/lib/graphql/analysis/query_complexity.rb +183 -0
  31. data/lib/graphql/analysis/query_depth.rb +58 -0
  32. data/lib/graphql/analysis/visitor.rb +283 -0
  33. data/lib/graphql/analysis.rb +92 -1
  34. data/lib/graphql/backtrace/inspect_result.rb +0 -12
  35. data/lib/graphql/backtrace/table.rb +2 -2
  36. data/lib/graphql/backtrace/trace.rb +93 -0
  37. data/lib/graphql/backtrace/tracer.rb +1 -1
  38. data/lib/graphql/backtrace.rb +2 -1
  39. data/lib/graphql/coercion_error.rb +1 -9
  40. data/lib/graphql/dataloader/async_dataloader.rb +88 -0
  41. data/lib/graphql/dataloader/null_dataloader.rb +1 -1
  42. data/lib/graphql/dataloader/request.rb +5 -0
  43. data/lib/graphql/dataloader/source.rb +89 -45
  44. data/lib/graphql/dataloader.rb +115 -142
  45. data/lib/graphql/duration_encoding_error.rb +16 -0
  46. data/lib/graphql/execution/interpreter/argument_value.rb +5 -1
  47. data/lib/graphql/execution/interpreter/arguments.rb +1 -1
  48. data/lib/graphql/execution/interpreter/arguments_cache.rb +33 -33
  49. data/lib/graphql/execution/interpreter/resolve.rb +19 -0
  50. data/lib/graphql/execution/interpreter/runtime/graphql_result.rb +175 -0
  51. data/lib/graphql/execution/interpreter/runtime.rb +331 -455
  52. data/lib/graphql/execution/interpreter.rb +125 -61
  53. data/lib/graphql/execution/lazy.rb +6 -12
  54. data/lib/graphql/execution/lookahead.rb +124 -46
  55. data/lib/graphql/execution/multiplex.rb +3 -117
  56. data/lib/graphql/execution.rb +0 -1
  57. data/lib/graphql/introspection/directive_type.rb +3 -3
  58. data/lib/graphql/introspection/dynamic_fields.rb +1 -1
  59. data/lib/graphql/introspection/entry_points.rb +11 -5
  60. data/lib/graphql/introspection/field_type.rb +2 -2
  61. data/lib/graphql/introspection/schema_type.rb +10 -13
  62. data/lib/graphql/introspection/type_type.rb +17 -10
  63. data/lib/graphql/introspection.rb +3 -2
  64. data/lib/graphql/language/block_string.rb +34 -18
  65. data/lib/graphql/language/definition_slice.rb +1 -1
  66. data/lib/graphql/language/document_from_schema_definition.rb +75 -59
  67. data/lib/graphql/language/lexer.rb +358 -1506
  68. data/lib/graphql/language/nodes.rb +166 -93
  69. data/lib/graphql/language/parser.rb +795 -1953
  70. data/lib/graphql/language/printer.rb +340 -160
  71. data/lib/graphql/language/sanitized_printer.rb +21 -23
  72. data/lib/graphql/language/static_visitor.rb +167 -0
  73. data/lib/graphql/language/visitor.rb +188 -141
  74. data/lib/graphql/language.rb +61 -1
  75. data/lib/graphql/load_application_object_failed_error.rb +5 -1
  76. data/lib/graphql/pagination/active_record_relation_connection.rb +0 -8
  77. data/lib/graphql/pagination/array_connection.rb +6 -6
  78. data/lib/graphql/pagination/connection.rb +33 -6
  79. data/lib/graphql/pagination/mongoid_relation_connection.rb +1 -2
  80. data/lib/graphql/query/context/scoped_context.rb +101 -0
  81. data/lib/graphql/query/context.rb +117 -112
  82. data/lib/graphql/query/null_context.rb +12 -25
  83. data/lib/graphql/query/validation_pipeline.rb +6 -5
  84. data/lib/graphql/query/variables.rb +3 -3
  85. data/lib/graphql/query.rb +86 -30
  86. data/lib/graphql/railtie.rb +9 -6
  87. data/lib/graphql/rake_task.rb +29 -11
  88. data/lib/graphql/rubocop/graphql/base_cop.rb +1 -1
  89. data/lib/graphql/schema/addition.rb +59 -23
  90. data/lib/graphql/schema/always_visible.rb +11 -0
  91. data/lib/graphql/schema/argument.rb +55 -26
  92. data/lib/graphql/schema/base_64_encoder.rb +3 -5
  93. data/lib/graphql/schema/build_from_definition.rb +56 -32
  94. data/lib/graphql/schema/directive/one_of.rb +24 -0
  95. data/lib/graphql/schema/directive/specified_by.rb +14 -0
  96. data/lib/graphql/schema/directive/transform.rb +1 -1
  97. data/lib/graphql/schema/directive.rb +15 -3
  98. data/lib/graphql/schema/enum.rb +35 -24
  99. data/lib/graphql/schema/enum_value.rb +2 -3
  100. data/lib/graphql/schema/field/connection_extension.rb +2 -16
  101. data/lib/graphql/schema/field/scope_extension.rb +8 -1
  102. data/lib/graphql/schema/field.rb +147 -107
  103. data/lib/graphql/schema/field_extension.rb +1 -4
  104. data/lib/graphql/schema/find_inherited_value.rb +2 -7
  105. data/lib/graphql/schema/has_single_input_argument.rb +158 -0
  106. data/lib/graphql/schema/input_object.rb +47 -11
  107. data/lib/graphql/schema/interface.rb +15 -21
  108. data/lib/graphql/schema/introspection_system.rb +7 -17
  109. data/lib/graphql/schema/late_bound_type.rb +10 -0
  110. data/lib/graphql/schema/list.rb +2 -2
  111. data/lib/graphql/schema/loader.rb +2 -3
  112. data/lib/graphql/schema/member/base_dsl_methods.rb +18 -14
  113. data/lib/graphql/schema/member/build_type.rb +11 -3
  114. data/lib/graphql/schema/member/has_arguments.rb +170 -130
  115. data/lib/graphql/schema/member/has_ast_node.rb +12 -0
  116. data/lib/graphql/schema/member/has_deprecation_reason.rb +3 -4
  117. data/lib/graphql/schema/member/has_directives.rb +81 -61
  118. data/lib/graphql/schema/member/has_fields.rb +100 -38
  119. data/lib/graphql/schema/member/has_interfaces.rb +65 -10
  120. data/lib/graphql/schema/member/has_unresolved_type_error.rb +5 -1
  121. data/lib/graphql/schema/member/has_validators.rb +32 -6
  122. data/lib/graphql/schema/member/relay_shortcuts.rb +19 -0
  123. data/lib/graphql/schema/member/scoped.rb +19 -0
  124. data/lib/graphql/schema/member/type_system_helpers.rb +16 -0
  125. data/lib/graphql/schema/member/validates_input.rb +3 -3
  126. data/lib/graphql/schema/mutation.rb +7 -0
  127. data/lib/graphql/schema/object.rb +16 -5
  128. data/lib/graphql/schema/printer.rb +11 -8
  129. data/lib/graphql/schema/relay_classic_mutation.rb +7 -129
  130. data/lib/graphql/schema/resolver/has_payload_type.rb +9 -9
  131. data/lib/graphql/schema/resolver.rb +47 -32
  132. data/lib/graphql/schema/scalar.rb +3 -3
  133. data/lib/graphql/schema/subscription.rb +11 -4
  134. data/lib/graphql/schema/subset.rb +397 -0
  135. data/lib/graphql/schema/timeout.rb +25 -29
  136. data/lib/graphql/schema/type_expression.rb +2 -2
  137. data/lib/graphql/schema/type_membership.rb +3 -0
  138. data/lib/graphql/schema/union.rb +11 -2
  139. data/lib/graphql/schema/unique_within_type.rb +1 -1
  140. data/lib/graphql/schema/validator/all_validator.rb +60 -0
  141. data/lib/graphql/schema/validator.rb +4 -2
  142. data/lib/graphql/schema/warden.rb +238 -93
  143. data/lib/graphql/schema.rb +498 -103
  144. data/lib/graphql/static_validation/all_rules.rb +2 -1
  145. data/lib/graphql/static_validation/base_visitor.rb +7 -6
  146. data/lib/graphql/static_validation/definition_dependencies.rb +7 -1
  147. data/lib/graphql/static_validation/literal_validator.rb +24 -7
  148. data/lib/graphql/static_validation/rules/argument_literals_are_compatible.rb +1 -1
  149. data/lib/graphql/static_validation/rules/arguments_are_defined.rb +1 -1
  150. data/lib/graphql/static_validation/rules/directives_are_defined.rb +1 -2
  151. data/lib/graphql/static_validation/rules/fields_are_defined_on_type.rb +1 -1
  152. data/lib/graphql/static_validation/rules/fields_have_appropriate_selections.rb +12 -4
  153. data/lib/graphql/static_validation/rules/fields_will_merge.rb +10 -10
  154. data/lib/graphql/static_validation/rules/fragment_spreads_are_possible.rb +3 -3
  155. data/lib/graphql/static_validation/rules/fragment_types_exist.rb +1 -1
  156. data/lib/graphql/static_validation/rules/fragments_are_on_composite_types.rb +1 -1
  157. data/lib/graphql/static_validation/rules/mutation_root_exists.rb +1 -1
  158. data/lib/graphql/static_validation/rules/one_of_input_objects_are_valid.rb +66 -0
  159. data/lib/graphql/static_validation/rules/one_of_input_objects_are_valid_error.rb +29 -0
  160. data/lib/graphql/static_validation/rules/query_root_exists.rb +1 -1
  161. data/lib/graphql/static_validation/rules/required_arguments_are_present.rb +4 -4
  162. data/lib/graphql/static_validation/rules/required_input_object_attributes_are_present.rb +5 -5
  163. data/lib/graphql/static_validation/rules/subscription_root_exists.rb +1 -1
  164. data/lib/graphql/static_validation/rules/variable_default_values_are_correctly_typed.rb +18 -27
  165. data/lib/graphql/static_validation/rules/variable_usages_are_allowed.rb +1 -1
  166. data/lib/graphql/static_validation/rules/variables_are_input_types.rb +1 -1
  167. data/lib/graphql/static_validation/validation_context.rb +5 -5
  168. data/lib/graphql/static_validation/validator.rb +4 -1
  169. data/lib/graphql/static_validation.rb +0 -1
  170. data/lib/graphql/subscriptions/action_cable_subscriptions.rb +11 -4
  171. data/lib/graphql/subscriptions/broadcast_analyzer.rb +11 -5
  172. data/lib/graphql/subscriptions/event.rb +11 -10
  173. data/lib/graphql/subscriptions/serialize.rb +2 -0
  174. data/lib/graphql/subscriptions.rb +20 -13
  175. data/lib/graphql/testing/helpers.rb +151 -0
  176. data/lib/graphql/testing.rb +2 -0
  177. data/lib/graphql/tracing/active_support_notifications_trace.rb +16 -0
  178. data/lib/graphql/tracing/appoptics_trace.rb +251 -0
  179. data/lib/graphql/tracing/appoptics_tracing.rb +2 -2
  180. data/lib/graphql/tracing/appsignal_trace.rb +77 -0
  181. data/lib/graphql/tracing/data_dog_trace.rb +183 -0
  182. data/lib/graphql/tracing/data_dog_tracing.rb +9 -21
  183. data/lib/graphql/{execution/instrumentation.rb → tracing/legacy_hooks_trace.rb} +10 -28
  184. data/lib/graphql/tracing/legacy_trace.rb +69 -0
  185. data/lib/graphql/tracing/new_relic_trace.rb +75 -0
  186. data/lib/graphql/tracing/notifications_trace.rb +45 -0
  187. data/lib/graphql/tracing/platform_trace.rb +118 -0
  188. data/lib/graphql/tracing/platform_tracing.rb +17 -3
  189. data/lib/graphql/tracing/{prometheus_tracing → prometheus_trace}/graphql_collector.rb +4 -2
  190. data/lib/graphql/tracing/prometheus_trace.rb +89 -0
  191. data/lib/graphql/tracing/prometheus_tracing.rb +3 -3
  192. data/lib/graphql/tracing/scout_trace.rb +72 -0
  193. data/lib/graphql/tracing/sentry_trace.rb +112 -0
  194. data/lib/graphql/tracing/statsd_trace.rb +56 -0
  195. data/lib/graphql/tracing/trace.rb +76 -0
  196. data/lib/graphql/tracing.rb +20 -40
  197. data/lib/graphql/type_kinds.rb +7 -4
  198. data/lib/graphql/types/iso_8601_duration.rb +77 -0
  199. data/lib/graphql/types/relay/base_connection.rb +1 -1
  200. data/lib/graphql/types/relay/connection_behaviors.rb +68 -6
  201. data/lib/graphql/types/relay/edge_behaviors.rb +33 -5
  202. data/lib/graphql/types/relay/node_behaviors.rb +8 -2
  203. data/lib/graphql/types/relay/page_info_behaviors.rb +11 -2
  204. data/lib/graphql/types/relay.rb +0 -1
  205. data/lib/graphql/types/string.rb +1 -1
  206. data/lib/graphql/types.rb +1 -0
  207. data/lib/graphql/version.rb +1 -1
  208. data/lib/graphql.rb +27 -20
  209. data/readme.md +13 -3
  210. metadata +96 -47
  211. data/lib/graphql/analysis/ast/analyzer.rb +0 -84
  212. data/lib/graphql/analysis/ast/field_usage.rb +0 -57
  213. data/lib/graphql/analysis/ast/max_query_complexity.rb +0 -22
  214. data/lib/graphql/analysis/ast/max_query_depth.rb +0 -22
  215. data/lib/graphql/analysis/ast/query_complexity.rb +0 -230
  216. data/lib/graphql/analysis/ast/query_depth.rb +0 -55
  217. data/lib/graphql/analysis/ast/visitor.rb +0 -269
  218. data/lib/graphql/analysis/ast.rb +0 -81
  219. data/lib/graphql/deprecation.rb +0 -9
  220. data/lib/graphql/filter.rb +0 -53
  221. data/lib/graphql/language/lexer.rl +0 -280
  222. data/lib/graphql/language/parser.y +0 -554
  223. data/lib/graphql/language/token.rb +0 -34
  224. data/lib/graphql/schema/base_64_bp.rb +0 -26
  225. data/lib/graphql/schema/invalid_type_error.rb +0 -7
  226. data/lib/graphql/static_validation/type_stack.rb +0 -216
  227. data/lib/graphql/subscriptions/instrumentation.rb +0 -28
  228. data/lib/graphql/types/relay/default_relay.rb +0 -21
@@ -8,7 +8,7 @@ module GraphQL
8
8
  # simple internal code while adding the option to add Dataloader.
9
9
  class NullDataloader < Dataloader
10
10
  # These are all no-ops because code was
11
- # executed sychronously.
11
+ # executed synchronously.
12
12
  def run; end
13
13
  def run_isolated; yield; end
14
14
  def yield
@@ -14,6 +14,11 @@ module GraphQL
14
14
  def load
15
15
  @source.load(@key)
16
16
  end
17
+
18
+ def load_with_deprecation_warning
19
+ warn("Returning `.request(...)` from GraphQL::Dataloader is deprecated, use `.load(...)` instead. (See usage of #{@source} with #{@key.inspect}).")
20
+ load
21
+ end
17
22
  end
18
23
  end
19
24
  end
@@ -7,9 +7,9 @@ module GraphQL
7
7
  # @api private
8
8
  def setup(dataloader)
9
9
  # These keys have been requested but haven't been fetched yet
10
- @pending_keys = []
10
+ @pending = {}
11
11
  # These keys have been passed to `fetch` but haven't been finished yet
12
- @fetching_keys = []
12
+ @fetching = {}
13
13
  # { key => result }
14
14
  @results = {}
15
15
  @dataloader = dataloader
@@ -18,42 +18,66 @@ module GraphQL
18
18
  attr_reader :dataloader
19
19
 
20
20
  # @return [Dataloader::Request] a pending request for a value from `key`. Call `.load` on that object to wait for the result.
21
- def request(key)
22
- if !@results.key?(key)
23
- @pending_keys << key
21
+ def request(value)
22
+ res_key = result_key_for(value)
23
+ if !@results.key?(res_key)
24
+ @pending[res_key] ||= value
24
25
  end
25
- Dataloader::Request.new(self, key)
26
+ Dataloader::Request.new(self, value)
27
+ end
28
+
29
+ # Implement this method to return a stable identifier if different
30
+ # key objects should load the same data value.
31
+ #
32
+ # @param value [Object] A value passed to `.request` or `.load`, for which a value will be loaded
33
+ # @return [Object] The key for tracking this pending data
34
+ def result_key_for(value)
35
+ value
26
36
  end
27
37
 
28
38
  # @return [Dataloader::Request] a pending request for a values from `keys`. Call `.load` on that object to wait for the results.
29
- def request_all(keys)
30
- pending_keys = keys.select { |k| !@results.key?(k) }
31
- @pending_keys.concat(pending_keys)
32
- Dataloader::RequestAll.new(self, keys)
39
+ def request_all(values)
40
+ values.each do |v|
41
+ res_key = result_key_for(v)
42
+ if !@results.key?(res_key)
43
+ @pending[res_key] ||= v
44
+ end
45
+ end
46
+ Dataloader::RequestAll.new(self, values)
33
47
  end
34
48
 
35
- # @param key [Object] A loading key which will be passed to {#fetch} if it isn't already in the internal cache.
49
+ # @param value [Object] A loading value which will be passed to {#fetch} if it isn't already in the internal cache.
36
50
  # @return [Object] The result from {#fetch} for `key`. If `key` hasn't been loaded yet, the Fiber will yield until it's loaded.
37
- def load(key)
38
- if @results.key?(key)
39
- result_for(key)
51
+ def load(value)
52
+ result_key = result_key_for(value)
53
+ if @results.key?(result_key)
54
+ result_for(result_key)
40
55
  else
41
- @pending_keys << key
42
- sync
43
- result_for(key)
56
+ @pending[result_key] ||= value
57
+ sync([result_key])
58
+ result_for(result_key)
44
59
  end
45
60
  end
46
61
 
47
- # @param keys [Array<Object>] Loading keys which will be passed to `#fetch` (or read from the internal cache).
62
+ # @param values [Array<Object>] Loading keys which will be passed to `#fetch` (or read from the internal cache).
48
63
  # @return [Object] The result from {#fetch} for `keys`. If `keys` haven't been loaded yet, the Fiber will yield until they're loaded.
49
- def load_all(keys)
50
- if keys.any? { |k| !@results.key?(k) }
51
- pending_keys = keys.select { |k| !@results.key?(k) }
52
- @pending_keys.concat(pending_keys)
53
- sync
64
+ def load_all(values)
65
+ result_keys = []
66
+ pending_keys = []
67
+ values.each { |v|
68
+ k = result_key_for(v)
69
+ result_keys << k
70
+ if !@results.key?(k)
71
+ @pending[k] ||= v
72
+ pending_keys << k
73
+ end
74
+ }
75
+
76
+ if pending_keys.any?
77
+ sync(pending_keys)
54
78
  end
55
79
 
56
- keys.map { |k| result_for(k) }
80
+ result_keys.map { |k| result_for(k) }
57
81
  end
58
82
 
59
83
  # Subclasses must implement this method to return a value for each of `keys`
@@ -64,17 +88,17 @@ module GraphQL
64
88
  raise "Implement `#{self.class}#fetch(#{keys.inspect}) to return a record for each of the keys"
65
89
  end
66
90
 
91
+ MAX_ITERATIONS = 1000
67
92
  # Wait for a batch, if there's anything to batch.
68
93
  # Then run the batch and update the cache.
69
94
  # @return [void]
70
- def sync
71
- pending_keys = @pending_keys.dup
95
+ def sync(pending_result_keys)
72
96
  @dataloader.yield
73
97
  iterations = 0
74
- while pending_keys.any? { |k| !@results.key?(k) }
98
+ while pending_result_keys.any? { |key| !@results.key?(key) }
75
99
  iterations += 1
76
- if iterations > 1000
77
- raise "#{self.class}#sync tried 1000 times to load pending keys (#{pending_keys}), but they still weren't loaded. There is likely a circular dependency."
100
+ if iterations > MAX_ITERATIONS
101
+ raise "#{self.class}#sync tried #{MAX_ITERATIONS} times to load pending keys (#{pending_result_keys}), but they still weren't loaded. There is likely a circular dependency."
78
102
  end
79
103
  @dataloader.yield
80
104
  end
@@ -83,31 +107,41 @@ module GraphQL
83
107
 
84
108
  # @return [Boolean] True if this source has any pending requests for data.
85
109
  def pending?
86
- !@pending_keys.empty?
110
+ !@pending.empty?
111
+ end
112
+
113
+ # Add these key-value pairs to this source's cache
114
+ # (future loads will use these merged values).
115
+ # @param new_results [Hash<Object => Object>] key-value pairs to cache in this source
116
+ # @return [void]
117
+ def merge(new_results)
118
+ new_results.each do |new_k, new_v|
119
+ key = result_key_for(new_k)
120
+ @results[key] = new_v
121
+ end
122
+ nil
87
123
  end
88
124
 
89
125
  # Called by {GraphQL::Dataloader} to resolve and pending requests to this source.
90
126
  # @api private
91
127
  # @return [void]
92
128
  def run_pending_keys
93
- if !@fetching_keys.empty?
94
- @pending_keys -= @fetching_keys
129
+ if !@fetching.empty?
130
+ @fetching.each_key { |k| @pending.delete(k) }
95
131
  end
96
- return if @pending_keys.empty?
97
- fetch_keys = @pending_keys.uniq
98
- @fetching_keys.concat(fetch_keys)
99
- @pending_keys = []
100
- results = fetch(fetch_keys)
101
- fetch_keys.each_with_index do |key, idx|
132
+ return if @pending.empty?
133
+ fetch_h = @pending
134
+ @pending = {}
135
+ @fetching.merge!(fetch_h)
136
+ results = fetch(fetch_h.values)
137
+ fetch_h.each_with_index do |(key, _value), idx|
102
138
  @results[key] = results[idx]
103
139
  end
104
140
  nil
105
141
  rescue StandardError => error
106
- fetch_keys.each { |key| @results[key] = error }
142
+ fetch_h.each_key { |key| @results[key] = error }
107
143
  ensure
108
- if fetch_keys
109
- @fetching_keys -= fetch_keys
110
- end
144
+ fetch_h && fetch_h.each_key { |k| @fetching.delete(k) }
111
145
  end
112
146
 
113
147
  # These arguments are given to `dataloader.with(source_class, ...)`. The object
@@ -128,7 +162,14 @@ module GraphQL
128
162
  [*batch_args, **batch_kwargs]
129
163
  end
130
164
 
131
- attr_reader :pending_keys
165
+ # Clear any already-loaded objects for this source
166
+ # @return [void]
167
+ def clear_cache
168
+ @results.clear
169
+ nil
170
+ end
171
+
172
+ attr_reader :pending, :results
132
173
 
133
174
  private
134
175
 
@@ -145,8 +186,11 @@ This key should have been loaded already. This is a bug in GraphQL::Dataloader,
145
186
  ERR
146
187
  end
147
188
  result = @results[key]
148
-
149
- raise result if result.class <= StandardError
189
+ if result.is_a?(StandardError)
190
+ # Dup it because the rescuer may modify it.
191
+ # (This happens for GraphQL::ExecutionErrors, at least)
192
+ raise result.dup
193
+ end
150
194
 
151
195
  result
152
196
  end
@@ -27,11 +27,12 @@ module GraphQL
27
27
  attr_accessor :default_nonblocking
28
28
  end
29
29
 
30
- AsyncDataloader = Class.new(self) { self.default_nonblocking = true }
30
+ NonblockingDataloader = Class.new(self) { self.default_nonblocking = true }
31
31
 
32
32
  def self.use(schema, nonblocking: nil)
33
33
  schema.dataloader_class = if nonblocking
34
- AsyncDataloader
34
+ warn("`nonblocking: true` is deprecated from `GraphQL::Dataloader`, please use `GraphQL::Dataloader::AsyncDataloader` instead. Docs: https://graphql-ruby.org/dataloader/async_dataloader.")
35
+ NonblockingDataloader
35
36
  else
36
37
  self
37
38
  end
@@ -61,6 +62,37 @@ module GraphQL
61
62
  @nonblocking
62
63
  end
63
64
 
65
+ # This is called before the fiber is spawned, from the parent context (i.e. from
66
+ # the thread or fiber that it is scheduled from).
67
+ #
68
+ # @return [Hash<Symbol, Object>] Current fiber-local variables
69
+ def get_fiber_variables
70
+ fiber_vars = {}
71
+ Thread.current.keys.each do |fiber_var_key|
72
+ # This variable should be fresh in each new fiber
73
+ if fiber_var_key != :__graphql_runtime_info
74
+ fiber_vars[fiber_var_key] = Thread.current[fiber_var_key]
75
+ end
76
+ end
77
+ fiber_vars
78
+ end
79
+
80
+ # Set up the fiber variables in a new fiber.
81
+ #
82
+ # This is called within the fiber, right after it is spawned.
83
+ #
84
+ # @param vars [Hash<Symbol, Object>] Fiber-local variables from {get_fiber_variables}
85
+ # @return [void]
86
+ def set_fiber_variables(vars)
87
+ vars.each { |k, v| Thread.current[k] = v }
88
+ nil
89
+ end
90
+
91
+ # This method is called when Dataloader is finished using a fiber.
92
+ # Use it to perform any cleanup, such as releasing database connections (if required manually)
93
+ def cleanup_fiber
94
+ end
95
+
64
96
  # Get a Source instance from this dataloader, for calling `.load(...)` or `.request(...)` on.
65
97
  #
66
98
  # @param source_class [Class<GraphQL::Dataloader::Source]
@@ -104,6 +136,15 @@ module GraphQL
104
136
  nil
105
137
  end
106
138
 
139
+ # Clear any already-loaded objects from {Source} caches
140
+ # @return [void]
141
+ def clear_cache
142
+ @source_cache.each do |_source_class, batched_sources|
143
+ batched_sources.each_value(&:clear_cache)
144
+ end
145
+ nil
146
+ end
147
+
107
148
  # Use a self-contained queue for the work in the block.
108
149
  def run_isolated
109
150
  prev_queue = @pending_jobs
@@ -111,8 +152,8 @@ module GraphQL
111
152
  @source_cache.each do |source_class, batched_sources|
112
153
  batched_sources.each do |batch_args, batched_source_instance|
113
154
  if batched_source_instance.pending?
114
- prev_pending_keys[batched_source_instance] = batched_source_instance.pending_keys.dup
115
- batched_source_instance.pending_keys.clear
155
+ prev_pending_keys[batched_source_instance] = batched_source_instance.pending.dup
156
+ batched_source_instance.pending.clear
116
157
  end
117
158
  end
118
159
  end
@@ -127,125 +168,97 @@ module GraphQL
127
168
  res
128
169
  ensure
129
170
  @pending_jobs = prev_queue
130
- prev_pending_keys.each do |source_instance, pending_keys|
131
- source_instance.pending_keys.concat(pending_keys)
171
+ prev_pending_keys.each do |source_instance, pending|
172
+ pending.each do |key, value|
173
+ if !source_instance.results.key?(key)
174
+ source_instance.pending[key] = value
175
+ end
176
+ end
132
177
  end
133
178
  end
134
179
 
135
- # @api private Move along, move along
136
180
  def run
137
- if @nonblocking && !Fiber.scheduler
138
- raise "`nonblocking: true` requires `Fiber.scheduler`, assign one with `Fiber.set_scheduler(...)` before executing GraphQL."
139
- end
140
- # At a high level, the algorithm is:
141
- #
142
- # A) Inside Fibers, run jobs from the queue one-by-one
143
- # - When one of the jobs yields to the dataloader (`Fiber.yield`), then that fiber will pause
144
- # - In that case, if there are still pending jobs, a new Fiber will be created to run jobs
145
- # - Continue until all jobs have been _started_ by a Fiber. (Any number of those Fibers may be waiting to be resumed, after their data is loaded)
146
- # B) Once all known jobs have been run until they are complete or paused for data, run all pending data sources.
147
- # - Similarly, create a Fiber to consume pending sources and tell them to load their data.
148
- # - If one of those Fibers pauses, then create a new Fiber to continue working through remaining pending sources.
149
- # - When a source causes another source to become pending, run the newly-pending source _first_, since it's a dependency of the previous one.
150
- # C) After all pending sources have been completely loaded (there are no more pending sources), resume any Fibers that were waiting for data.
151
- # - Those Fibers assume that source caches will have been populated with the data they were waiting for.
152
- # - Those Fibers may request data from a source again, in which case they will yeilded and be added to a new pending fiber list.
153
- # D) Once all pending fibers have been resumed once, return to `A` above.
154
- #
155
- # For whatever reason, the best implementation I could find was to order the steps `[D, A, B, C]`, with a special case for skipping `D`
156
- # on the first pass. I just couldn't find a better way to write the loops in a way that was DRY and easy to read.
157
- #
158
- pending_fibers = []
159
- next_fibers = []
160
- pending_source_fibers = []
181
+ job_fibers = []
182
+ next_job_fibers = []
183
+ source_fibers = []
161
184
  next_source_fibers = []
162
185
  first_pass = true
163
-
164
- while first_pass || (f = pending_fibers.shift)
165
- if first_pass
186
+ manager = spawn_fiber do
187
+ while first_pass || job_fibers.any?
166
188
  first_pass = false
167
- else
168
- # These fibers were previously waiting for sources to load data,
169
- # resume them. (They might wait again, in which case, re-enqueue them.)
170
- resume(f)
171
- if f.alive?
172
- next_fibers << f
173
- end
174
- end
175
189
 
176
- while @pending_jobs.any?
177
- # Create a Fiber to consume jobs until one of the jobs yields
178
- # or jobs run out
179
- f = spawn_fiber {
180
- while (job = @pending_jobs.shift)
181
- job.call
190
+ while (f = (job_fibers.shift || spawn_job_fiber))
191
+ if f.alive?
192
+ finished = run_fiber(f)
193
+ if !finished
194
+ next_job_fibers << f
195
+ end
182
196
  end
183
- }
184
- resume(f)
185
- # In this case, the job yielded. Queue it up to run again after
186
- # we load whatever it's waiting for.
187
- if f.alive?
188
- next_fibers << f
189
197
  end
190
- end
198
+ join_queues(job_fibers, next_job_fibers)
191
199
 
192
- if pending_fibers.empty?
193
- # Now, run all Sources which have become pending _before_ resuming GraphQL execution.
194
- # Sources might queue up other Sources, which is fine -- those will also run before resuming execution.
195
- #
196
- # This is where an evented approach would be even better -- can we tell which
197
- # fibers are ready to continue, and continue execution there?
198
- #
199
- if (first_source_fiber = create_source_fiber)
200
- pending_source_fibers << first_source_fiber
201
- end
202
-
203
- while pending_source_fibers.any?
204
- while (outer_source_fiber = pending_source_fibers.pop)
205
- resume(outer_source_fiber)
206
- if outer_source_fiber.alive?
207
- next_source_fibers << outer_source_fiber
208
- end
209
- if (next_source_fiber = create_source_fiber)
210
- pending_source_fibers << next_source_fiber
200
+ while source_fibers.any? || @source_cache.each_value.any? { |group_sources| group_sources.each_value.any?(&:pending?) }
201
+ while (f = source_fibers.shift || spawn_source_fiber)
202
+ if f.alive?
203
+ finished = run_fiber(f)
204
+ if !finished
205
+ next_source_fibers << f
206
+ end
211
207
  end
212
208
  end
213
- join_queues(pending_source_fibers, next_source_fibers)
214
- next_source_fibers.clear
209
+ join_queues(source_fibers, next_source_fibers)
215
210
  end
216
- # Move newly-enqueued Fibers on to the list to be resumed.
217
- # Clear out the list of next-round Fibers, so that
218
- # any Fibers that pause can be put on it.
219
- join_queues(pending_fibers, next_fibers)
220
- next_fibers.clear
221
211
  end
222
212
  end
223
213
 
224
- if @pending_jobs.any?
225
- raise "Invariant: #{@pending_jobs.size} pending jobs"
226
- elsif pending_fibers.any?
227
- raise "Invariant: #{pending_fibers.size} pending fibers"
228
- elsif next_fibers.any?
229
- raise "Invariant: #{next_fibers.size} next fibers"
214
+ run_fiber(manager)
215
+
216
+ if manager.alive?
217
+ raise "Invariant: Manager fiber didn't terminate properly."
230
218
  end
231
- nil
232
- end
233
219
 
234
- def join_queues(previous_queue, next_queue)
235
- if @nonblocking
236
- Fiber.scheduler.run
237
- next_queue.select!(&:alive?)
220
+ if job_fibers.any?
221
+ raise "Invariant: job fibers should have exited but #{job_fibers.size} remained"
222
+ end
223
+ if source_fibers.any?
224
+ raise "Invariant: source fibers should have exited but #{source_fibers.size} remained"
238
225
  end
239
- previous_queue.concat(next_queue)
226
+ rescue UncaughtThrowError => e
227
+ throw e.tag, e.value
228
+ end
229
+
230
+ def run_fiber(f)
231
+ f.resume
232
+ end
233
+
234
+ def spawn_fiber
235
+ fiber_vars = get_fiber_variables
236
+ Fiber.new(blocking: !@nonblocking) {
237
+ set_fiber_variables(fiber_vars)
238
+ yield
239
+ cleanup_fiber
240
+ }
240
241
  end
241
242
 
242
243
  private
243
244
 
244
- # If there are pending sources, return a fiber for running them.
245
- # Otherwise, return `nil`.
246
- #
247
- # @return [Fiber, nil]
248
- def create_source_fiber
245
+ def join_queues(prev_queue, new_queue)
246
+ @nonblocking && Fiber.scheduler.run
247
+ prev_queue.concat(new_queue)
248
+ new_queue.clear
249
+ end
250
+
251
+ def spawn_job_fiber
252
+ if @pending_jobs.any?
253
+ spawn_fiber do
254
+ while job = @pending_jobs.shift
255
+ job.call
256
+ end
257
+ end
258
+ end
259
+ end
260
+
261
+ def spawn_source_fiber
249
262
  pending_sources = nil
250
263
  @source_cache.each_value do |source_by_batch_params|
251
264
  source_by_batch_params.each_value do |source|
@@ -257,52 +270,12 @@ module GraphQL
257
270
  end
258
271
 
259
272
  if pending_sources
260
- # By passing the whole array into this Fiber, it's possible that we set ourselves up for a bunch of no-ops.
261
- # For example, if you have sources `[a, b, c]`, and `a` is loaded, then `b` yields to wait for `d`, then
262
- # the next fiber would be dispatched with `[c, d]`. It would fulfill `c`, then `d`, then eventually
263
- # the previous fiber would start up again. `c` would no longer be pending, but it would still receive `.run_pending_keys`.
264
- # That method is short-circuited since it isn't pending any more, but it's still a waste.
265
- #
266
- # This design could probably be improved by maintaining a `@pending_sources` queue which is shared by the fibers,
267
- # similar to `@pending_jobs`. That way, when a fiber is resumed, it would never pick up work that was finished by a different fiber.
268
- source_fiber = spawn_fiber do
273
+ spawn_fiber do
269
274
  pending_sources.each(&:run_pending_keys)
270
275
  end
271
276
  end
272
-
273
- source_fiber
274
- end
275
-
276
- def resume(fiber)
277
- fiber.resume
278
- rescue UncaughtThrowError => e
279
- throw e.tag, e.value
280
- end
281
-
282
- # Copies the thread local vars into the fiber thread local vars. Many
283
- # gems (such as RequestStore, MiniRacer, etc.) rely on thread local vars
284
- # to keep track of execution context, and without this they do not
285
- # behave as expected.
286
- #
287
- # @see https://github.com/rmosolgo/graphql-ruby/issues/3449
288
- def spawn_fiber
289
- fiber_locals = {}
290
-
291
- Thread.current.keys.each do |fiber_var_key|
292
- fiber_locals[fiber_var_key] = Thread.current[fiber_var_key]
293
- end
294
-
295
- if @nonblocking
296
- Fiber.new(blocking: false) do
297
- fiber_locals.each { |k, v| Thread.current[k] = v }
298
- yield
299
- end
300
- else
301
- Fiber.new do
302
- fiber_locals.each { |k, v| Thread.current[k] = v }
303
- yield
304
- end
305
- end
306
277
  end
307
278
  end
308
279
  end
280
+
281
+ require "graphql/dataloader/async_dataloader"
@@ -0,0 +1,16 @@
1
+ # frozen_string_literal: true
2
+ module GraphQL
3
+ # This error is raised when `Types::ISO8601Duration` is asked to return a value
4
+ # that cannot be parsed as an ISO8601-formatted duration by ActiveSupport::Duration.
5
+ #
6
+ # @see GraphQL::Types::ISO8601Duration which raises this error
7
+ class DurationEncodingError < GraphQL::RuntimeTypeError
8
+ # The value which couldn't be encoded
9
+ attr_reader :duration_value
10
+
11
+ def initialize(value)
12
+ @duration_value = value
13
+ super("Duration cannot be parsed: #{value}. \nDuration must be an ISO8601-formatted duration.")
14
+ end
15
+ end
16
+ end
@@ -6,15 +6,19 @@ module GraphQL
6
6
  # A container for metadata regarding arguments present in a GraphQL query.
7
7
  # @see Interpreter::Arguments#argument_values for a hash of these objects.
8
8
  class ArgumentValue
9
- def initialize(definition:, value:, default_used:)
9
+ def initialize(definition:, value:, original_value:, default_used:)
10
10
  @definition = definition
11
11
  @value = value
12
+ @original_value = original_value
12
13
  @default_used = default_used
13
14
  end
14
15
 
15
16
  # @return [Object] The Ruby-ready value for this Argument
16
17
  attr_reader :value
17
18
 
19
+ # @return [Object] The value of this argument _before_ `prepare` is applied.
20
+ attr_reader :original_value
21
+
18
22
  # @return [GraphQL::Schema::Argument] The definition instance for this argument
19
23
  attr_reader :definition
20
24
 
@@ -80,7 +80,7 @@ module GraphQL
80
80
  )
81
81
  end
82
82
 
83
- NO_ARGS = {}.freeze
83
+ NO_ARGS = GraphQL::EmptyObjects::EMPTY_HASH
84
84
  EMPTY = self.new(argument_values: nil, keyword_arguments: NO_ARGS).freeze
85
85
  end
86
86
  end