graphql 2.2.0 → 2.2.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1294ac9e09f2c6924a771765e3f79d6c29449235e65f736afd177270b2a90b99
-  data.tar.gz: eece859aa0e3137e5d7a67c39ec79917d3ee884e3011cb8e9f203801c78c0408
+  metadata.gz: a1caad3d02e9d64f06c086fd46c324a0df1fc66c35353aef62dd716fe7ea4b9c
+  data.tar.gz: 8c53383e4fe8b0e7d5199d42460e0a5920940ae27965a7223f5ef020b1c594cc
 SHA512:
-  metadata.gz: 4a8343dd7ddcdfbaa05cdfd462612810bbbaa600c62c2d392d367926e7c4742a14705cc05a7e14cfae6d0d5aecb0509e96b645edcbfcdb63d250349bdeef725e
-  data.tar.gz: de145d99232eecc3feb68fa0cb2f3cdfe73fa154924f3ad27ec25ab1e90783927a2b064667233379adc5f7bc1a38563a39466c193a5378b64e13676f802aeeeb
+  metadata.gz: ae189d6de14b18dbee6781688a43d1f15cfb1418fa9ceb45ca099f577a5cd9339c6e16d8402e7b15ba62c64afbfbaed64f3d5d64b253a51597fb50d586d5a6dd
+  data.tar.gz: ab5860a939248086c5b8df11f0f638edce805110e75f51b068addce3e0956a9ffeda17836bf02962a943b137e4defccd48e3c18f836973f0c780c4ef225f7bd0
@@ -16,10 +16,11 @@ module GraphQL
       max_possible_complexity
     end
 
-    class ScopedTypeComplexity
-      # A single proc for {#scoped_children} hashes. Use this to avoid repeated allocations,
-      # since the lexical binding isn't important.
-      HASH_CHILDREN = ->(h, k) { h[k] = {} }
+    # ScopedTypeComplexity models a tree of GraphQL types mapped to inner selections, ie:
+    # Hash<GraphQL::BaseType, Hash<String, ScopedTypeComplexity>>
+    class ScopedTypeComplexity < Hash
+      # A proc for defaulting empty namespace requests as a new scope hash.
+      DEFAULT_PROC = ->(h, k) { h[k] = {} }
 
       attr_reader :field_definition, :response_path, :query
 
@@ -27,32 +28,19 @@ module GraphQL
       # @param field_definition [GraphQL::Field, GraphQL::Schema::Field] Used for getting the `.complexity` configuration
       # @param query [GraphQL::Query] Used for `query.possible_types`
       # @param response_path [Array<String>] The path to the response key for the field
+      # @return [Hash<GraphQL::BaseType, Hash<String, ScopedTypeComplexity>>]
       def initialize(parent_type, field_definition, query, response_path)
+        super(&DEFAULT_PROC)
         @parent_type = parent_type
         @field_definition = field_definition
         @query = query
         @response_path = response_path
-        @scoped_children = nil
         @nodes = []
       end
 
       # @return [Array<GraphQL::Language::Nodes::Field>]
       attr_reader :nodes
 
-      # Returns true if this field has no selections, ie, it's a scalar.
-      # We need a quick way to check whether we should continue traversing.
-      def terminal?
-        @scoped_children.nil?
-      end
-
-      # This value is only calculated when asked for to avoid needless hash allocations.
-      # Also, if it's never asked for, we determine that this scope complexity
-      # is a scalar field ({#terminal?}).
-      # @return [Hash<Hash<Class => ScopedTypeComplexity>]
-      def scoped_children
-        @scoped_children ||= Hash.new(&HASH_CHILDREN)
-      end
-
       def own_complexity(child_complexity)
         @field_definition.calculate_complexity(query: @query, nodes: @nodes, child_complexity: child_complexity)
       end
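The new class leans on a plain Ruby idiom: a Hash subclass whose default proc autovivifies nested hashes, so `scope[type][field] ||= ...` needs no explicit setup, and `empty?` takes over the role the removed `terminal?` check used to play. A minimal standalone sketch (hypothetical names, not the gem's API):

# Hash subclass whose default proc creates a nested hash on first access.
class ScopeTree < Hash
  DEFAULT_PROC = ->(h, k) { h[k] = {} }

  def initialize
    super(&DEFAULT_PROC)
  end
end

tree = ScopeTree.new
tree[:PostType]["comments"] ||= :complexity_entry # inner hash is created on demand
tree.empty?          # => false, this scope has selections
ScopeTree.new.empty? # => true, i.e. a leaf ("terminal") selection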
@@ -65,19 +53,14 @@ module GraphQL
       return if visitor.skipping?
       parent_type = visitor.parent_type_definition
       field_key = node.alias || node.name
-      # Find the complexity calculation for this field --
-      # if we're re-entering a selection, we'll already have one.
-      # Otherwise, make a new one and store it.
-      #
-      # `node` and `visitor.field_definition` may appear from a cache,
-      # but I think that's ok. If the arguments _didn't_ match,
-      # then the query would have been rejected as invalid.
-      complexities_on_type = @complexities_on_type_by_query[visitor.query] ||= [ScopedTypeComplexity.new(nil, nil, query, visitor.response_path)]
-
-      complexity = complexities_on_type.last.scoped_children[parent_type][field_key] ||= ScopedTypeComplexity.new(parent_type, visitor.field_definition, visitor.query, visitor.response_path)
-      complexity.nodes.push(node)
-      # Push it on the stack.
-      complexities_on_type.push(complexity)
+
+      # Find or create a complexity scope stack for this query.
+      scopes_stack = @complexities_on_type_by_query[visitor.query] ||= [ScopedTypeComplexity.new(nil, nil, query, visitor.response_path)]
+
+      # Find or create the complexity costing node for this field.
+      scope = scopes_stack.last[parent_type][field_key] ||= ScopedTypeComplexity.new(parent_type, visitor.field_definition, visitor.query, visitor.response_path)
+      scope.nodes.push(node)
+      scopes_stack.push(scope)
     end
 
     def on_leave_field(node, parent, visitor)
@@ -85,89 +68,61 @@ module GraphQL
       # we'll visit them when we hit the spreads instead
       return if visitor.visiting_fragment_definition?
       return if visitor.skipping?
-      complexities_on_type = @complexities_on_type_by_query[visitor.query]
-      complexities_on_type.pop
+      scopes_stack = @complexities_on_type_by_query[visitor.query]
+      scopes_stack.pop
     end
 
     private
 
     # @return [Integer]
     def max_possible_complexity
-      @complexities_on_type_by_query.reduce(0) do |total, (query, complexities_on_type)|
-        root_complexity = complexities_on_type.last
-        # Use this entry point to calculate the total complexity
-        total_complexity_for_query = merged_max_complexity_for_scopes(query, [root_complexity.scoped_children])
-        total + total_complexity_for_query
+      @complexities_on_type_by_query.reduce(0) do |total, (query, scopes_stack)|
+        total + merged_max_complexity_for_scopes(query, [scopes_stack.first])
       end
     end
 
     # @param query [GraphQL::Query] Used for `query.possible_types`
-    # @param scoped_children_hashes [Array<Hash>] Array of scoped children hashes
+    # @param scopes [Array<ScopedTypeComplexity>] Array of scoped type complexities
     # @return [Integer]
-    def merged_max_complexity_for_scopes(query, scoped_children_hashes)
-      # Figure out what scopes are possible here.
+    def merged_max_complexity_for_scopes(query, scopes)
+      # Aggregate a set of all possible scope types encountered (scope keys).
       # Use a hash, but ignore the values; it's just a fast way to work with the keys.
-      all_scopes = {}
-      scoped_children_hashes.each do |h|
-        all_scopes.merge!(h)
+      possible_scope_types = scopes.each_with_object({}) do |scope, memo|
+        memo.merge!(scope)
       end
 
-      # If an abstract scope is present, but _all_ of its concrete types
-      # are also in the list, remove it from the list of scopes to check,
-      # because every possible type is covered by a concrete type.
-      # (That is, there are no remainder types to check.)
-      prev_keys = all_scopes.keys
-      prev_keys.each do |scope|
-        next unless scope.kind.abstract?
-
-        missing_concrete_types = query.possible_types(scope).select { |t| !all_scopes.key?(t) }
-        # This concrete type is possible _only_ as a member of the abstract type.
-        # So, attribute to it the complexity which belongs to the abstract type.
-        missing_concrete_types.each do |concrete_scope|
-          all_scopes[concrete_scope] = all_scopes[scope]
+      # Expand abstract scope types into their concrete implementations;
+      # overlapping abstracts coalesce through their intersecting types.
+      possible_scope_types.keys.each do |possible_scope_type|
+        next unless possible_scope_type.kind.abstract?
+
+        query.possible_types(possible_scope_type).each do |impl_type|
+          possible_scope_types[impl_type] ||= true
         end
-        all_scopes.delete(scope)
+        possible_scope_types.delete(possible_scope_type)
       end
 
-      # This will hold `{ type => int }` pairs, one for each possible branch
-      complexity_by_scope = {}
-
-      # For each scope,
-      # find the lexical selections that might apply to it,
-      # and gather them together into an array.
-      # Then, treat the set of selection hashes
-      # as a set and calculate the complexity for them as a unit
-      all_scopes.each do |scope, _|
-        # These will be the selections on `scope`
-        children_for_scope = []
-        scoped_children_hashes.each do |sc_h|
-          sc_h.each do |inner_scope, children_hash|
-            if applies_to?(query, scope, inner_scope)
-              children_for_scope << children_hash
-            end
+      # Aggregate the lexical selections that may apply to each possible type,
+      # and then return the maximum cost among possible typed selections.
+      possible_scope_types.each_key.reduce(0) do |max, possible_scope_type|
+        # Collect inner selections from all scopes that intersect with this possible type.
+        all_inner_selections = scopes.each_with_object([]) do |scope, memo|
+          scope.each do |scope_type, inner_selections|
+            memo << inner_selections if types_intersect?(query, scope_type, possible_scope_type)
           end
         end
 
-        # Calculate the complexity for `scope`, merging all
-        # possible lexical branches.
-        complexity_value = merged_max_complexity(query, children_for_scope)
-        complexity_by_scope[scope] = complexity_value
+        # Find the maximum complexity for the scope type among possible lexical branches.
+        complexity = merged_max_complexity(query, all_inner_selections)
+        complexity > max ? complexity : max
       end
-
-      # Return the max complexity among all scopes
-      complexity_by_scope.each_value.max
     end
 
-    def applies_to?(query, left_scope, right_scope)
-      if left_scope == right_scope
-        # This can happen when several branches are being analyzed together
-        true
-      else
-        # Check if these two scopes have _any_ types in common.
-        possible_right_types = query.possible_types(right_scope)
-        possible_left_types = query.possible_types(left_scope)
-        !(possible_right_types & possible_left_types).empty?
-      end
+    def types_intersect?(query, a, b)
+      return true if a == b
+
+      a_types = query.possible_types(a)
+      query.possible_types(b).any? { |t| a_types.include?(t) }
     end
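A standalone sketch of what the renamed helper checks (plain Ruby, with arrays standing in for `query.possible_types(...)` results; names are hypothetical): two scopes intersect when they share at least one concrete object type.

def types_intersect?(possible_a, possible_b)
  possible_b.any? { |t| possible_a.include?(t) }
end

types_intersect?([:Post, :Comment], [:Post]) # => true  (e.g. an interface and one of its implementers)
types_intersect?([:Post], [:User])           # => false (disjoint object types)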
 
     # A hook which is called whenever a field's max complexity is calculated.
@@ -179,50 +134,47 @@ module GraphQL
     def field_complexity(scoped_type_complexity, max_complexity:, child_complexity: nil)
     end
 
-    # @param children_for_scope [Array<Hash>] An array of `scoped_children[scope]` hashes
-    # (`{field_key => complexity}`)
-    # @return [Integer] Complexity value for all these selections in the current scope
-    def merged_max_complexity(query, children_for_scope)
-      all_keys = []
-      children_for_scope.each do |c|
-        all_keys.concat(c.keys)
+    # @param inner_selections [Array<Hash<String, ScopedTypeComplexity>>] Field selections for a scope
+    # @return [Integer] Total complexity value for all these selections in the parent scope
+    def merged_max_complexity(query, inner_selections)
+      # Aggregate a set of all unique field selection keys across all scopes.
+      # Use a hash, but ignore the values; it's just a fast way to work with the keys.
+      unique_field_keys = inner_selections.each_with_object({}) do |inner_selection, memo|
+        memo.merge!(inner_selection)
       end
-      all_keys.uniq!
-      complexity_for_keys = {}
-
-      all_keys.each do |child_key|
-        scoped_children_for_key = nil
-        complexity_for_key = nil
-        children_for_scope.each do |children_hash|
-          next unless children_hash.key?(child_key)
-
-          complexity_for_key = children_hash[child_key]
-          if complexity_for_key.terminal?
-            # Assume that all terminals would return the same complexity
-            # Since it's a terminal, its child complexity is zero.
-            complexity = complexity_for_key.own_complexity(0)
-            complexity_for_keys[child_key] = complexity
-
-            field_complexity(complexity_for_key, max_complexity: complexity, child_complexity: nil)
+
+      # Add up the total cost for each unique field name's coalesced selections
+      unique_field_keys.each_key.reduce(0) do |total, field_key|
+        composite_scopes = nil
+        field_cost = 0
+
+        # Collect composite selection scopes for further aggregation,
+        # leaf selections report their costs directly.
+        inner_selections.each do |inner_selection|
+          child_scope = inner_selection[field_key]
+          next unless child_scope
+
+          # Empty child scopes are leaf nodes with zero child complexity.
+          if child_scope.empty?
+            field_cost = child_scope.own_complexity(0)
+            field_complexity(child_scope, max_complexity: field_cost, child_complexity: nil)
           else
-            scoped_children_for_key ||= []
-            scoped_children_for_key << complexity_for_key.scoped_children
+            composite_scopes ||= []
+            composite_scopes << child_scope
           end
         end
 
-        next unless scoped_children_for_key
+        if composite_scopes
+          child_complexity = merged_max_complexity_for_scopes(query, composite_scopes)
 
-        child_complexity = merged_max_complexity_for_scopes(query, scoped_children_for_key)
-        # This is the _last_ one we visited; assume it's representative.
-        max_complexity = complexity_for_key.own_complexity(child_complexity)
-
-        field_complexity(complexity_for_key, max_complexity: max_complexity, child_complexity: child_complexity)
+          # This is the last composite scope visited; assume it's representative (for backwards compatibility).
+          # Note: it would be more correct to score each composite scope and use the maximum possibility.
+          field_cost = composite_scopes.last.own_complexity(child_complexity)
+          field_complexity(composite_scopes.last, max_complexity: field_cost, child_complexity: child_complexity)
+        end
 
-        complexity_for_keys[child_key] = max_complexity
+        total + field_cost
       end
-
-      # Calculate the child complexity by summing the complexity of all selections
-      complexity_for_keys.each_value.inject(0, &:+)
     end
   end
 end
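Taken together, the rewritten analyzer sums costs across sibling fields and takes the maximum across the possible types a selection could resolve to. A flattened, hypothetical illustration in plain Ruby (not the gem's API):

# Per-type field costs for one selection set:
costs_by_type = {
  "Post"    => { "title" => 1, "comments" => 10 },
  "Comment" => { "body" => 1 },
}

# Fields within a type add up; alternative types compete, and only the most
# expensive branch counts toward the reported complexity.
costs_by_type.values.map { |fields| fields.values.sum }.max # => 11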
@@ -23,6 +23,11 @@ module GraphQL
           while (task = job_tasks.shift || spawn_job_task(jobs_task, jobs_condition))
             if task.alive?
               next_job_tasks << task
+            elsif task.failed?
+              # re-raise a raised error -
+              # this also covers errors from sources since
+              # these jobs wait for sources as needed.
+              task.wait
             end
           end
         end.wait
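For context, this dataloader variant builds on the async gem, where waiting on a failed task re-raises the exception that task captured; that is what the new `elsif task.failed?` branch relies on. A hedged, minimal sketch:

require "async"

Async do
  task = Async { raise "boom" } # the child task fails
  begin
    task.wait                   # waiting on the failed task re-raises its error
  rescue RuntimeError => e
    puts e.message              # => "boom"
  end
end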
@@ -169,7 +169,7 @@ module GraphQL
         nil
       end
 
-      attr_reader :pending
+      attr_reader :pending, :results
 
       private
 
@@ -119,12 +119,7 @@ module GraphQL
     #
     # @return [void]
     def yield
-      if use_fiber_resume?
-        Fiber.yield
-      else
-        parent_fiber = Thread.current[:parent_fiber]
-        parent_fiber.transfer
-      end
+      Fiber.yield
       nil
     end
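The dataloader now standardizes on one fiber control-flow pairing: `Fiber.yield` suspends back to whoever called `resume` (see `run_fiber` below), so no parent-fiber handle has to be threaded through thread-locals. A minimal plain-Ruby illustration:

fiber = Fiber.new do
  Fiber.yield :paused # suspend back to the caller of `resume`
  :finished
end

fiber.resume # => :paused
fiber.resume # => :finished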
 
@@ -169,7 +164,11 @@ module GraphQL
     ensure
       @pending_jobs = prev_queue
       prev_pending_keys.each do |source_instance, pending|
-        source_instance.pending.merge!(pending)
+        pending.each do |key, value|
+          if !source_instance.results.key?(key)
+            source_instance.pending[key] = value
+          end
+        end
       end
     end
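The intent of the replacement loop, shown on plain hashes (hypothetical data): a pending key is only restored when no result has been recorded for it yet, so an already-completed load is not re-enqueued.

pending  = { 1 => :promise_a, 2 => :promise_b }
results  = { 1 => "already loaded" }
restored = {}

pending.each do |key, value|
  restored[key] = value unless results.key?(key)
end

restored # => { 2 => :promise_b }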
 
@@ -183,7 +182,7 @@ module GraphQL
         while first_pass || job_fibers.any?
           first_pass = false
 
-          while (f = job_fibers.shift || spawn_job_fiber)
+          while (f = (job_fibers.shift || spawn_job_fiber))
             if f.alive?
               finished = run_fiber(f)
               if !finished
@@ -204,38 +203,37 @@ module GraphQL
             end
             join_queues(source_fibers, next_source_fibers)
           end
-
         end
       end
 
       run_fiber(manager)
 
+      if manager.alive?
+        raise "Invariant: Manager fiber didn't terminate properly."
+      end
+
+      if job_fibers.any?
+        raise "Invariant: job fibers should have exited but #{job_fibers.size} remained"
+      end
+      if source_fibers.any?
+        raise "Invariant: source fibers should have exited but #{source_fibers.size} remained"
+      end
     rescue UncaughtThrowError => e
       throw e.tag, e.value
     end
 
     def run_fiber(f)
-      if use_fiber_resume?
-        f.resume
-      else
-        f.transfer
-      end
+      f.resume
     end
 
     def spawn_fiber
       fiber_vars = get_fiber_variables
-      parent_fiber = use_fiber_resume? ? nil : Fiber.current
       Fiber.new(blocking: !@nonblocking) {
         set_fiber_variables(fiber_vars)
-        Thread.current[:parent_fiber] = parent_fiber
         yield
         # With `.transfer`, you have to explicitly pass back to the parent --
         # if the fiber is allowed to terminate normally, control is passed to the main fiber instead.
-        if parent_fiber
-          parent_fiber.transfer(true)
-        else
-          true
-        end
+        true
       }
     end
 
@@ -247,15 +245,6 @@ module GraphQL
       new_queue.clear
     end
 
-    def use_fiber_resume?
-      Fiber.respond_to?(:scheduler) &&
-        (
-          (defined?(::DummyScheduler) && Fiber.scheduler.is_a?(::DummyScheduler)) ||
-          (defined?(::Evt) && ::Evt::Scheduler.singleton_class::BACKENDS.any? { |be| Fiber.scheduler.is_a?(be) }) ||
-          (defined?(::Libev) && Fiber.scheduler.is_a?(::Libev::Scheduler))
-        )
-    end
-
     def spawn_job_fiber
       if @pending_jobs.any?
         spawn_fiber do
@@ -62,11 +62,13 @@ module GraphQL
 
     def document
       any_tokens = advance_token
-      if !any_tokens
+      defns = []
+      if any_tokens
+        defns << definition
+      else
         # Only ignored characters is not a valid document
         raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @graphql_str)
       end
-      defns = []
       while !@lexer.eos?
         defns << definition
       end
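Roughly what this guards against, assuming the public `GraphQL.parse` entry point (a hedged sketch): a document made only of ignored characters still raises, while a normal document now parses its first definition before entering the loop.

require "graphql"

GraphQL.parse("{ viewer { name } }") # => a GraphQL::Language::Nodes::Document

begin
  GraphQL.parse("# nothing but a comment")
rescue GraphQL::ParseError => e
  puts e.message # => "Unexpected end of document"
end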
@@ -276,8 +278,8 @@ module GraphQL
       when :INTERFACE
         advance_token
         name = parse_name
-        directives = parse_directives
         interfaces = parse_implements
+        directives = parse_directives
         fields_definition = parse_field_definitions
         InterfaceTypeDefinition.new(pos: loc, definition_pos: defn_loc, description: desc, name: name, directives: directives, fields: fields_definition, interfaces: interfaces, filename: @filename, source_string: @graphql_str)
       when :UNION
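In the SDL grammar, an interface's `implements` clause comes before its directives, so the parser has to read them in that order. A hedged sketch of the kind of definition this ordering makes parseable (type and directive names are made up):

require "graphql"

sdl = <<~GRAPHQL
  interface Moderated implements Node @auth(role: "admin") {
    id: ID!
  }
GRAPHQL

iface = GraphQL.parse(sdl).definitions.first
iface.interfaces.map(&:name) # => ["Node"]
iface.directives.map(&:name) # => ["auth"]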
@@ -1,4 +1,4 @@
 # frozen_string_literal: true
 module GraphQL
-  VERSION = "2.2.0"
+  VERSION = "2.2.2"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: graphql
 version: !ruby/object:Gem::Version
-  version: 2.2.0
+  version: 2.2.2
 platform: ruby
 authors:
 - Robert Mosolgo
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-12-18 00:00:00.000000000 Z
+date: 2023-12-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: racc