graphql 1.12.0 → 1.12.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of graphql might be problematic.
- checksums.yaml +4 -4
- data/lib/generators/graphql/install_generator.rb +4 -1
- data/lib/generators/graphql/loader_generator.rb +1 -0
- data/lib/generators/graphql/mutation_generator.rb +1 -0
- data/lib/generators/graphql/relay.rb +55 -0
- data/lib/generators/graphql/relay_generator.rb +4 -46
- data/lib/generators/graphql/type_generator.rb +1 -0
- data/lib/graphql.rb +2 -2
- data/lib/graphql/analysis/analyze_query.rb +1 -1
- data/lib/graphql/analysis/ast.rb +1 -1
- data/lib/graphql/backtrace/inspect_result.rb +0 -1
- data/lib/graphql/backtrace/table.rb +0 -1
- data/lib/graphql/backtrace/traced_error.rb +0 -1
- data/lib/graphql/backtrace/tracer.rb +4 -8
- data/lib/graphql/backwards_compatibility.rb +1 -1
- data/lib/graphql/base_type.rb +1 -1
- data/lib/graphql/compatibility/execution_specification.rb +1 -1
- data/lib/graphql/compatibility/lazy_execution_specification.rb +1 -1
- data/lib/graphql/compatibility/query_parser_specification.rb +1 -1
- data/lib/graphql/compatibility/schema_parser_specification.rb +1 -1
- data/lib/graphql/dataloader.rb +102 -91
- data/lib/graphql/dataloader/null_dataloader.rb +5 -5
- data/lib/graphql/dataloader/request.rb +1 -6
- data/lib/graphql/dataloader/request_all.rb +1 -4
- data/lib/graphql/dataloader/source.rb +20 -6
- data/lib/graphql/define/instance_definable.rb +1 -1
- data/lib/graphql/deprecated_dsl.rb +4 -4
- data/lib/graphql/deprecation.rb +13 -0
- data/lib/graphql/execution/errors.rb +1 -1
- data/lib/graphql/execution/execute.rb +1 -1
- data/lib/graphql/execution/interpreter.rb +3 -3
- data/lib/graphql/execution/interpreter/arguments_cache.rb +37 -14
- data/lib/graphql/execution/interpreter/resolve.rb +33 -25
- data/lib/graphql/execution/interpreter/runtime.rb +38 -74
- data/lib/graphql/execution/multiplex.rb +22 -23
- data/lib/graphql/function.rb +1 -1
- data/lib/graphql/internal_representation/document.rb +2 -2
- data/lib/graphql/internal_representation/rewrite.rb +1 -1
- data/lib/graphql/object_type.rb +0 -2
- data/lib/graphql/pagination/connection.rb +9 -0
- data/lib/graphql/pagination/connections.rb +1 -1
- data/lib/graphql/parse_error.rb +0 -1
- data/lib/graphql/query.rb +8 -2
- data/lib/graphql/query/arguments.rb +1 -1
- data/lib/graphql/query/arguments_cache.rb +0 -1
- data/lib/graphql/query/context.rb +1 -3
- data/lib/graphql/query/executor.rb +0 -1
- data/lib/graphql/query/null_context.rb +3 -2
- data/lib/graphql/query/serial_execution.rb +1 -1
- data/lib/graphql/query/variable_validation_error.rb +1 -1
- data/lib/graphql/relay/base_connection.rb +2 -2
- data/lib/graphql/relay/mutation.rb +1 -1
- data/lib/graphql/relay/node.rb +3 -3
- data/lib/graphql/relay/range_add.rb +10 -5
- data/lib/graphql/relay/type_extensions.rb +2 -2
- data/lib/graphql/schema.rb +14 -13
- data/lib/graphql/schema/argument.rb +61 -0
- data/lib/graphql/schema/field.rb +12 -7
- data/lib/graphql/schema/find_inherited_value.rb +3 -1
- data/lib/graphql/schema/input_object.rb +6 -2
- data/lib/graphql/schema/member/has_arguments.rb +43 -56
- data/lib/graphql/schema/member/has_fields.rb +1 -4
- data/lib/graphql/schema/member/instrumentation.rb +0 -1
- data/lib/graphql/schema/middleware_chain.rb +1 -1
- data/lib/graphql/schema/resolver.rb +28 -1
- data/lib/graphql/schema/timeout_middleware.rb +1 -1
- data/lib/graphql/schema/validation.rb +2 -2
- data/lib/graphql/static_validation/validator.rb +4 -2
- data/lib/graphql/subscriptions/event.rb +0 -1
- data/lib/graphql/subscriptions/instrumentation.rb +0 -1
- data/lib/graphql/subscriptions/serialize.rb +0 -1
- data/lib/graphql/subscriptions/subscription_root.rb +1 -1
- data/lib/graphql/tracing/skylight_tracing.rb +1 -1
- data/lib/graphql/upgrader/member.rb +1 -1
- data/lib/graphql/upgrader/schema.rb +1 -1
- data/lib/graphql/version.rb +1 -1
- data/readme.md +1 -1
- metadata +22 -90
@@ -7,15 +7,15 @@ module GraphQL
     # The Dataloader interface isn't public, but it enables
     # simple internal code while adding the option to add Dataloader.
     class NullDataloader < Dataloader
-      def enqueue
-        yield
-      end
-
       # These are all no-ops because code was
       # executed sychronously.
       def run; end
      def yield; end
+
+      def append_job
+        yield
+        nil
+      end
     end
   end
 end

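The removed `enqueue` is replaced here by `append_job`, which the runtime hunks further down use to hand work to the dataloader. As a rough illustration of the contract this hunk shows (run the block, return `nil`), here is a stand-in class with a hypothetical name; a real `Dataloader` would queue the block and run it later on its own Fiber rather than immediately:

    # InlineDataloader is a made-up stand-in mirroring NullDataloader#append_job.
    class InlineDataloader
      def append_job
        yield   # run the job right away, synchronously
        nil     # append_job has no meaningful return value
      end
    end

    InlineDataloader.new.append_job { puts "resolved synchronously" }
    # prints "resolved synchronously" and returns nil
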
@@ -3,9 +3,6 @@
 module GraphQL
   class Dataloader
     class Source
-      # @api private
-      attr_reader :results
-
       # Called by {Dataloader} to prepare the {Source}'s internal state
       # @api private
       def setup(dataloader)

@@ -35,11 +32,11 @@ module GraphQL
       # @return [Object] The result from {#fetch} for `key`. If `key` hasn't been loaded yet, the Fiber will yield until it's loaded.
       def load(key)
         if @results.key?(key)
-          @results[key]
+          result_for(key)
         else
           @pending_keys << key
           sync
-          @results[key]
+          result_for(key)
         end
       end

@@ -52,7 +49,7 @@ module GraphQL
           sync
         end

-        keys.map { |k| @results[k] }
+        keys.map { |k| result_for(k) }
       end

       # Subclasses must implement this method to return a value for each of `keys`

@@ -86,8 +83,25 @@ module GraphQL
         fetch_keys.each_with_index do |key, idx|
           @results[key] = results[idx]
         end
+      rescue StandardError => error
+        fetch_keys.each { |key| @results[key] = error }
+      ensure
         nil
       end
+
+      private
+
+      # Reads and returns the result for the key from the internal cache, or raises an error if the result was an error
+      # @param key [Object] key passed to {#load} or {#load_all}
+      # @return [Object] The result from {#fetch} for `key`.
+      # @api private
+      def result_for(key)
+        result = @results[key]
+
+        raise result if result.class <= StandardError
+
+        result
+      end
     end
   end
 end

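The `rescue`/`ensure` added above stores a failed batch's error under every key it was fetching, and the new private `result_for` re-raises that error when the key is read back through `load`/`load_all`. A simplified, self-contained sketch of that behavior (the method and variable names below are illustrative, not the gem's API):

    results = {}
    keys = [:a, :b]

    begin
      raise ArgumentError, "batch fetch failed"   # stands in for fetch(keys) raising
    rescue StandardError => error
      keys.each { |k| results[k] = error }        # every key in the batch gets the error
    end

    def result_for(results, key)
      result = results[key]
      raise result if result.class <= StandardError   # re-raise cached errors
      result
    end

    begin
      result_for(results, :a)
    rescue ArgumentError => e
      puts "reading :a re-raised: #{e.message}"
    end
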
@@ -11,7 +11,7 @@ module GraphQL
       end

       if deprecated_caller
-        warn <<-ERR
+        GraphQL::Deprecation.warn <<-ERR
 #{self}.define will be removed in GraphQL-Ruby 2.0; use a class-based definition instead. See https://graphql-ruby.org/schema/class_based_api.html.
  -> called from #{deprecated_caller}
 ERR

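This hunk and several below it replace bare `warn` calls with `GraphQL::Deprecation.warn`, backed by the new `data/lib/graphql/deprecation.rb` in the file list. That file's body is not included in this diff, so the following is only an assumed minimal shape -- a single module that forwards to `Kernel.warn`, giving one seam to stub or silence deprecation output in tests:

    # Hypothetical sketch; the real deprecation.rb is not shown in this diff.
    module GraphQL
      module Deprecation
        def self.warn(message)
          Kernel.warn(message)
        end
      end
    end

    GraphQL::Deprecation.warn "Thing.define will be removed in GraphQL-Ruby 2.0"
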
@@ -4,13 +4,13 @@ module GraphQL
   #
   # 1. Scoped by file (CRuby only), add to the top of the file:
   #
-  #      using GraphQL::
+  #      using GraphQL::DeprecationDSL
   #
   # (This is a "refinement", there are also other ways to scope it.)
   #
   # 2. Global application, add before schema definition:
   #
-  #      GraphQL::
+  #      GraphQL::DeprecationDSL.activate
   #
   module DeprecatedDSL
     TYPE_CLASSES = [

@@ -24,7 +24,7 @@

     def self.activate
       deprecated_caller = caller(1, 1).first
-      warn "DeprecatedDSL will be removed from GraphQL-Ruby 2.0, use `.to_non_null_type` instead of `!` and remove `.activate` from #{deprecated_caller}"
+      GraphQL::Deprecation.warn "DeprecatedDSL will be removed from GraphQL-Ruby 2.0, use `.to_non_null_type` instead of `!` and remove `.activate` from #{deprecated_caller}"
       TYPE_CLASSES.each { |c| c.extend(Methods) }
       GraphQL::Schema::List.include(Methods)
       GraphQL::Schema::NonNull.include(Methods)

@@ -33,7 +33,7 @@ module GraphQL
   module Methods
     def !
       deprecated_caller = caller(1, 1).first
-      warn "DeprecatedDSL will be removed from GraphQL-Ruby 2.0, use `.to_non_null_type` instead of `!` at #{deprecated_caller}"
+      GraphQL::Deprecation.warn "DeprecatedDSL will be removed from GraphQL-Ruby 2.0, use `.to_non_null_type` instead of `!` at #{deprecated_caller}"
       to_non_null_type
     end
   end

@@ -20,7 +20,7 @@
     def self.use(schema)
       if schema.plugins.any? { |(plugin, kwargs)| plugin == self }
         definition_line = caller(2, 1).first
-        warn("GraphQL::Execution::Errors is now installed by default, remove `use GraphQL::Execution::Errors` from #{definition_line}")
+        GraphQL::Deprecation.warn("GraphQL::Execution::Errors is now installed by default, remove `use GraphQL::Execution::Errors` from #{definition_line}")
       end
       schema.error_handler = self.new(schema)
     end

@@ -25,7 +25,7 @@ module GraphQL
     end

     def execute(ast_operation, root_type, query)
-      warn "#{self.class} will be removed in GraphQL-Ruby 2.0, please upgrade to the Interpreter: https://graphql-ruby.org/queries/interpreter.html"
+      GraphQL::Deprecation.warn "#{self.class} will be removed in GraphQL-Ruby 2.0, please upgrade to the Interpreter: https://graphql-ruby.org/queries/interpreter.html"
       result = resolve_root_selection(query)
       lazy_resolve_root_selection(result, **{query: query})
       GraphQL::Execution::Flatten.call(query.context)

@@ -25,7 +25,7 @@ module GraphQL
       def self.use(schema_class)
         if schema_class.interpreter?
           definition_line = caller(2, 1).first
-          warn("GraphQL::Execution::Interpreter is now the default; remove `use GraphQL::Execution::Interpreter` from the schema definition (#{definition_line})")
+          GraphQL::Deprecation.warn("GraphQL::Execution::Interpreter is now the default; remove `use GraphQL::Execution::Interpreter` from the schema definition (#{definition_line})")
         else
           schema_class.query_execution_strategy(self)
           schema_class.mutation_execution_strategy(self)

@@ -95,7 +95,7 @@ module GraphQL
         end
         final_values.compact!
         tracer.trace("execute_query_lazy", {multiplex: multiplex, query: query}) do
-          Interpreter::Resolve.resolve_all(final_values)
+          Interpreter::Resolve.resolve_all(final_values, multiplex.dataloader)
         end
         queries.each do |query|
           runtime = query.context.namespace(:interpreter)[:runtime]

@@ -113,7 +113,7 @@ module GraphQL
       def initialize(value:, path:, field:)
         message = "Failed to build a GraphQL list result for field `#{field.path}` at path `#{path.join(".")}`.\n".dup

-        message << "Expected `#{value.inspect}` to implement `.each` to satisfy the GraphQL return type `#{field.type.to_type_signature}`.\n"
+        message << "Expected `#{value.inspect}` (#{value.class}) to implement `.each` to satisfy the GraphQL return type `#{field.type.to_type_signature}`.\n"

         if field.connection?
           message << "\nThis field was treated as a Relay-style connection; add `connection: false` to the `field(...)` to disable this behavior."

@@ -6,17 +6,21 @@ module GraphQL
       class ArgumentsCache
         def initialize(query)
           @query = query
+          @dataloader = query.context.dataloader
           @storage = Hash.new do |h, ast_node|
             h[ast_node] = Hash.new do |h2, arg_owner|
               h2[arg_owner] = Hash.new do |h3, parent_object|
-
-
-
-
+                dataload_for(ast_node, arg_owner, parent_object) do |kwarg_arguments|
+                  h3[parent_object] = @query.schema.after_lazy(kwarg_arguments) do |resolved_args|
+                    h3[parent_object] = resolved_args
+                  end
+                end

-                h3
-                #
-                h3[parent_object] =
+                if !h3.key?(parent_object)
+                  # TODO should i bother putting anything here?
+                  h3[parent_object] = NO_ARGUMENTS
+                else
+                  h3[parent_object]
+                end
               end
             end
           end

@@ -25,6 +29,25 @@ module GraphQL

         def fetch(ast_node, argument_owner, parent_object)
           @storage[ast_node][argument_owner][parent_object]
+          # If any jobs were enqueued, run them now,
+          # since this might have been called outside of execution.
+          # (The jobs are responsible for updating `result` in-place.)
+          @dataloader.run
+          # Ack, the _hash_ is updated, but the key is eventually
+          # overridden with an immutable arguments instance.
+          # The first call queues up the job,
+          # then this call fetches the result.
+          # TODO this should be better, find a solution
+          # that works with merging the runtime.rb code
+          @storage[ast_node][argument_owner][parent_object]
+        end
+
+        # @yield [Interpreter::Arguments, Lazy<Interpreter::Arguments>] The finally-loaded arguments
+        def dataload_for(ast_node, argument_owner, parent_object, &block)
+          # First, normalize all AST or Ruby values to a plain Ruby hash
+          args_hash = self.class.prepare_args_hash(@query, ast_node)
+          argument_owner.coerce_arguments(parent_object, args_hash, @query.context, &block)
+          nil
         end

         private

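The comments added to `fetch` describe a two-phase pattern: the hash's default block only enqueues a job (via `dataload_for`), `@dataloader.run` drains the queue so the job writes the coerced arguments into the hash in place, and the second lookup returns the finished value. Below is a self-contained toy model of that flow, with made-up class and variable names:

    # TinyDataloader is a minimal stand-in job queue, not the gem's Dataloader.
    class TinyDataloader
      def initialize
        @jobs = []
      end

      def append_job(&block)
        @jobs << block
        nil
      end

      def run
        @jobs.shift.call until @jobs.empty?
      end
    end

    dataloader = TinyDataloader.new

    storage = Hash.new do |h, key|
      # First access: only queue a job that will fill the key in place.
      dataloader.append_job { h[key] = "coerced args for #{key}" }
      nil
    end

    storage[:some_field]   # => nil, but a job is now queued
    dataloader.run         # the job writes storage[:some_field]
    storage[:some_field]   # => "coerced args for some_field"
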
@@ -33,7 +56,7 @@ module GraphQL

         NO_VALUE_GIVEN = Object.new

-        def prepare_args_hash(ast_arg_or_hash_or_value)
+        def self.prepare_args_hash(query, ast_arg_or_hash_or_value)
           case ast_arg_or_hash_or_value
           when Hash
             if ast_arg_or_hash_or_value.empty?

@@ -41,27 +64,27 @@ module GraphQL
             end
             args_hash = {}
             ast_arg_or_hash_or_value.each do |k, v|
-              args_hash[k] = prepare_args_hash(v)
+              args_hash[k] = prepare_args_hash(query, v)
             end
             args_hash
           when Array
-            ast_arg_or_hash_or_value.map { |v| prepare_args_hash(v) }
+            ast_arg_or_hash_or_value.map { |v| prepare_args_hash(query, v) }
           when GraphQL::Language::Nodes::Field, GraphQL::Language::Nodes::InputObject, GraphQL::Language::Nodes::Directive
             if ast_arg_or_hash_or_value.arguments.empty?
               return NO_ARGUMENTS
             end
             args_hash = {}
             ast_arg_or_hash_or_value.arguments.each do |arg|
-              v = prepare_args_hash(arg.value)
+              v = prepare_args_hash(query, arg.value)
               if v != NO_VALUE_GIVEN
                 args_hash[arg.name] = v
               end
             end
             args_hash
           when GraphQL::Language::Nodes::VariableIdentifier
-            if
-            variable_value =
-            prepare_args_hash(variable_value)
+            if query.variables.key?(ast_arg_or_hash_or_value.name)
+              variable_value = query.variables[ast_arg_or_hash_or_value.name]
+              prepare_args_hash(query, variable_value)
             else
               NO_VALUE_GIVEN
             end

@@ -6,10 +6,9 @@ module GraphQL
       module Resolve
         # Continue field results in `results` until there's nothing else to continue.
         # @return [void]
-        def self.resolve_all(results)
-
-
-        end
+        def self.resolve_all(results, dataloader)
+          dataloader.append_job { resolve(results, dataloader) }
+          nil
         end

         # After getting `results` back from an interpreter evaluation,

@@ -24,33 +23,42 @@ module GraphQL
         # return {Lazy} instances if there's more work to be done,
         # or return {Hash}/{Array} if the query should be continued.
         #
-        # @
-
-
+        # @return [void]
+        def self.resolve(results, dataloader)
+          # There might be pending jobs here that _will_ write lazies
+          # into the result hash. We should run them out, so we
+          # can be sure that all lazies will be present in the result hashes.
+          # A better implementation would somehow interleave (or unify)
+          # these approaches.
+          dataloader.run
           next_results = []
-
-          # Work through the queue until it's empty
-          while results.size > 0
+          while results.any?
             result_value = results.shift
-
-
-
-            end
-
-            if result_value.is_a?(Lazy)
-              # Since this field returned another lazy,
-              # add it to the same queue
-              results << result_value
-            elsif result_value.is_a?(Hash)
-              # This is part of the next level, add it
-              next_results.concat(result_value.values)
+            if result_value.is_a?(Hash)
+              results.concat(result_value.values)
+              next
             elsif result_value.is_a?(Array)
-
-
+              results.concat(result_value)
+              next
+            elsif result_value.is_a?(Lazy)
+              loaded_value = result_value.value
+              if loaded_value.is_a?(Lazy)
+                # Since this field returned another lazy,
+                # add it to the same queue
+                results << loaded_value
+              elsif loaded_value.is_a?(Hash) || loaded_value.is_a?(Array)
+                # Add these values in wholesale --
+                # they might be modified by later work in the dataloader.
+                next_results << loaded_value
+              end
            end
          end

-          next_results
+          if next_results.any?
+            dataloader.append_job { resolve(next_results, dataloader) }
+          end
+
+          nil
        end
      end
    end

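The rewritten `resolve` drains a work queue: hashes and arrays are flattened back into the same queue, each `Lazy` is forced with `.value`, and any hash or array a lazy returns is deferred to a follow-up pass (enqueued on the dataloader). Below is a toy version of that traversal under simplified assumptions -- a hand-rolled `Lazy` stand-in and plain recursion instead of `dataloader.append_job`:

    # Hypothetical stand-in for a lazily-computed value.
    Lazy = Struct.new(:thunk) do
      def value
        thunk.call
      end
    end

    def resolve_level(results)
      next_results = []
      while results.any?
        item = results.shift
        case item
        when Hash  then results.concat(item.values)   # flatten into this pass
        when Array then results.concat(item)
        when Lazy
          loaded = item.value
          if loaded.is_a?(Lazy)
            results << loaded                          # still lazy: same pass
          elsif loaded.is_a?(Hash) || loaded.is_a?(Array)
            next_results << loaded                     # container: next pass
          end
        end
      end
      resolve_level(next_results) if next_results.any?
      nil
    end

    tree = { "user" => Lazy.new(-> { { "posts" => [Lazy.new(-> { "loaded!" })] } }) }
    resolve_level([tree])   # outer lazy forced now, nested lazy on the next pass
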
@@ -56,17 +56,18 @@ module GraphQL
           # Root .authorized? returned false.
           write_in_response(path, nil)
         else
-
-          @progress_path = path
-          @progress_scoped_context = context.scoped_context
-          @progress_object = object_proxy
-          @progress_object_type = root_type
-          @progress_index = nil
-          @progress_is_eager_selection = root_op_type == "mutation"
-          @progress_selections = gather_selections(object_proxy, root_type, root_operation.selections)
-
+          gathered_selections = gather_selections(object_proxy, root_type, root_operation.selections)
           # Make the first fiber which will begin execution
-
+          @dataloader.append_job {
+            evaluate_selections(
+              path,
+              context.scoped_context,
+              object_proxy,
+              root_type,
+              root_op_type == "mutation",
+              gathered_selections,
+            )
+          }
         end
         delete_interpreter_context(:current_path)
         delete_interpreter_context(:current_field)

@@ -75,32 +76,6 @@ module GraphQL
         nil
       end

-      # Use `@dataloader` to enqueue a fiber that will pick up from the current point.
-      # @return [void]
-      def enqueue_selections_fiber
-        # Read these into local variables so that later assignments don't affect the block below.
-        path = @progress_path
-        scoped_context = @progress_scoped_context
-        owner_object = @progress_object
-        owner_type = @progress_object_type
-        idx = @progress_index
-        is_eager_selection = @progress_is_eager_selection
-        gathered_selections = @progress_selections
-
-        @dataloader.enqueue {
-          evaluate_selections(
-            path,
-            scoped_context,
-            owner_object,
-            owner_type,
-            is_eager_selection: is_eager_selection,
-            after: idx,
-            gathered_selections: gathered_selections,
-          )
-        }
-        nil
-      end
-
       def gather_selections(owner_object, owner_type, selections, selections_by_name = {})
         selections.each do |node|
           # Skip gathering this if the directive says so

@@ -159,42 +134,22 @@ module GraphQL
       NO_ARGS = {}.freeze

       # @return [void]
-      def evaluate_selections(path, scoped_context, owner_object, owner_type, is_eager_selection
+      def evaluate_selections(path, scoped_context, owner_object, owner_type, is_eager_selection, gathered_selections)
         set_all_interpreter_context(owner_object, nil, nil, path)

-        @progress_path = path
-        @progress_scoped_context = scoped_context
-        @progress_object = owner_object
-        @progress_object_type = owner_type
-        @progress_index = nil
-        @progress_is_eager_selection = is_eager_selection
-        @progress_selections = gathered_selections
-
-        # Track `idx` manually to avoid an allocation on this hot path
-        idx = 0
         gathered_selections.each do |result_name, field_ast_nodes_or_ast_node|
-
-
-
-
-
-          if after && prev_idx <= after
-            next
-          end
-          @progress_index = prev_idx
-          # This is how the current runtime gives itself to `dataloader`
-          # so that the dataloader can enqueue another fiber to resume if needed.
-          @dataloader.current_runtime = self
-          evaluate_selection(path, result_name, field_ast_nodes_or_ast_node, scoped_context, owner_object, owner_type, is_eager_selection)
-          # The dataloader knows if ^^ that selection halted and later selections were executed in another fiber.
-          # If that's the case, then don't continue execution here.
-          if @dataloader.yielded?
-            break
-          end
+          @dataloader.append_job {
+            evaluate_selection(
+              path, result_name, field_ast_nodes_or_ast_node, scoped_context, owner_object, owner_type, is_eager_selection
+            )
+          }
         end
+
         nil
       end

+      attr_reader :progress_path
+
       # @return [void]
       def evaluate_selection(path, result_name, field_ast_nodes_or_ast_node, scoped_context, owner_object, owner_type, is_eager_field)
         # As a performance optimization, the hash key will be a `Node` if

@@ -241,13 +196,21 @@ module GraphQL
           object = authorized_new(field_defn.owner, object, context, next_path)
         end

-
-
-
-
-
+        total_args_count = field_defn.arguments.size
+        if total_args_count == 0
+          kwarg_arguments = GraphQL::Execution::Interpreter::Arguments::EMPTY
+          evaluate_selection_with_args(kwarg_arguments, field_defn, next_path, ast_node, field_ast_nodes, scoped_context, owner_type, object, is_eager_field)
+        else
+          # TODO remove all arguments(...) usages?
+          @query.arguments_cache.dataload_for(ast_node, field_defn, object) do |resolved_arguments|
+            evaluate_selection_with_args(resolved_arguments, field_defn, next_path, ast_node, field_ast_nodes, scoped_context, owner_type, object, is_eager_field)
+          end
         end
+      end

+      def evaluate_selection_with_args(kwarg_arguments, field_defn, next_path, ast_node, field_ast_nodes, scoped_context, owner_type, object, is_eager_field) # rubocop:disable Metrics/ParameterLists
+        context.scoped_context = scoped_context
+        return_type = field_defn.type
         after_lazy(kwarg_arguments, owner: owner_type, field: field_defn, path: next_path, ast_node: ast_node, scoped_context: context.scoped_context, owner_object: object, arguments: kwarg_arguments) do |resolved_arguments|
           if resolved_arguments.is_a?(GraphQL::ExecutionError) || resolved_arguments.is_a?(GraphQL::UnauthorizedError)
             continue_value(next_path, resolved_arguments, owner_type, field_defn, return_type.non_null?, ast_node)

|
|
327
290
|
# all of its child fields before moving on to the next root mutation field.
|
328
291
|
# (Subselections of this mutation will still be resolved level-by-level.)
|
329
292
|
if is_eager_field
|
330
|
-
Interpreter::Resolve.resolve_all([field_result])
|
293
|
+
Interpreter::Resolve.resolve_all([field_result], @dataloader)
|
294
|
+
else
|
295
|
+
# Return this from `after_lazy` because it might be another lazy that needs to be resolved
|
296
|
+
field_result
|
331
297
|
end
|
332
|
-
|
333
|
-
nil
|
334
298
|
end
|
335
299
|
end
|
336
300
|
|
@@ -417,7 +381,7 @@ module GraphQL
             response_hash = {}
             write_in_response(path, response_hash)
             gathered_selections = gather_selections(continue_value, current_type, next_selections)
-            evaluate_selections(path, context.scoped_context, continue_value, current_type,
+            evaluate_selections(path, context.scoped_context, continue_value, current_type, false, gathered_selections)
             response_hash
           end
         end