graphql 1.11.12 → 1.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of graphql might be problematic.
- checksums.yaml +4 -4
- data/lib/generators/graphql/install_generator.rb +5 -5
- data/lib/generators/graphql/relay_generator.rb +63 -0
- data/lib/generators/graphql/templates/base_connection.erb +8 -0
- data/lib/generators/graphql/templates/base_edge.erb +8 -0
- data/lib/generators/graphql/templates/node_type.erb +9 -0
- data/lib/generators/graphql/templates/object.erb +1 -1
- data/lib/generators/graphql/templates/query_type.erb +1 -3
- data/lib/generators/graphql/templates/schema.erb +8 -35
- data/lib/graphql/analysis/analyze_query.rb +7 -0
- data/lib/graphql/analysis/ast/visitor.rb +9 -1
- data/lib/graphql/analysis/ast.rb +11 -2
- data/lib/graphql/backtrace/legacy_tracer.rb +56 -0
- data/lib/graphql/backtrace/table.rb +22 -2
- data/lib/graphql/backtrace/tracer.rb +40 -9
- data/lib/graphql/backtrace.rb +28 -19
- data/lib/graphql/backwards_compatibility.rb +1 -0
- data/lib/graphql/compatibility/execution_specification.rb +1 -0
- data/lib/graphql/compatibility/lazy_execution_specification.rb +2 -0
- data/lib/graphql/compatibility/query_parser_specification.rb +2 -0
- data/lib/graphql/compatibility/schema_parser_specification.rb +2 -0
- data/lib/graphql/dataloader/null_dataloader.rb +21 -0
- data/lib/graphql/dataloader/request.rb +24 -0
- data/lib/graphql/dataloader/request_all.rb +22 -0
- data/lib/graphql/dataloader/source.rb +93 -0
- data/lib/graphql/dataloader.rb +197 -0
- data/lib/graphql/define/assign_global_id_field.rb +1 -1
- data/lib/graphql/define/instance_definable.rb +32 -2
- data/lib/graphql/define/type_definer.rb +5 -5
- data/lib/graphql/deprecated_dsl.rb +5 -0
- data/lib/graphql/enum_type.rb +2 -0
- data/lib/graphql/execution/errors.rb +4 -0
- data/lib/graphql/execution/execute.rb +7 -0
- data/lib/graphql/execution/interpreter/arguments.rb +51 -14
- data/lib/graphql/execution/interpreter/handles_raw_value.rb +0 -7
- data/lib/graphql/execution/interpreter/runtime.rb +210 -124
- data/lib/graphql/execution/interpreter.rb +10 -6
- data/lib/graphql/execution/multiplex.rb +20 -6
- data/lib/graphql/function.rb +4 -0
- data/lib/graphql/input_object_type.rb +2 -0
- data/lib/graphql/interface_type.rb +3 -1
- data/lib/graphql/language/document_from_schema_definition.rb +50 -23
- data/lib/graphql/language/nodes.rb +0 -5
- data/lib/graphql/language/visitor.rb +0 -1
- data/lib/graphql/object_type.rb +2 -0
- data/lib/graphql/pagination/connection.rb +5 -1
- data/lib/graphql/pagination/connections.rb +6 -16
- data/lib/graphql/query/context.rb +4 -0
- data/lib/graphql/query/serial_execution.rb +1 -0
- data/lib/graphql/query/validation_pipeline.rb +1 -1
- data/lib/graphql/query.rb +2 -0
- data/lib/graphql/relay/base_connection.rb +7 -0
- data/lib/graphql/relay/connection_instrumentation.rb +4 -4
- data/lib/graphql/relay/connection_type.rb +1 -1
- data/lib/graphql/relay/mutation.rb +1 -0
- data/lib/graphql/relay/node.rb +3 -0
- data/lib/graphql/relay/type_extensions.rb +2 -0
- data/lib/graphql/scalar_type.rb +2 -0
- data/lib/graphql/schema/argument.rb +30 -10
- data/lib/graphql/schema/build_from_definition.rb +145 -58
- data/lib/graphql/schema/directive/flagged.rb +57 -0
- data/lib/graphql/schema/directive.rb +76 -0
- data/lib/graphql/schema/enum.rb +3 -0
- data/lib/graphql/schema/enum_value.rb +13 -7
- data/lib/graphql/schema/field/connection_extension.rb +3 -2
- data/lib/graphql/schema/field.rb +28 -10
- data/lib/graphql/schema/input_object.rb +36 -28
- data/lib/graphql/schema/interface.rb +1 -0
- data/lib/graphql/schema/member/base_dsl_methods.rb +1 -0
- data/lib/graphql/schema/member/build_type.rb +3 -3
- data/lib/graphql/schema/member/has_arguments.rb +24 -6
- data/lib/graphql/schema/member/has_deprecation_reason.rb +25 -0
- data/lib/graphql/schema/member/has_directives.rb +98 -0
- data/lib/graphql/schema/member/has_validators.rb +31 -0
- data/lib/graphql/schema/member/type_system_helpers.rb +1 -1
- data/lib/graphql/schema/member.rb +4 -0
- data/lib/graphql/schema/object.rb +11 -0
- data/lib/graphql/schema/printer.rb +5 -4
- data/lib/graphql/schema/resolver/has_payload_type.rb +2 -0
- data/lib/graphql/schema/resolver.rb +7 -0
- data/lib/graphql/schema/subscription.rb +19 -1
- data/lib/graphql/schema/timeout_middleware.rb +2 -0
- data/lib/graphql/schema/validation.rb +2 -0
- data/lib/graphql/schema/validator/exclusion_validator.rb +31 -0
- data/lib/graphql/schema/validator/format_validator.rb +49 -0
- data/lib/graphql/schema/validator/inclusion_validator.rb +33 -0
- data/lib/graphql/schema/validator/length_validator.rb +57 -0
- data/lib/graphql/schema/validator/numericality_validator.rb +71 -0
- data/lib/graphql/schema/validator/required_validator.rb +68 -0
- data/lib/graphql/schema/validator.rb +163 -0
- data/lib/graphql/schema.rb +72 -49
- data/lib/graphql/static_validation/base_visitor.rb +0 -3
- data/lib/graphql/static_validation/rules/fields_will_merge.rb +4 -4
- data/lib/graphql/static_validation/rules/fragments_are_finite.rb +2 -2
- data/lib/graphql/static_validation/validation_context.rb +1 -6
- data/lib/graphql/static_validation/validator.rb +12 -14
- data/lib/graphql/subscriptions.rb +17 -20
- data/lib/graphql/tracing/appoptics_tracing.rb +3 -1
- data/lib/graphql/tracing/platform_tracing.rb +3 -1
- data/lib/graphql/tracing/skylight_tracing.rb +1 -1
- data/lib/graphql/tracing.rb +2 -2
- data/lib/graphql/types/relay/base_connection.rb +2 -92
- data/lib/graphql/types/relay/base_edge.rb +2 -35
- data/lib/graphql/types/relay/connection_behaviors.rb +123 -0
- data/lib/graphql/types/relay/default_relay.rb +27 -0
- data/lib/graphql/types/relay/edge_behaviors.rb +42 -0
- data/lib/graphql/types/relay/has_node_field.rb +41 -0
- data/lib/graphql/types/relay/has_nodes_field.rb +41 -0
- data/lib/graphql/types/relay/node.rb +2 -4
- data/lib/graphql/types/relay/node_behaviors.rb +15 -0
- data/lib/graphql/types/relay/node_field.rb +1 -19
- data/lib/graphql/types/relay/nodes_field.rb +1 -19
- data/lib/graphql/types/relay/page_info.rb +2 -14
- data/lib/graphql/types/relay/page_info_behaviors.rb +25 -0
- data/lib/graphql/types/relay.rb +11 -3
- data/lib/graphql/union_type.rb +2 -0
- data/lib/graphql/upgrader/member.rb +1 -0
- data/lib/graphql/upgrader/schema.rb +1 -0
- data/lib/graphql/version.rb +1 -1
- data/lib/graphql.rb +38 -4
- metadata +34 -9
- data/lib/graphql/types/relay/base_field.rb +0 -22
- data/lib/graphql/types/relay/base_interface.rb +0 -29
- data/lib/graphql/types/relay/base_object.rb +0 -26
data/lib/graphql/dataloader/request_all.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module GraphQL
+  class Dataloader
+    # @see Source#request_all which returns an instance of this.
+    class RequestAll < Request
+      def initialize(source, keys)
+        @source = source
+        @keys = keys
+      end
+
+      # Call this method to cause the current Fiber to wait for the results of this request.
+      #
+      # @return [Array<Object>] One object for each of `keys`
+      def load
+        if @keys.any? { |k| !@source.results.key?(k) }
+          @source.sync
+        end
+        @keys.map { |k| @source.results[k] }
+      end
+    end
+  end
+end
data/lib/graphql/dataloader/source.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module GraphQL
+  class Dataloader
+    class Source
+      # @api private
+      attr_reader :results
+
+      # Called by {Dataloader} to prepare the {Source}'s internal state
+      # @api private
+      def setup(dataloader)
+        @pending_keys = []
+        @results = {}
+        @dataloader = dataloader
+      end
+
+      attr_reader :dataloader
+
+      # @return [Dataloader::Request] a pending request for a value from `key`. Call `.load` on that object to wait for the result.
+      def request(key)
+        if !@results.key?(key)
+          @pending_keys << key
+        end
+        Dataloader::Request.new(self, key)
+      end
+
+      # @return [Dataloader::Request] a pending request for a values from `keys`. Call `.load` on that object to wait for the results.
+      def request_all(keys)
+        pending_keys = keys.select { |k| !@results.key?(k) }
+        @pending_keys.concat(pending_keys)
+        Dataloader::RequestAll.new(self, keys)
+      end
+
+      # @param key [Object] A loading key which will be passed to {#fetch} if it isn't already in the internal cache.
+      # @return [Object] The result from {#fetch} for `key`. If `key` hasn't been loaded yet, the Fiber will yield until it's loaded.
+      def load(key)
+        if @results.key?(key)
+          @results[key]
+        else
+          @pending_keys << key
+          sync
+          @results[key]
+        end
+      end
+
+      # @param keys [Array<Object>] Loading keys which will be passed to `#fetch` (or read from the internal cache).
+      # @return [Object] The result from {#fetch} for `keys`. If `keys` haven't been loaded yet, the Fiber will yield until they're loaded.
+      def load_all(keys)
+        if keys.any? { |k| !@results.key?(k) }
+          pending_keys = keys.select { |k| !@results.key?(k) }
+          @pending_keys.concat(pending_keys)
+          sync
+        end
+
+        keys.map { |k| @results[k] }
+      end
+
+      # Subclasses must implement this method to return a value for each of `keys`
+      # @param keys [Array<Object>] keys passed to {#load}, {#load_all}, {#request}, or {#request_all}
+      # @return [Array<Object>] A loaded value for each of `keys`. The array must match one-for-one to the list of `keys`.
+      def fetch(keys)
+        # somehow retrieve these from the backend
+        raise "Implement `#{self.class}#fetch(#{keys.inspect}) to return a record for each of the keys"
+      end
+
+      # Wait for a batch, if there's anything to batch.
+      # Then run the batch and update the cache.
+      # @return [void]
+      def sync
+        @dataloader.yield
+      end
+
+      # @return [Boolean] True if this source has any pending requests for data.
+      def pending?
+        @pending_keys.any?
+      end
+
+      # Called by {GraphQL::Dataloader} to resolve and pending requests to this source.
+      # @api private
+      # @return [void]
+      def run_pending_keys
+        return if @pending_keys.empty?
+        fetch_keys = @pending_keys.uniq
+        @pending_keys = []
+        results = fetch(fetch_keys)
+        fetch_keys.each_with_index do |key, idx|
+          @results[key] = results[idx]
+        end
+        nil
+      end
+    end
+  end
+end
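
The Source API above is the batching primitive behind the new Dataloader: subclasses override #fetch(keys), callers go through #load, #load_all, #request, or #request_all, and #sync parks the calling Fiber until the dataloader runs the batch. A minimal sketch of a custom source, assuming an ActiveRecord-style `User.where(id: ...)` lookup (the model and source names here are illustrative, not part of this release):

    # Batches user lookups by primary key.
    class UserByIdSource < GraphQL::Dataloader::Source
      # `keys` is the de-duplicated list of ids requested since the last batch ran.
      def fetch(keys)
        users = User.where(id: keys).index_by(&:id)
        # Must return one value per key, in the same order as `keys` (nil for misses).
        keys.map { |id| users[id] }
      end
    end

Inside a resolver, `dataloader.with(UserByIdSource).load(object.author_id)` would then collect ids across the current batch and trigger a single fetch.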

data/lib/graphql/dataloader.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require "graphql/dataloader/null_dataloader"
+require "graphql/dataloader/request"
+require "graphql/dataloader/request_all"
+require "graphql/dataloader/source"
+
+module GraphQL
+  # This plugin supports Fiber-based concurrency, along with {GraphQL::Dataloader::Source}.
+  #
+  # @example Installing Dataloader
+  #
+  #   class MySchema < GraphQL::Schema
+  #     use GraphQL::Dataloader
+  #   end
+  #
+  # @example Waiting for batch-loaded data in a GraphQL field
+  #
+  #   field :team, Types::Team, null: true
+  #
+  #   def team
+  #     dataloader.with(Sources::Record, Team).load(object.team_id)
+  #   end
+  #
+  class Dataloader
+    def self.use(schema)
+      schema.dataloader_class = self
+    end
+
+    def initialize(multiplex_context)
+      @context = multiplex_context
+      @source_cache = Hash.new { |h, source_class| h[source_class] = Hash.new { |h2, batch_parameters|
+        source = source_class.new(*batch_parameters)
+        source.setup(self)
+        h2[batch_parameters] = source
+      }
+      }
+      @waiting_fibers = []
+      @yielded_fibers = Set.new
+    end
+
+    # @return [Hash] the {Multiplex} context
+    attr_reader :context
+
+    # @api private
+    attr_reader :yielded_fibers
+
+    # Add some work to this dataloader to be scheduled later.
+    # @param block Some work to enqueue
+    # @return [void]
+    def enqueue(&block)
+      @waiting_fibers << Fiber.new {
+        begin
+          yield
+        rescue StandardError => exception
+          exception
+        end
+      }
+      nil
+    end
+
+    # Tell the dataloader that this fiber is waiting for data.
+    #
+    # Dataloader will resume the fiber after the requested data has been loaded (by another Fiber).
+    #
+    # @return [void]
+    def yield
+      Fiber.yield
+      nil
+    end
+
+    # @return [Boolean] Returns true if the current Fiber has yielded once via Dataloader
+    def yielded?
+      @yielded_fibers.include?(Fiber.current)
+    end
+
+    # Run all Fibers until they're all done
+    #
+    # Each cycle works like this:
+    #
+    #   - Run each pending execution fiber (`@waiting_fibers`),
+    #   - Then run each pending Source, preparing more data for those fibers.
+    #     - Run each pending Source _again_ (if one Source requested more data from another Source)
+    #     - Continue until there are no pending sources
+    #   - Repeat: run execution fibers again ...
+    #
+    # @return [void]
+    def run
+      # Start executing Fibers. This will run until all the Fibers are done.
+      already_run_fibers = []
+      while (current_fiber = @waiting_fibers.pop)
+        # Run each execution fiber, enqueuing it in `already_run_fibers`
+        # if it's still `.alive?`.
+        # Any spin-off continuations will be enqueued in `@waiting_fibers` (via {#enqueue})
+        resume_fiber_and_enqueue_continuation(current_fiber, already_run_fibers)
+
+        if @waiting_fibers.empty?
+          # Now, run all Sources which have become pending _before_ resuming GraphQL execution.
+          # Sources might queue up other Sources, which is fine -- those will also run before resuming execution.
+          #
+          # This is where an evented approach would be even better -- can we tell which
+          # fibers are ready to continue, and continue execution there?
+          #
+          source_fiber_stack = if (first_source_fiber = create_source_fiber)
+            [first_source_fiber]
+          else
+            nil
+          end
+
+          if source_fiber_stack
+            while (outer_source_fiber = source_fiber_stack.pop)
+              resume_fiber_and_enqueue_continuation(outer_source_fiber, source_fiber_stack)
+
+              # If this source caused more sources to become pending, run those before running this one again:
+              next_source_fiber = create_source_fiber
+              if next_source_fiber
+                source_fiber_stack << next_source_fiber
+              end
+            end
+          end
+
+          # We ran all the first round of execution fibers,
+          # and we ran all the pending sources.
+          # So pick up any paused execution fibers and repeat.
+          @waiting_fibers.concat(already_run_fibers)
+          already_run_fibers.clear
+        end
+      end
+      nil
+    end
+
+    # Get a Source instance from this dataloader, for calling `.load(...)` or `.request(...)` on.
+    #
+    # @param source_class [Class<GraphQL::Dataloader::Source]
+    # @param batch_parameters [Array<Object>]
+    # @return [GraphQL::Dataloader::Source] An instance of {source_class}, initialized with `self, *batch_parameters`,
+    #   and cached for the lifetime of this {Multiplex}.
+    def with(source_class, *batch_parameters)
+      @source_cache[source_class][batch_parameters]
+    end
+
+    # @api private
+    attr_accessor :current_runtime
+
+    private
+
+    # Check if this fiber is still alive.
+    # If it is, and it should continue, then enqueue a continuation.
+    # If it is, re-enqueue it in `fiber_queue`.
+    # Otherwise, clean it up from @yielded_fibers.
+    # @return [void]
+    def resume_fiber_and_enqueue_continuation(fiber, fiber_stack)
+      result = fiber.resume
+      if result.is_a?(StandardError)
+        raise result
+      end
+
+      # This fiber yielded; there's more to do here.
+      # (If `#alive?` is false, then the fiber concluded without yielding.)
+      if fiber.alive?
+        if !@yielded_fibers.include?(fiber)
+          # This fiber hasn't yielded yet, we should enqueue a continuation fiber
+          @yielded_fibers.add(fiber)
+          current_runtime.enqueue_selections_fiber
+        end
+        fiber_stack << fiber
+      else
+        # Keep this set clean so that fibers can be GC'ed during execution
+        @yielded_fibers.delete(fiber)
+      end
+    end

+    # If there are pending sources, return a fiber for running them.
+    # Otherwise, return `nil`.
+    #
+    # @return [Fiber, nil]
+    def create_source_fiber
+      pending_sources = nil
+      @source_cache.each_value do |source_by_batch_params|
+        source_by_batch_params.each_value do |source|
+          if source.pending?
+            pending_sources ||= []
+            pending_sources << source
+          end
+        end
+      end
+
+      if pending_sources
+        source_fiber = Fiber.new do
+          pending_sources.each(&:run_pending_keys)
+        end
+      end
+
+      source_fiber
+    end
+  end
+end
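
Pulling the pieces together: `Dataloader.use` registers the class on the schema, `#with` returns a cached Source instance per `(source_class, batch_parameters)` pair, and `#run` drives the Fibers until every pending source has been fetched. A rough usage sketch, reusing the hypothetical UserByIdSource from the previous example (schema and field names are placeholders):

    class MySchema < GraphQL::Schema
      use GraphQL::Dataloader   # assigns schema.dataloader_class, per Dataloader.use above
    end

    # In a field resolver, several keys can be registered up front and loaded together:
    def reviewers
      requests = [object.author_id, object.editor_id].map do |id|
        dataloader.with(UserByIdSource).request(id)   # enqueues the key, returns a Request
      end
      requests.map(&:load)   # yields this Fiber until the batched fetch(...) has run
    end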

data/lib/graphql/define/assign_global_id_field.rb
@@ -4,7 +4,7 @@ module GraphQL
     module AssignGlobalIdField
       def self.call(type_defn, field_name, **field_kwargs)
         resolve = GraphQL::Relay::GlobalIdResolve.new(type: type_defn)
-        GraphQL::Define::AssignObjectField.call(type_defn, field_name, **field_kwargs, type: GraphQL::
+        GraphQL::Define::AssignObjectField.call(type_defn, field_name, **field_kwargs, type: GraphQL::DEPRECATED_ID_TYPE.to_non_null_type, resolve: resolve)
       end
     end
   end

data/lib/graphql/define/instance_definable.rb
@@ -3,6 +3,23 @@ module GraphQL
   module Define
     # @api deprecated
     module InstanceDefinable
+      module DeprecatedDefine
+        def define(**kwargs, &block)
+          deprecated_caller = caller(1, 1).first
+          if deprecated_caller.include?("lib/graphql")
+            deprecated_caller = caller(2, 10).find { |c| !c.include?("lib/graphql") }
+          end
+
+          if deprecated_caller
+            warn <<-ERR
+#{self}.define will be removed in GraphQL-Ruby 2.0; use a class-based definition instead. See https://graphql-ruby.org/schema/class_based_api.html.
+  -> called from #{deprecated_caller}
+ERR
+          end
+          deprecated_define(**kwargs, &block)
+        end
+      end
+
       def self.included(base)
         base.extend(ClassMethods)
         base.ensure_defined(:metadata)
@@ -14,7 +31,7 @@ module GraphQL
       end

       # @api deprecated
-      def
+      def deprecated_define(**kwargs, &block)
         # make sure the previous definition_proc was executed:
         ensure_defined
         stash_dependent_methods
@@ -22,11 +39,16 @@ module GraphQL
         nil
       end

+      # @api deprecated
+      def define(**kwargs, &block)
+        deprecated_define(**kwargs, &block)
+      end
+
       # @api deprecated
       def redefine(**kwargs, &block)
         ensure_defined
         new_inst = self.dup
-        new_inst.
+        new_inst.deprecated_define(**kwargs, &block)
         new_inst
       end

@@ -125,8 +147,16 @@ module GraphQL
       module ClassMethods
         # Create a new instance
         # and prepare a definition using its {.definitions}.
+        # @api deprecated
         # @param kwargs [Hash] Key-value pairs corresponding to defininitions from `accepts_definitions`
         # @param block [Proc] Block which calls helper methods from `accepts_definitions`
+        def deprecated_define(**kwargs, &block)
+          instance = self.new
+          instance.deprecated_define(**kwargs, &block)
+          instance
+        end
+
+        # @api deprecated
         def define(**kwargs, &block)
           instance = self.new
           instance.define(**kwargs, &block)

data/lib/graphql/define/type_definer.rb
@@ -7,11 +7,11 @@ module GraphQL
     class TypeDefiner
       include Singleton
       # rubocop:disable Naming/MethodName
-      def Int; GraphQL::
-      def String; GraphQL::
-      def Float; GraphQL::
-      def Boolean; GraphQL::
-      def ID; GraphQL::
+      def Int; GraphQL::DEPRECATED_INT_TYPE; end
+      def String; GraphQL::DEPRECATED_STRING_TYPE; end
+      def Float; GraphQL::DEPRECATED_FLOAT_TYPE; end
+      def Boolean; GraphQL::DEPRECATED_BOOLEAN_TYPE; end
+      def ID; GraphQL::DEPRECATED_ID_TYPE; end
       # rubocop:enable Naming/MethodName

       # Make a {ListType} which wraps the input type
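
This is part of the same rename visible in assign_global_id_field.rb above: the legacy DSL's type helpers now resolve to the `DEPRECATED_*_TYPE` constants. A quick check of what that means in practice (a sketch; only the constant names shown in this diff are assumed to exist):

    types = GraphQL::Define::TypeDefiner.instance    # the object the legacy DSL's `types` helper returns
    types.Int.equal?(GraphQL::DEPRECATED_INT_TYPE)   # => true in 1.12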

data/lib/graphql/deprecated_dsl.rb
@@ -23,12 +23,17 @@ module GraphQL
     ]

     def self.activate
+      deprecated_caller = caller(1, 1).first
+      warn "DeprecatedDSL will be removed from GraphQL-Ruby 2.0, use `.to_non_null_type` instead of `!` and remove `.activate` from #{deprecated_caller}"
       TYPE_CLASSES.each { |c| c.extend(Methods) }
       GraphQL::Schema::List.include(Methods)
       GraphQL::Schema::NonNull.include(Methods)
     end
+
     module Methods
       def !
+        deprecated_caller = caller(1, 1).first
+        warn "DeprecatedDSL will be removed from GraphQL-Ruby 2.0, use `.to_non_null_type` instead of `!` at #{deprecated_caller}"
         to_non_null_type
       end
     end
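
Both new warnings point at the same migration: stop calling `GraphQL::DeprecatedDSL.activate` and replace the `!` shorthand with an explicit `.to_non_null_type`. A small before/after sketch (how the shorthand appears in a given app will vary):

    # 1.11-style, with GraphQL::DeprecatedDSL.activate called in an initializer --
    # every use of `!` now prints the removal warning above:
    non_null_string = !GraphQL::Types::String

    # 1.12-forward replacement suggested by the warning text:
    non_null_string = GraphQL::Types::String.to_non_null_type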
data/lib/graphql/enum_type.rb
CHANGED
@@ -2,6 +2,8 @@
 module GraphQL
   # @api deprecated
   class EnumType < GraphQL::BaseType
+    extend Define::InstanceDefinable::DeprecatedDefine
+
     accepts_definitions :values, value: GraphQL::Define::AssignEnumValue
     ensure_defined(:values, :validate_non_null_input, :coerce_non_null_input, :coerce_result)
     attr_accessor :ast_node
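
Since EnumType now extends DeprecatedDefine, legacy `.define`-style enums keep working in 1.12 but emit the removal warning added in instance_definable.rb above. A hedged before/after sketch (the enum itself is made up):

    # Legacy DSL -- still functional, now warns that `.define` goes away in 2.0:
    ColorEnum = GraphQL::EnumType.define do
      name "Color"
      value "RED"
    end

    # Class-based definition the warning points to:
    class ColorType < GraphQL::Schema::Enum
      value "RED"
    end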

data/lib/graphql/execution/errors.rb
@@ -18,6 +18,10 @@ module GraphQL
     #
     class Errors
       def self.use(schema)
+        if schema.plugins.any? { |(plugin, kwargs)| plugin == self }
+          definition_line = caller(2, 1).first
+          warn("GraphQL::Execution::Errors is now installed by default, remove `use GraphQL::Execution::Errors` from #{definition_line}")
+        end
         schema.error_handler = self.new(schema)
       end

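
The new guard only fires when a schema still opts in explicitly; since the error handler is installed by default in 1.12, the fix is simply deleting the `use` line. A sketch, assuming a class-based schema (the rescued error class is an example, not something this gem defines):

    class MySchema < GraphQL::Schema
      # use GraphQL::Execution::Errors   # now redundant -- triggers the warning above
      rescue_from(ActiveRecord::RecordNotFound) do |err|
        raise GraphQL::ExecutionError, "Record not found"
      end
    end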

data/lib/graphql/execution/execute.rb
@@ -18,7 +18,14 @@ module GraphQL
     # @api private
     PROPAGATE_NULL = PropagateNull.new

+    def self.use(schema_class)
+      schema_class.query_execution_strategy(self)
+      schema_class.mutation_execution_strategy(self)
+      schema_class.subscription_execution_strategy(self)
+    end
+
     def execute(ast_operation, root_type, query)
+      warn "#{self.class} will be removed in GraphQL-Ruby 2.0, please upgrade to the Interpreter: https://graphql-ruby.org/queries/interpreter.html"
       result = resolve_root_selection(query)
       lazy_resolve_root_selection(result, **{query: query})
       GraphQL::Execution::Flatten.call(query.context)
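
`Execute.use` now wires the legacy strategy into all three operation types in one call, and `#execute` warns on every run to push schemas toward the Interpreter. Under the assumption of a class-based schema that still needs the old runtime, opting in would look roughly like this:

    class LegacySchema < GraphQL::Schema
      use GraphQL::Execution::Execute   # sets query/mutation/subscription execution strategies (see self.use above)
    end
    # Each executed operation then prints the "will be removed in GraphQL-Ruby 2.0" warning.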

data/lib/graphql/execution/interpreter/arguments.rb
@@ -5,6 +5,9 @@ module GraphQL
     class Interpreter
       # A wrapper for argument hashes in GraphQL queries.
       #
+      # This object is immutable so that the runtime code can be sure that
+      # modifications don't leak from one use to another
+      #
       # @see GraphQL::Query#arguments_for to get access to these objects.
       class Arguments
         extend Forwardable
@@ -14,26 +17,43 @@ module GraphQL
         # This hash is the one used at runtime.
         #
         # @return [Hash<Symbol, Object>]
-
-          @keyword_arguments ||= begin
-            kwargs = {}
-            argument_values.each do |name, arg_val|
-              kwargs[name] = arg_val.value
-            end
-            kwargs
-          end
-        end
+        attr_reader :keyword_arguments

         # @param argument_values [nil, Hash{Symbol => ArgumentValue}]
-
-
+        # @param keyword_arguments [nil, Hash{Symbol => Object}]
+        def initialize(keyword_arguments: nil, argument_values:)
           @empty = argument_values.nil? || argument_values.empty?
+          # This is only present when `extras` have been merged in:
+          if keyword_arguments
+            # This is a little crazy. We expect the `:argument_details` extra to _include extras_,
+            # but the object isn't created until _after_ extras are put together.
+            # So, we have to use a special flag here to say, "at the last minute, add yourself to the keyword args."
+            #
+            # Otherwise:
+            # - We can't access the final Arguments instance _while_ we're preparing extras
+            # - After we _can_ access it, it's frozen, so we can't add anything.
+            #
+            # So, this flag gives us a chance to sneak it in before freezing, _and_ while we have access
+            # to the new Arguments instance itself.
+            if keyword_arguments[:argument_details] == :__arguments_add_self
+              keyword_arguments[:argument_details] = self
+            end
+            @keyword_arguments = keyword_arguments.freeze
+          elsif !@empty
+            @keyword_arguments = {}
+            argument_values.each do |name, arg_val|
+              @keyword_arguments[name] = arg_val.value
+            end
+            @keyword_arguments.freeze
+          else
+            @keyword_arguments = NO_ARGS
+          end
+          @argument_values = argument_values ? argument_values.freeze : NO_ARGS
+          freeze
         end

         # @return [Hash{Symbol => ArgumentValue}]
-
-          @argument_values ||= {}
-        end
+        attr_reader :argument_values

         def empty?
           @empty
@@ -45,6 +65,23 @@ module GraphQL
         def inspect
           "#<#{self.class} @keyword_arguments=#{keyword_arguments.inspect}>"
         end
+
+        # Create a new arguments instance which includes these extras.
+        #
+        # This is called by the runtime to implement field `extras: [...]`
+        #
+        # @param extra_args [Hash<Symbol => Object>]
+        # @return [Interpreter::Arguments]
+        # @api private
+        def merge_extras(extra_args)
+          self.class.new(
+            argument_values: argument_values,
+            keyword_arguments: keyword_arguments.merge(extra_args)
+          )
+        end
+
+        NO_ARGS = {}.freeze
+        EMPTY = self.new(argument_values: nil, keyword_arguments: NO_ARGS).freeze
       end
     end
   end
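
The upshot is that Arguments is now a frozen value object: both hashes are computed once in the constructor, and the `extras:` machinery goes through `merge_extras`, which returns a new frozen instance instead of mutating the original. A rough illustration using the new `EMPTY` constant (this is `@api private` internals, shown only to make the immutability change concrete):

    args = GraphQL::Execution::Interpreter::Arguments::EMPTY
    args.frozen?                 # => true
    args.keyword_arguments       # => {} (the shared NO_ARGS hash)

    merged = args.merge_extras(lookahead: nil)   # `lookahead: nil` stands in for a real extra value
    merged.keyword_arguments     # => { lookahead: nil }
    args.keyword_arguments       # => {} -- the original instance is untouched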