graphql 2.2.5 → 2.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/graphql/analysis/ast/field_usage.rb +32 -7
- data/lib/graphql/analysis/ast.rb +7 -1
- data/lib/graphql/coercion_error.rb +1 -9
- data/lib/graphql/execution/interpreter.rb +90 -150
- data/lib/graphql/introspection/entry_points.rb +9 -3
- data/lib/graphql/introspection/schema_type.rb +3 -1
- data/lib/graphql/language/document_from_schema_definition.rb +1 -2
- data/lib/graphql/pagination/array_connection.rb +3 -3
- data/lib/graphql/pagination/relation_connection.rb +3 -3
- data/lib/graphql/query/validation_pipeline.rb +2 -2
- data/lib/graphql/query/variables.rb +3 -3
- data/lib/graphql/query.rb +1 -1
- data/lib/graphql/schema.rb +26 -0
- data/lib/graphql/static_validation/validator.rb +3 -0
- data/lib/graphql/subscriptions.rb +0 -3
- data/lib/graphql/tracing/data_dog_trace.rb +21 -34
- data/lib/graphql/tracing/data_dog_tracing.rb +7 -21
- data/lib/graphql/tracing/legacy_hooks_trace.rb +74 -0
- data/lib/graphql/tracing/platform_tracing.rb +2 -0
- data/lib/graphql/tracing/{prometheus_tracing → prometheus_trace}/graphql_collector.rb +3 -1
- data/lib/graphql/tracing/sentry_trace.rb +94 -0
- data/lib/graphql/tracing.rb +3 -1
- data/lib/graphql/version.rb +1 -1
- metadata +6 -5
- data/lib/graphql/subscriptions/instrumentation.rb +0 -28
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz:
-   data.tar.gz:
+   metadata.gz: ba7d4c9ad987cbae751625aced364d2ace39bfb84ee22e32ab07f33c335f4ebf
+   data.tar.gz: 40773fa35507e6cbeefb468c4ad153e1561e813092187abaf6f246c5e06802d6
  SHA512:
-   metadata.gz:
-   data.tar.gz:
+   metadata.gz: fb0a00a30aec2c81a6a0e239f8f7c3313747e190c06d821395a75868a46d84b65f699e4dbf40342455549b2b1b7f92ab5630dcdd2bebfbe95aef66a574a5bd5d
+   data.tar.gz: 6b1b63f5004336b304adbfb6b946d0b8ba87bd60a5356859f38fae3df92f15f092d07c22f1bd9857fdf04883b8b7d2134f38fb67bf11afe08df009528dd3c0ed
data/lib/graphql/analysis/ast/field_usage.rb
CHANGED
@@ -8,6 +8,7 @@ module GraphQL
        @used_fields = Set.new
        @used_deprecated_fields = Set.new
        @used_deprecated_arguments = Set.new
+       @used_deprecated_enum_values = Set.new
      end

      def on_leave_field(node, parent, visitor)
@@ -15,7 +16,7 @@ module GraphQL
        field = "#{visitor.parent_type_definition.graphql_name}.#{field_defn.graphql_name}"
        @used_fields << field
        @used_deprecated_fields << field if field_defn.deprecation_reason
-       arguments = visitor.query.arguments_for(node,
+       arguments = visitor.query.arguments_for(node, field_defn)
        # If there was an error when preparing this argument object,
        # then this might be an error or something:
        if arguments.respond_to?(:argument_values)
@@ -28,6 +29,7 @@ module GraphQL
          used_fields: @used_fields.to_a,
          used_deprecated_fields: @used_deprecated_fields.to_a,
          used_deprecated_arguments: @used_deprecated_arguments.to_a,
+         used_deprecated_enum_values: @used_deprecated_enum_values.to_a,
        }
      end

@@ -41,16 +43,39 @@ module GraphQL

          next if argument.value.nil?

-
+         argument_type = argument.definition.type
+         if argument_type.non_null?
+           argument_type = argument_type.of_type
+         end
+
+         if argument_type.kind.input_object?
            extract_deprecated_arguments(argument.value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
-         elsif
-         argument
-
-
+         elsif argument_type.kind.enum?
+           extract_deprecated_enum_value(argument_type, argument.value)
+         elsif argument_type.list?
+           inner_type = argument_type.unwrap
+           case inner_type.kind
+           when TypeKinds::INPUT_OBJECT
+             argument.value.each do |value|
+               extract_deprecated_arguments(value.arguments.argument_values) # rubocop:disable Development/ContextIsPassedCop -- runtime args instance
+             end
+           when TypeKinds::ENUM
+             argument.value.each do |value|
+               extract_deprecated_enum_value(inner_type, value)
+             end
+           else
+             # Not a kind of input that we track
+           end
          end
        end
      end
+
+     def extract_deprecated_enum_value(enum_type, value)
+       enum_value = @query.warden.enum_values(enum_type).find { |ev| ev.value == value }
+       if enum_value&.deprecation_reason
+         @used_deprecated_enum_values << enum_value.path
+       end
+     end
    end
  end
end
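The FieldUsage analyzer now also tracks deprecated enum values passed as arguments (directly, inside input objects, and inside lists) and reports them under a new used_deprecated_enum_values key. A minimal sketch of running the analyzer by hand; the schema, the items field, and the Status.LEGACY enum value are hypothetical:

    query = GraphQL::Query.new(MySchema, "{ items(status: LEGACY) { id } }")
    usage = GraphQL::Analysis::AST.analyze_query(query, [GraphQL::Analysis::AST::FieldUsage]).first
    usage[:used_deprecated_enum_values]
    # => ["Status.LEGACY"] -- when that enum value has a deprecation_reason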
data/lib/graphql/analysis/ast.rb
CHANGED
@@ -6,6 +6,7 @@ require "graphql/analysis/ast/query_complexity"
  require "graphql/analysis/ast/max_query_complexity"
  require "graphql/analysis/ast/query_depth"
  require "graphql/analysis/ast/max_query_depth"
+ require "timeout"

  module GraphQL
    module Analysis
@@ -63,7 +64,10 @@ module GraphQL
        analyzers: analyzers_to_run
      )

-
+     # `nil` or `0` causes no timeout
+     Timeout::timeout(query.validate_timeout_remaining) do
+       visitor.visit
+     end

      if visitor.rescued_errors.any?
        return visitor.rescued_errors
@@ -75,6 +79,8 @@ module GraphQL
        []
      end
    end
+ rescue Timeout::Error
+   [GraphQL::AnalysisError.new("Timeout on validation of query")]
  end

  def analysis_errors(results)
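Analysis now shares the schema's validate_timeout budget: static validation records how much of the timeout remains, and the AST visit above is wrapped in Timeout::timeout(query.validate_timeout_remaining). A hedged sketch of the schema-level setting that feeds this (the 0.1-second value is only an example):

    class MySchema < GraphQL::Schema
      query Types::QueryType
      # Validation *and* analysis together may take at most 100ms;
      # when exceeded, the query returns an analysis error instead of running.
      validate_timeout 0.1
    end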
data/lib/graphql/coercion_error.rb
CHANGED
@@ -1,13 +1,5 @@
  # frozen_string_literal: true
  module GraphQL
-   class CoercionError < GraphQL::
-     # @return [Hash] Optional custom data for error objects which will be added
-     # under the `extensions` key.
-     attr_accessor :extensions
-
-     def initialize(message, extensions: nil)
-       @extensions = extensions
-       super(message)
-     end
+   class CoercionError < GraphQL::ExecutionError
    end
  end
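GraphQL::CoercionError is now simply a subclass of GraphQL::ExecutionError, which already supports extensions, so the custom initializer is gone. Raising it from a scalar works the same as before; a small sketch with a hypothetical TimeType scalar:

    require "time"

    class TimeType < GraphQL::Schema::Scalar
      def self.coerce_input(value, _context)
        Time.iso8601(value)
      rescue ArgumentError
        raise GraphQL::CoercionError.new(
          "#{value.inspect} is not a valid ISO 8601 time",
          extensions: { "code" => "INVALID_TIME" }
        )
      end
    end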
data/lib/graphql/execution/interpreter.rb
CHANGED
@@ -37,173 +37,113 @@ module GraphQL
  multiplex.current_trace.execute_multiplex(multiplex: multiplex) do
    schema = multiplex.schema
    queries = multiplex.queries
-   query_instrumenters = schema.instrumenters[:query]
-   multiplex_instrumenters = schema.instrumenters[:multiplex]
    lazies_at_depth = Hash.new { |h, k| h[k] = [] }
+   multiplex_analyzers = schema.multiplex_analyzers
+   if multiplex.max_complexity
+     multiplex_analyzers += [GraphQL::Analysis::AST::MaxQueryComplexity]
+   end

-   [old lines 44-51 removed; their content is not shown in this extract]
+   schema.analysis_engine.analyze_multiplex(multiplex, multiplex_analyzers)
+   begin
+     # Since this is basically the batching context,
+     # share it for a whole multiplex
+     multiplex.context[:interpreter_instance] ||= multiplex.schema.query_execution_strategy.new
+     # Do as much eager evaluation of the query as possible
+     results = []
+     queries.each_with_index do |query, idx|
+       if query.subscription? && !query.subscription_update?
+         query.context.namespace(:subscriptions)[:events] = []
        end
-   [old lines 53-66 removed; their content is not shown in this extract]
-       begin
-         # Although queries in a multiplex _share_ an Interpreter instance,
-         # they also have another item of state, which is private to that query
-         # in particular, assign it here:
-         runtime = Runtime.new(query: query, lazies_at_depth: lazies_at_depth)
-         query.context.namespace(:interpreter_runtime)[:runtime] = runtime
-
-         query.current_trace.execute_query(query: query) do
-           runtime.run_eager
-         end
-       rescue GraphQL::ExecutionError => err
-         query.context.errors << err
-         NO_OPERATION
-       end
+       multiplex.dataloader.append_job {
+         operation = query.selected_operation
+         result = if operation.nil? || !query.valid? || query.context.errors.any?
+           NO_OPERATION
+         else
+           begin
+             # Although queries in a multiplex _share_ an Interpreter instance,
+             # they also have another item of state, which is private to that query
+             # in particular, assign it here:
+             runtime = Runtime.new(query: query, lazies_at_depth: lazies_at_depth)
+             query.context.namespace(:interpreter_runtime)[:runtime] = runtime
+
+             query.current_trace.execute_query(query: query) do
+               runtime.run_eager
             end
-   [old lines 82-83 removed; their content is not shown in this extract]
+           rescue GraphQL::ExecutionError => err
+             query.context.errors << err
+             NO_OPERATION
+           end
         end
+         results[idx] = result
+       }
+     end

-   [old lines 86-87 removed; their content is not shown in this extract]
-   # Then, work through lazy results in a breadth-first way
-   multiplex.dataloader.append_job {
-     query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
-     queries = multiplex ? multiplex.queries : [query]
-     final_values = queries.map do |query|
-       runtime = query.context.namespace(:interpreter_runtime)[:runtime]
-       # it might not be present if the query has an error
-       runtime ? runtime.final_result : nil
-     end
-     final_values.compact!
-     multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
-       Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
-     end
-   }
-   multiplex.dataloader.run
+     multiplex.dataloader.run

-   [old lines 104-118 removed; their content is not shown in this extract]
+     # Then, work through lazy results in a breadth-first way
+     multiplex.dataloader.append_job {
+       query = multiplex.queries.length == 1 ? multiplex.queries[0] : nil
+       queries = multiplex ? multiplex.queries : [query]
+       final_values = queries.map do |query|
+         runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+         # it might not be present if the query has an error
+         runtime ? runtime.final_result : nil
+       end
+       final_values.compact!
+       multiplex.current_trace.execute_query_lazy(multiplex: multiplex, query: query) do
+         Interpreter::Resolve.resolve_each_depth(lazies_at_depth, multiplex.dataloader)
+       end
+     }
+     multiplex.dataloader.run

-   [old lines 120-123 removed; their content is not shown in this extract]
+     # Then, find all errors and assign the result to the query object
+     results.each_with_index do |data_result, idx|
+       query = queries[idx]
+       if (events = query.context.namespace(:subscriptions)[:events]) && events.any?
+         schema.subscriptions.write_subscription(query, events)
+       end
+       # Assign the result so that it can be accessed in instrumentation
+       query.result_values = if data_result.equal?(NO_OPERATION)
+         if !query.valid? || query.context.errors.any?
+           # A bit weird, but `Query#static_errors` _includes_ `query.context.errors`
+           { "errors" => query.static_errors.map(&:to_h) }
+         else
+           data_result
+         end
+       else
+         result = {
+           "data" => query.context.namespace(:interpreter_runtime)[:runtime].final_result
+         }

-   [old lines 125-127 removed; their content is not shown in this extract]
-         query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
-       end
-       # Get the Query::Result, not the Hash
-       results[idx] = query.result
+         if query.context.errors.any?
+           error_result = query.context.errors.map(&:to_h)
+           result["errors"] = error_result
         end

-   [old lines 134-137 removed; their content is not shown in this extract]
-       queries.map { |q| q.result_values ||= {} }
-       raise
-     ensure
-       queries.map { |query|
-         runtime = query.context.namespace(:interpreter_runtime)[:runtime]
-         if runtime
-           runtime.delete_all_interpreter_context
-         end
-       }
+         result
+       end
+       if query.context.namespace?(:__query_result_extensions__)
+         query.result_values["extensions"] = query.context.namespace(:__query_result_extensions__)
       end
+       # Get the Query::Result, not the Hash
+       results[idx] = query.result
     end
-     end
-     end
-     end
-
-     private

-   [old lines 155-165 removed; their content is not shown in this extract]
+     results
+   rescue Exception
+     # TODO rescue at a higher level so it will catch errors in analysis, too
+     # Assign values here so that the query's `@executed` becomes true
+     queries.map { |q| q.result_values ||= {} }
+     raise
+   ensure
+     queries.map { |query|
+       runtime = query.context.namespace(:interpreter_runtime)[:runtime]
+       if runtime
+         runtime.delete_all_interpreter_context
+       end
     }
-   }
-   end
- end
-
-   # Call each before hook, and if they all succeed, yield.
-   # If they don't all succeed, call after_ for each one that succeeded.
-   def call_hooks(instrumenters, object, before_hook_name, after_hook_name)
-     begin
-       successful = []
-       instrumenters.each do |instrumenter|
-         instrumenter.public_send(before_hook_name, object)
-         successful << instrumenter
-       end
-
-       # if any before hooks raise an exception, quit calling before hooks,
-       # but call the after hooks on anything that succeeded but also
-       # raise the exception that came from the before hook.
-     rescue GraphQL::ExecutionError => err
-       object.context.errors << err
-     rescue => e
-       raise call_after_hooks(successful, object, after_hook_name, e)
-     end
-
-     begin
-       yield # Call the user code
-     ensure
-       ex = call_after_hooks(successful, object, after_hook_name, nil)
-       raise ex if ex
-     end
-   end
-
-   def call_after_hooks(instrumenters, object, after_hook_name, ex)
-     instrumenters.reverse_each do |instrumenter|
-       begin
-         instrumenter.public_send(after_hook_name, object)
-       rescue => e
-         ex = e
       end
     end
-     ex
   end
 end

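The interpreter refactor above queues each query's eager execution as a dataloader.append_job block, runs analyzers through schema.analysis_engine, and assembles results (including subscription events and error payloads) after the dataloader has run, instead of routing through the old instrumenter hooks. The public entry points are unchanged; a minimal multiplex call for reference (the schema and documents are hypothetical):

    results = MySchema.multiplex([
      { query: "{ viewer { login } }" },
      { query: "query($id: ID!) { node(id: $id) { __typename } }", variables: { "id" => "1" } },
    ])
    results.map { |res| res["data"] }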
data/lib/graphql/introspection/entry_points.rb
CHANGED
@@ -9,13 +9,19 @@ module GraphQL

    def __schema
      # Apply wrapping manually since this field isn't wrapped by instrumentation
-     schema =
+     schema = context.schema
      schema_type = schema.introspection_system.types["__Schema"]
-     schema_type.wrap(schema,
+     schema_type.wrap(schema, context)
    end

    def __type(name:)
-     context.warden.reachable_type?(name)
+     if context.warden.reachable_type?(name)
+       context.warden.get_type(name)
+     elsif (type = context.schema.extra_types.find { |t| t.graphql_name == name })
+       type
+     else
+       nil
+     end
    end
  end
end
data/lib/graphql/language/document_from_schema_definition.rb
CHANGED
@@ -266,8 +266,7 @@ module GraphQL
    end
    definitions = build_directive_nodes(dirs_to_build)

-   type_nodes = build_type_definition_nodes(warden.reachable_types)
-
+   type_nodes = build_type_definition_nodes(warden.reachable_types + schema.extra_types)
    if @include_one_of
      # This may have been set to true when iterating over all types
      definitions.concat(build_directive_nodes([GraphQL::Schema::Directive::OneOf]))
data/lib/graphql/pagination/array_connection.rb
CHANGED
@@ -35,10 +35,10 @@ module GraphQL
    def load_nodes
      @nodes ||= begin
        sliced_nodes = if before && after
-         end_idx = index_from_cursor(before)-
+         end_idx = index_from_cursor(before) - 2
          end_idx < 0 ? [] : items[index_from_cursor(after)..end_idx] || []
        elsif before
-         end_idx = index_from_cursor(before)-2
+         end_idx = index_from_cursor(before) - 2
          end_idx < 0 ? [] : items[0..end_idx] || []
        elsif after
          items[index_from_cursor(after)..-1] || []
@@ -56,7 +56,7 @@ module GraphQL
      false
    end

-   @has_next_page = if first
+   @has_next_page = if first_value && first
      # There are more items after these items
      sliced_nodes.count > first
    elsif before
data/lib/graphql/pagination/relation_connection.rb
CHANGED
@@ -29,14 +29,14 @@ module GraphQL

    def has_next_page
      if @has_next_page.nil?
-       @has_next_page = if
-         true
-       elsif first
+       @has_next_page = if first && first_value
          if @nodes && @nodes.count < first
            false
          else
            relation_larger_than(sliced_nodes, @sliced_nodes_offset, first)
          end
+       elsif before_offset && before_offset > 0
+         true
        else
          false
        end
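Both connection classes now consult first for has_next_page only when a first: argument was actually given, and report a next page when paginating backwards with before:. A hedged sketch of a query exercising that path (the items connection field is hypothetical; cursor is an end cursor fetched from a previous page):

    result = MySchema.execute(<<~GRAPHQL, variables: { "cursor" => cursor })
      query($cursor: String) {
        items(last: 5, before: $cursor) {
          nodes { id }
          pageInfo { hasNextPage hasPreviousPage }
        }
      }
    GRAPHQL
    result["data"]["items"]["pageInfo"]["hasNextPage"]
    # reflects whether anything exists after the `before` cursor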
data/lib/graphql/query/validation_pipeline.rb
CHANGED
@@ -14,7 +14,7 @@ module GraphQL
    #
    # @api private
    class ValidationPipeline
-     attr_reader :max_depth, :max_complexity
+     attr_reader :max_depth, :max_complexity, :validate_timeout_remaining

      def initialize(query:, parse_error:, operation_name_error:, max_depth:, max_complexity:)
        @validation_errors = []
@@ -71,7 +71,7 @@ module GraphQL
      validator = @query.static_validator || @schema.static_validator
      validation_result = validator.validate(@query, validate: @query.validate, timeout: @schema.validate_timeout, max_errors: @schema.validate_max_errors)
      @validation_errors.concat(validation_result[:errors])
-
+     @validate_timeout_remaining = validation_result[:remaining_timeout]
      if @validation_errors.empty?
        @validation_errors.concat(@query.variables.errors)
      end
data/lib/graphql/query/variables.rb
CHANGED
@@ -26,7 +26,7 @@ module GraphQL
    # - Then, fall back to the default value from the query string
    # If it's still nil, raise an error if it's required.
    variable_type = schema.type_from_ast(ast_variable.type, context: ctx)
-   if variable_type.nil?
+   if variable_type.nil? || !variable_type.unwrap.kind.input?
      # Pass -- it will get handled by a validator
    else
      variable_name = ast_variable.name
@@ -80,12 +80,12 @@ module GraphQL
    else
      val
    end
-   end
+   end

    def add_max_errors_reached_message
      message = "Too many errors processing variables, max validation error limit reached. Execution aborted"
      validation_result = GraphQL::Query::InputValidationResult.from_problem(message)
-     errors << GraphQL::Query::VariableValidationError.new(nil, nil, nil, validation_result, msg: message)
+     errors << GraphQL::Query::VariableValidationError.new(nil, nil, nil, validation_result, msg: message)
    end
  end
end
data/lib/graphql/query.rb
CHANGED
@@ -317,7 +317,7 @@ module GraphQL
    end

    def_delegators :validation_pipeline, :validation_errors,
-     :analyzers, :ast_analyzers, :max_depth, :max_complexity
+     :analyzers, :ast_analyzers, :max_depth, :max_complexity, :validate_timeout_remaining

    attr_accessor :analysis_errors
    def valid?
data/lib/graphql/schema.rb
CHANGED
@@ -814,6 +814,26 @@ module GraphQL
      end
    end

+   # @param new_extra_types [Module] Type definitions to include in printing and introspection, even though they aren't referenced in the schema
+   # @return [Array<Module>] Type definitions added to this schema
+   def extra_types(*new_extra_types)
+     if new_extra_types.any?
+       new_extra_types = new_extra_types.flatten
+       @own_extra_types ||= []
+       @own_extra_types.concat(new_extra_types)
+     end
+     inherited_et = find_inherited_value(:extra_types, nil)
+     if inherited_et
+       if @own_extra_types
+         inherited_et + @own_extra_types
+       else
+         inherited_et
+       end
+     else
+       @own_extra_types || EMPTY_ARRAY
+     end
+   end
+
    def orphan_types(*new_orphan_types)
      if new_orphan_types.any?
        new_orphan_types = new_orphan_types.flatten
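Schema.extra_types is new in this release; together with the entry_points.rb and document_from_schema_definition.rb changes above, types registered this way are found by __type(name:) and included in printed SDL even when no field references them. A minimal sketch (the LegacyPayload type and QueryType are hypothetical):

    class MySchema < GraphQL::Schema
      query QueryType
      # Expose a type to introspection and schema dumps even though nothing returns it yet
      extra_types [Types::LegacyPayload]
    end

    MySchema.to_definition  # now includes the LegacyPayload SDL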
@@ -1044,6 +1064,12 @@ module GraphQL
    end

    def instrument(instrument_step, instrumenter, options = {})
+     warn <<~WARN
+       Schema.instrument is deprecated, use `trace_with` instead: https://graphql-ruby.org/queries/tracing.html"
+       (From `#{self}.instrument(#{instrument_step}, #{instrumenter})` at #{caller(1, 1).first})
+
+     WARN
+     trace_with(Tracing::LegacyHooksTrace)
      own_instrumenters[instrument_step] << instrumenter
    end

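Schema.instrument now emits a deprecation warning and installs Tracing::LegacyHooksTrace so existing before_query/after_query instrumenters keep firing. The suggested replacement is a trace module given to trace_with; a hedged sketch of an equivalent query hook (the module and the timing it records are illustrative, not part of this release):

    module QueryTimingTrace
      def execute_query(query:)
        started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        super
      ensure
        elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started
        query.context[:runtime_ms] = (elapsed * 1000).round(1)
      end
    end

    class MySchema < GraphQL::Schema
      trace_with QueryTimingTrace
    end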
data/lib/graphql/static_validation/validator.rb
CHANGED
@@ -28,6 +28,7 @@ module GraphQL
    # @return [Array<Hash>]
    def validate(query, validate: true, timeout: nil, max_errors: nil)
      query.current_trace.validate(validate: validate, query: query) do
+       begin_t = Time.now
        errors = if validate == false
          []
        else
@@ -52,11 +53,13 @@ module GraphQL
      end

      {
+       remaining_timeout: timeout ? (timeout - (Time.now - begin_t)) : nil,
        errors: errors,
      }
    end
  rescue GraphQL::ExecutionError => e
    {
+     remaining_timeout: nil,
      errors: [e],
    }
  end
data/lib/graphql/subscriptions.rb
CHANGED
@@ -2,7 +2,6 @@
  require "securerandom"
  require "graphql/subscriptions/broadcast_analyzer"
  require "graphql/subscriptions/event"
- require "graphql/subscriptions/instrumentation"
  require "graphql/subscriptions/serialize"
  require "graphql/subscriptions/action_cable_subscriptions"
  require "graphql/subscriptions/default_subscription_resolve_extension"
@@ -30,8 +29,6 @@ module GraphQL
    raise ArgumentError, "Can't reinstall subscriptions. #{schema} is using #{schema.subscriptions}, can't also add #{self}"
  end

-   instrumentation = Subscriptions::Instrumentation.new(schema: schema)
-   defn.instrument(:query, instrumentation)
    options[:schema] = schema
    schema.subscriptions = self.new(**options)
    schema.add_subscription_extension_if_necessary
data/lib/graphql/tracing/data_dog_trace.rb
CHANGED
@@ -3,20 +3,18 @@
  module GraphQL
    module Tracing
      module DataDogTrace
+       # @param tracer [#trace] Deprecated
        # @param analytics_enabled [Boolean] Deprecated
        # @param analytics_sample_rate [Float] Deprecated
-       def initialize(tracer: nil, analytics_enabled: false, analytics_sample_rate: 1.0, service:
+       def initialize(tracer: nil, analytics_enabled: false, analytics_sample_rate: 1.0, service: nil, **rest)
          if tracer.nil?
            tracer = defined?(Datadog::Tracing) ? Datadog::Tracing : Datadog.tracer
          end
          @tracer = tracer

-
-           && Datadog::Contrib::Analytics.respond_to?(:enabled?) \
-           && Datadog::Contrib::Analytics.respond_to?(:set_sample_rate)
-
-         @analytics_enabled = analytics_available && Datadog::Contrib::Analytics.enabled?(analytics_enabled)
+         @analytics_enabled = analytics_enabled
          @analytics_sample_rate = analytics_sample_rate
+
          @service_name = service
          @has_prepare_span = respond_to?(:prepare_span)
          super
@@ -34,12 +32,9 @@ module GraphQL
      }.each do |trace_method, trace_key|
        module_eval <<-RUBY, __FILE__, __LINE__
          def #{trace_method}(**data)
-           @tracer.trace("#{trace_key}", service: @service_name) do |span|
-
-
-             span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_COMPONENT, 'graphql')
-             span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_OPERATION, '#{trace_method}')
-           end
+           @tracer.trace("#{trace_key}", service: @service_name, type: 'custom') do |span|
+             span.set_tag('component', 'graphql')
+             span.set_tag('operation', '#{trace_method}')

            #{
            if trace_method == 'execute_multiplex'
@@ -54,10 +49,8 @@ module GraphQL
            end
            span.resource = resource if resource

-           #
-           if @analytics_enabled
-             Datadog::Contrib::Analytics.set_sample_rate(span, @analytics_sample_rate)
-           end
+           # [Deprecated] will be removed in the future
+           span.set_metric('_dd1.sr.eausr', @analytics_sample_rate) if @analytics_enabled
        RUBY
      elsif trace_method == 'execute_query'
        <<-RUBY
@@ -89,12 +82,10 @@ module GraphQL
          nil
        end
        if platform_key && trace_field
-         @tracer.trace(platform_key, service: @service_name) do |span|
-           span.
-
-
-           span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_OPERATION, span_key)
-         end
+         @tracer.trace(platform_key, service: @service_name, type: 'custom') do |span|
+           span.set_tag('component', 'graphql')
+           span.set_tag('operation', span_key)

          if @has_prepare_span
            prepare_span_data = { query: query, field: field, ast_node: ast_node, arguments: arguments, object: object }
            prepare_span(span_key, prepare_span_data, span)
@@ -125,12 +116,10 @@ module GraphQL

      def authorized_span(span_key, object, type, query)
        platform_key = @platform_key_cache[DataDogTrace].platform_authorized_key_cache[type]
-       @tracer.trace(platform_key, service: @service_name) do |span|
-         span.
-
-
-         span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_OPERATION, span_key)
-       end
+       @tracer.trace(platform_key, service: @service_name, type: 'custom') do |span|
+         span.set_tag('component', 'graphql')
+         span.set_tag('operation', span_key)

        if @has_prepare_span
          prepare_span(span_key, {object: object, type: type, query: query}, span)
        end
@@ -158,12 +147,10 @@ module GraphQL

      def resolve_type_span(span_key, object, type, query)
        platform_key = @platform_key_cache[DataDogTrace].platform_resolve_type_key_cache[type]
-       @tracer.trace(platform_key, service: @service_name) do |span|
-         span.
-
-
-         span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_OPERATION, span_key)
-       end
+       @tracer.trace(platform_key, service: @service_name, type: 'custom') do |span|
+         span.set_tag('component', 'graphql')
+         span.set_tag('operation', span_key)

        if @has_prepare_span
          prepare_span(span_key, {object: object, type: type, query: query}, span)
        end
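Spans from the Datadog integration are now created with type: 'custom' and tagged with plain 'component'/'operation' keys instead of the Datadog::Tracing::Metadata::Ext constants. Installation is unchanged; a hedged sketch including the optional prepare_span hook that the module checks for (the service: value and the extra tag are illustrative):

    module CustomDataDogTrace
      include GraphQL::Tracing::DataDogTrace

      # Called for field / authorized / resolve_type spans when defined
      def prepare_span(trace_key, data, span)
        span.set_tag("graphql.operation_name", data[:query]&.selected_operation_name)
      end
    end

    class MySchema < GraphQL::Schema
      trace_with CustomDataDogTrace, service: "my-graphql-api"
    end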
data/lib/graphql/tracing/data_dog_tracing.rb
CHANGED
@@ -15,12 +15,9 @@ module GraphQL
    }

    def platform_trace(platform_key, key, data)
-     tracer.trace(platform_key, service:
-       span.
-
-       span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_COMPONENT, 'graphql')
-       span.set_tag(Datadog::Tracing::Metadata::Ext::TAG_OPERATION, key)
-     end
+     tracer.trace(platform_key, service: options[:service], type: 'custom') do |span|
+       span.set_tag('component', 'graphql')
+       span.set_tag('operation', key)

      if key == 'execute_multiplex'
        operations = data[:multiplex].queries.map(&:selected_operation_name).join(', ')
@@ -33,10 +30,8 @@ module GraphQL
      end
      span.resource = resource if resource

-     #
-     if analytics_enabled?
-       Datadog::Contrib::Analytics.set_sample_rate(span, analytics_sample_rate)
-     end
+     # [Deprecated] will be removed in the future
+     span.set_metric('_dd1.sr.eausr', analytics_sample_rate) if analytics_enabled?
    end

    if key == 'execute_query'
@@ -51,10 +46,6 @@ module GraphQL
      end
    end

-   def service_name
-     options.fetch(:service, 'ruby-graphql')
-   end
-
    # Implement this method in a subclass to apply custom tags to datadog spans
    # @param key [String] The event being traced
    # @param data [Hash] The runtime data for this event (@see GraphQL::Tracing for keys for each event)
@@ -65,18 +56,13 @@ module GraphQL
    def tracer
      default_tracer = defined?(Datadog::Tracing) ? Datadog::Tracing : Datadog.tracer

+     # [Deprecated] options[:tracer] will be removed in the future
      options.fetch(:tracer, default_tracer)
    end

-   def analytics_available?
-     defined?(Datadog::Contrib::Analytics) \
-       && Datadog::Contrib::Analytics.respond_to?(:enabled?) \
-       && Datadog::Contrib::Analytics.respond_to?(:set_sample_rate)
-   end
-
    def analytics_enabled?
      # [Deprecated] options[:analytics_enabled] will be removed in the future
-
+     options.fetch(:analytics_enabled, false)
    end

    def analytics_sample_rate
data/lib/graphql/tracing/legacy_hooks_trace.rb
ADDED
@@ -0,0 +1,74 @@
+ # frozen_string_literal: true
+ module GraphQL
+   module Tracing
+     module LegacyHooksTrace
+       def execute_multiplex(multiplex:)
+         multiplex_instrumenters = multiplex.schema.instrumenters[:multiplex]
+         query_instrumenters = multiplex.schema.instrumenters[:query]
+         # First, run multiplex instrumentation, then query instrumentation for each query
+         RunHooks.call_hooks(multiplex_instrumenters, multiplex, :before_multiplex, :after_multiplex) do
+           RunHooks.each_query_call_hooks(query_instrumenters, multiplex.queries) do
+             super
+           end
+         end
+       end
+
+       module RunHooks
+         module_function
+         # Call the before_ hooks of each query,
+         # Then yield if no errors.
+         # `call_hooks` takes care of appropriate cleanup.
+         def each_query_call_hooks(instrumenters, queries, i = 0)
+           if i >= queries.length
+             yield
+           else
+             query = queries[i]
+             call_hooks(instrumenters, query, :before_query, :after_query) {
+               each_query_call_hooks(instrumenters, queries, i + 1) {
+                 yield
+               }
+             }
+           end
+         end
+
+         # Call each before hook, and if they all succeed, yield.
+         # If they don't all succeed, call after_ for each one that succeeded.
+         def call_hooks(instrumenters, object, before_hook_name, after_hook_name)
+           begin
+             successful = []
+             instrumenters.each do |instrumenter|
+               instrumenter.public_send(before_hook_name, object)
+               successful << instrumenter
+             end
+
+             # if any before hooks raise an exception, quit calling before hooks,
+             # but call the after hooks on anything that succeeded but also
+             # raise the exception that came from the before hook.
+           rescue GraphQL::ExecutionError => err
+             object.context.errors << err
+           rescue => e
+             raise call_after_hooks(successful, object, after_hook_name, e)
+           end
+
+           begin
+             yield # Call the user code
+           ensure
+             ex = call_after_hooks(successful, object, after_hook_name, nil)
+             raise ex if ex
+           end
+         end
+
+         def call_after_hooks(instrumenters, object, after_hook_name, ex)
+           instrumenters.reverse_each do |instrumenter|
+             begin
+               instrumenter.public_send(after_hook_name, object)
+             rescue => e
+               ex = e
+             end
+           end
+           ex
+         end
+       end
+     end
+   end
+ end
data/lib/graphql/tracing/platform_tracing.rb
CHANGED
@@ -81,8 +81,10 @@ module GraphQL
    trace_name = tracing_name.sub("Tracing", "Trace")
    if GraphQL::Tracing.const_defined?(trace_name, false)
      trace_module = GraphQL::Tracing.const_get(trace_name)
+     warn("`use(#{self.name})` is deprecated, use the equivalent `trace_with(#{trace_module.name})` instead. More info: https://graphql-ruby.org/queries/tracing.html")
      schema_defn.trace_with(trace_module, **options)
    else
+     warn("`use(#{self.name})` and `Tracing::PlatformTracing` are deprecated. Use a `trace_with(...)` module instead. More info: https://graphql-ruby.org/queries/tracing.html. Please open an issue on the GraphQL-Ruby repo if you want to discuss further!")
      tracer = self.new(**options)
      schema_defn.tracer(tracer)
    end
data/lib/graphql/tracing/{prometheus_tracing → prometheus_trace}/graphql_collector.rb
RENAMED
@@ -2,7 +2,7 @@

  module GraphQL
    module Tracing
-
+     module PrometheusTrace
        class GraphQLCollector < ::PrometheusExporter::Server::TypeCollector
          def initialize
            @graphql_gauge = PrometheusExporter::Metric::Base.default_aggregation.new(
@@ -28,5 +28,7 @@ module GraphQL
        end
      end
    end
+   # Backwards-compat:
+   PrometheusTracing::GraphQLCollector = PrometheusTrace::GraphQLCollector
  end
end
data/lib/graphql/tracing/sentry_trace.rb
ADDED
@@ -0,0 +1,94 @@
+ # frozen_string_literal: true
+
+ module GraphQL
+   module Tracing
+     module SentryTrace
+       include PlatformTrace
+
+       {
+         "lex" => "graphql.lex",
+         "parse" => "graphql.parse",
+         "validate" => "graphql.validate",
+         "analyze_query" => "graphql.analyze",
+         "analyze_multiplex" => "graphql.analyze_multiplex",
+         "execute_multiplex" => "graphql.execute_multiplex",
+         "execute_query" => "graphql.execute",
+         "execute_query_lazy" => "graphql.execute"
+       }.each do |trace_method, platform_key|
+         module_eval <<-RUBY, __FILE__, __LINE__
+           def #{trace_method}(**data, &block)
+             instrument_execution("#{platform_key}", "#{trace_method}", data, &block)
+           end
+         RUBY
+       end
+
+       def platform_execute_field(platform_key, &block)
+         instrument_execution(platform_key, "execute_field", &block)
+       end
+
+       def platform_execute_field_lazy(platform_key, &block)
+         instrument_execution(platform_key, "execute_field_lazy", &block)
+       end
+
+       def platform_authorized(platform_key, &block)
+         instrument_execution(platform_key, "authorized", &block)
+       end
+
+       def platform_authorized_lazy(platform_key, &block)
+         instrument_execution(platform_key, "authorized_lazy", &block)
+       end
+
+       def platform_resolve_type(platform_key, &block)
+         instrument_execution(platform_key, "resolve_type", &block)
+       end
+
+       def platform_resolve_type_lazy(platform_key, &block)
+         instrument_execution(platform_key, "resolve_type_lazy", &block)
+       end
+
+       def platform_field_key(field)
+         "graphql.field.#{field.path}"
+       end
+
+       def platform_authorized_key(type)
+         "graphql.authorized.#{type.graphql_name}"
+       end
+
+       def platform_resolve_type_key(type)
+         "graphql.resolve_type.#{type.graphql_name}"
+       end
+
+       private
+
+       def instrument_execution(platform_key, trace_method, data=nil, &block)
+         return yield unless Sentry.initialized?
+
+         Sentry.with_child_span(op: platform_key, start_timestamp: Sentry.utc_now.to_f) do |span|
+           result = block.call
+           span.finish
+
+           if trace_method == "execute_multiplex" && data.key?(:multiplex)
+             operation_names = data[:multiplex].queries.map{|q| operation_name(q) }
+             span.set_description(operation_names.join(", "))
+           elsif trace_method == "execute_query" && data.key?(:query)
+             span.set_description(operation_name(data[:query]))
+             span.set_data('graphql.document', data[:query].query_string)
+             span.set_data('graphql.operation.name', data[:query].selected_operation_name) if data[:query].selected_operation_name
+             span.set_data('graphql.operation.type', data[:query].selected_operation.operation_type)
+           end
+
+           result
+         end
+       end
+
+       def operation_name(query)
+         selected_op = query.selected_operation
+         if selected_op
+           [selected_op.operation_type, selected_op.name].compact.join(' ')
+         else
+           'GraphQL Operation'
+         end
+       end
+     end
+   end
+ end
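SentryTrace is a new, first-party tracer in this release. A minimal sketch of enabling it (assumes the sentry-ruby SDK is installed and Sentry.init has already run):

    require "sentry-ruby"

    class MySchema < GraphQL::Schema
      query QueryType
      # Emits graphql.parse / graphql.validate / graphql.execute child spans
      # plus per-field and authorization spans on the current Sentry transaction
      trace_with GraphQL::Tracing::SentryTrace
    end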
data/lib/graphql/tracing.rb
CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true
  require "graphql/tracing/trace"
  require "graphql/tracing/legacy_trace"
+ require "graphql/tracing/legacy_hooks_trace"

  # Legacy tracing:
  require "graphql/tracing/active_support_notifications_tracing"
@@ -21,11 +22,12 @@ require "graphql/tracing/appsignal_trace"
  require "graphql/tracing/data_dog_trace"
  require "graphql/tracing/new_relic_trace"
  require "graphql/tracing/notifications_trace"
+ require "graphql/tracing/sentry_trace"
  require "graphql/tracing/scout_trace"
  require "graphql/tracing/statsd_trace"
  require "graphql/tracing/prometheus_trace"
  if defined?(PrometheusExporter::Server)
-   require "graphql/tracing/
+   require "graphql/tracing/prometheus_trace/graphql_collector"
  end

  module GraphQL
data/lib/graphql/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: graphql
  version: !ruby/object:Gem::Version
-   version: 2.2.5
+   version: 2.2.6
  platform: ruby
  authors:
  - Robert Mosolgo
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-01-
+ date: 2024-01-25 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: racc
@@ -550,7 +550,6 @@ files:
  - lib/graphql/subscriptions/broadcast_analyzer.rb
  - lib/graphql/subscriptions/default_subscription_resolve_extension.rb
  - lib/graphql/subscriptions/event.rb
- - lib/graphql/subscriptions/instrumentation.rb
  - lib/graphql/subscriptions/serialize.rb
  - lib/graphql/testing.rb
  - lib/graphql/testing/helpers.rb
@@ -563,6 +562,7 @@ files:
  - lib/graphql/tracing/appsignal_tracing.rb
  - lib/graphql/tracing/data_dog_trace.rb
  - lib/graphql/tracing/data_dog_tracing.rb
+ - lib/graphql/tracing/legacy_hooks_trace.rb
  - lib/graphql/tracing/legacy_trace.rb
  - lib/graphql/tracing/new_relic_trace.rb
  - lib/graphql/tracing/new_relic_tracing.rb
@@ -571,10 +571,11 @@ files:
  - lib/graphql/tracing/platform_trace.rb
  - lib/graphql/tracing/platform_tracing.rb
  - lib/graphql/tracing/prometheus_trace.rb
+ - lib/graphql/tracing/prometheus_trace/graphql_collector.rb
  - lib/graphql/tracing/prometheus_tracing.rb
- - lib/graphql/tracing/prometheus_tracing/graphql_collector.rb
  - lib/graphql/tracing/scout_trace.rb
  - lib/graphql/tracing/scout_tracing.rb
+ - lib/graphql/tracing/sentry_trace.rb
  - lib/graphql/tracing/statsd_trace.rb
  - lib/graphql/tracing/statsd_tracing.rb
  - lib/graphql/tracing/trace.rb
@@ -630,7 +631,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
    version: '0'
  requirements: []
- rubygems_version: 3.
+ rubygems_version: 3.5.5
  signing_key:
  specification_version: 4
  summary: A GraphQL language and runtime for Ruby
data/lib/graphql/subscriptions/instrumentation.rb
REMOVED
@@ -1,28 +0,0 @@
- # frozen_string_literal: true
- module GraphQL
-   class Subscriptions
-     # Wrap the root fields of the subscription type with special logic for:
-     # - Registering the subscription during the first execution
-     # - Evaluating the triggered portion(s) of the subscription during later execution
-     class Instrumentation
-       def initialize(schema:)
-         @schema = schema
-       end
-
-       # If needed, prepare to gather events which this query subscribes to
-       def before_query(query)
-         if query.subscription? && !query.subscription_update?
-           query.context.namespace(:subscriptions)[:events] = []
-         end
-       end
-
-       # After checking the root fields, pass the gathered events to the store
-       def after_query(query)
-         events = query.context.namespace(:subscriptions)[:events]
-         if events && events.any?
-           @schema.subscriptions.write_subscription(query, events)
-         end
-       end
-     end
-   end
- end