graphql 2.0.17 → 2.0.19

Files changed (44)
  1. checksums.yaml +4 -4
  2. data/lib/graphql/analysis/ast.rb +2 -2
  3. data/lib/graphql/backtrace/tracer.rb +1 -1
  4. data/lib/graphql/execution/interpreter/resolve.rb +19 -0
  5. data/lib/graphql/execution/interpreter/runtime.rb +96 -88
  6. data/lib/graphql/execution/interpreter.rb +8 -13
  7. data/lib/graphql/execution/lazy.rb +2 -4
  8. data/lib/graphql/execution/multiplex.rb +2 -1
  9. data/lib/graphql/language/lexer.rb +216 -1505
  10. data/lib/graphql/language/lexer.ri +744 -0
  11. data/lib/graphql/language/nodes.rb +2 -2
  12. data/lib/graphql/language/parser.rb +39 -38
  13. data/lib/graphql/language/parser.y +38 -37
  14. data/lib/graphql/pagination/active_record_relation_connection.rb +0 -8
  15. data/lib/graphql/query/context.rb +57 -27
  16. data/lib/graphql/query.rb +15 -2
  17. data/lib/graphql/schema/field.rb +31 -19
  18. data/lib/graphql/schema/member/has_deprecation_reason.rb +3 -4
  19. data/lib/graphql/schema/member/has_fields.rb +6 -1
  20. data/lib/graphql/schema/object.rb +2 -4
  21. data/lib/graphql/schema/resolver/has_payload_type.rb +9 -9
  22. data/lib/graphql/schema/timeout.rb +24 -28
  23. data/lib/graphql/schema/warden.rb +8 -1
  24. data/lib/graphql/schema.rb +42 -0
  25. data/lib/graphql/static_validation/validator.rb +1 -1
  26. data/lib/graphql/tracing/active_support_notifications_trace.rb +16 -0
  27. data/lib/graphql/tracing/appoptics_trace.rb +231 -0
  28. data/lib/graphql/tracing/appsignal_trace.rb +66 -0
  29. data/lib/graphql/tracing/data_dog_trace.rb +148 -0
  30. data/lib/graphql/tracing/new_relic_trace.rb +75 -0
  31. data/lib/graphql/tracing/notifications_trace.rb +41 -0
  32. data/lib/graphql/tracing/platform_trace.rb +107 -0
  33. data/lib/graphql/tracing/platform_tracing.rb +15 -3
  34. data/lib/graphql/tracing/prometheus_trace.rb +89 -0
  35. data/lib/graphql/tracing/prometheus_tracing.rb +3 -3
  36. data/lib/graphql/tracing/scout_trace.rb +72 -0
  37. data/lib/graphql/tracing/statsd_trace.rb +56 -0
  38. data/lib/graphql/tracing.rb +136 -39
  39. data/lib/graphql/type_kinds.rb +6 -3
  40. data/lib/graphql/types/relay/base_connection.rb +1 -1
  41. data/lib/graphql/version.rb +1 -1
  42. data/lib/graphql.rb +10 -7
  43. metadata +31 -7
  44. data/lib/graphql/language/lexer.rl +0 -280
data/lib/graphql/language/nodes.rb:

@@ -28,8 +28,8 @@ module GraphQL
  def initialize(options = {})
  if options.key?(:position_source)
  position_source = options.delete(:position_source)
- @line = position_source.line
- @col = position_source.col
+ @line = position_source[1]
+ @col = position_source[2]
  end

  @filename = options.delete(:filename)
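Across this release the lexer stops handing the parser `GraphQL::Language::Token` objects and passes plain arrays instead. Judging only from the indexes used in these hunks ([0] for the token name, [1] for the line, [2] for the column, [3] for the source text, [4] for the previous token), the tuple appears to be laid out as sketched below; the field order is an inference from the diff, not a documented API.

```ruby
# Hypothetical illustration of the token tuple layout inferred from this diff:
#   [name, line, col, value, previous_token]
# The real lexer builds these internally; this only shows how the indexes
# seen in the hunks ([0]..[4]) would line up.
prev  = [:COMMENT, 1, 1, "# a comment", nil]
token = [:IDENTIFIER, 2, 1, "hello", prev]

name  = token[0] # => :IDENTIFIER  (was token.name)
line  = token[1] # => 2            (was token.line)
col   = token[2] # => 1            (was token.col)
value = token[3] # => "hello"      (was token.to_s)
prior = token[4] # => prev         (was token.prev_token)
```

Plain arrays avoid allocating a Token object per lexeme, which is presumably the point of the change.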
data/lib/graphql/language/parser.rb:

@@ -1,6 +1,6 @@
  #
  # DO NOT MODIFY!!!!
- # This file is automatically generated by Racc 1.6.0
+ # This file is automatically generated by Racc 1.6.2
  # from Racc grammar file "".
  #

@@ -16,22 +16,22 @@ module_eval(<<'...end parser.y/module_eval...', 'parser.y', 448)

  EMPTY_ARRAY = [].freeze

- def initialize(query_string, filename:, tracer: Tracing::NullTracer)
+ def initialize(query_string, filename:, trace: Tracing::NullTrace)
  raise GraphQL::ParseError.new("No query string was present", nil, nil, query_string) if query_string.nil?
  @query_string = query_string
  @filename = filename
- @tracer = tracer
+ @trace = trace
  @reused_next_token = [nil, nil]
  end

  def parse_document
  @document ||= begin
  # Break the string into tokens
- @tracer.trace("lex", {query_string: @query_string}) do
+ @trace.lex(query_string: @query_string) do
  @tokens ||= GraphQL.scan(@query_string)
  end
  # From the tokens, build an AST
- @tracer.trace("parse", {query_string: @query_string}) do
+ @trace.parse(query_string: @query_string) do
  if @tokens.empty?
  raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @query_string)
  else
@@ -44,17 +44,17 @@ end
  class << self
  attr_accessor :cache

- def parse(query_string, filename: nil, tracer: GraphQL::Tracing::NullTracer)
- new(query_string, filename: filename, tracer: tracer).parse_document
+ def parse(query_string, filename: nil, trace: GraphQL::Tracing::NullTrace)
+ new(query_string, filename: filename, trace: trace).parse_document
  end

- def parse_file(filename, tracer: GraphQL::Tracing::NullTracer)
+ def parse_file(filename, trace: GraphQL::Tracing::NullTrace)
  if cache
  cache.fetch(filename) do
- parse(File.read(filename), filename: filename, tracer: tracer)
+ parse(File.read(filename), filename: filename, trace: trace)
  end
  else
- parse(File.read(filename), filename: filename, tracer: tracer)
+ parse(File.read(filename), filename: filename, trace: trace)
  end
  end
  end
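The parser's `tracer:` keyword (an object with a generic `trace(key, metadata)` method) is replaced by a `trace:` keyword whose object exposes dedicated `lex` and `parse` methods that yield to the wrapped work, defaulting to `GraphQL::Tracing::NullTrace`. A minimal duck-typed sketch of the new calling convention, based only on the calls visible in this diff; the timing logic and the `TimedTrace` name are illustrative, not part of the gem:

```ruby
require "graphql"

# Hypothetical trace object: for parsing it only needs `lex` and `parse`
# methods that accept `query_string:` and yield, matching the calls made
# in parse_document above.
class TimedTrace
  def lex(query_string:)
    time("lex") { yield }
  end

  def parse(query_string:)
    time("parse") { yield }
  end

  private

  # Report how long the wrapped block took, then return its result.
  def time(step)
    started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    result = yield
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started
    warn "#{step} took #{(elapsed * 1000).round(2)}ms"
    result
  end
end

document = GraphQL::Language::Parser.parse("{ __typename }", trace: TimedTrace.new)
puts document.to_query_string
```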
@@ -66,7 +66,7 @@ def next_token
  if lexer_token.nil?
  nil
  else
- @reused_next_token[0] = lexer_token.name
+ @reused_next_token[0] = lexer_token[0]
  @reused_next_token[1] = lexer_token
  @reused_next_token
  end
@@ -77,13 +77,13 @@ def get_description(token)

  loop do
  prev_token = token
- token = token.prev_token
+ token = token[4]

  break if token.nil?
- break if token.name != :COMMENT
- break if prev_token.line != token.line + 1
+ break if token[0] != :COMMENT
+ break if prev_token[1] != token[1] + 1

- comments.unshift(token.to_s.sub(/^#\s*/, ""))
+ comments.unshift(token[3].sub(/^#\s*/, ""))
  end

  return nil if comments.empty?
@@ -99,11 +99,12 @@ def on_error(parser_token_id, lexer_token, vstack)
  if parser_token_name.nil?
  raise GraphQL::ParseError.new("Parse Error on unknown token: {token_id: #{parser_token_id}, lexer_token: #{lexer_token}} from #{@query_string}", nil, nil, @query_string, filename: @filename)
  else
- line, col = lexer_token.line_and_column
- if lexer_token.name == :BAD_UNICODE_ESCAPE
- raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+ line = lexer_token[1]
+ col = lexer_token[2]
+ if lexer_token[0] == :BAD_UNICODE_ESCAPE
+ raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
  else
- raise GraphQL::ParseError.new("Parse error on #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+ raise GraphQL::ParseError.new("Parse error on #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
  end
  end
  end
@@ -111,8 +112,8 @@ end

  def make_node(node_name, assigns)
  assigns.each do |key, value|
- if key != :position_source && value.is_a?(GraphQL::Language::Token)
- assigns[key] = value.to_s
+ if key != :position_source && value.is_a?(Array) && value[0].is_a?(Symbol)
+ assigns[key] = value[3]
  end
  end

@@ -1280,7 +1281,7 @@ module_eval(<<'.,.,', 'parser.y', 119)

  module_eval(<<'.,.,', 'parser.y', 164)
  def _reduce_63(val, _values, result)
- result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  result
  end
  .,.,
@@ -1336,21 +1337,21 @@ module_eval(<<'.,.,', 'parser.y', 179)

  module_eval(<<'.,.,', 'parser.y', 182)
  def _reduce_71(val, _values, result)
- result = val[0].to_f
+ result = val[0][3].to_f
  result
  end
  .,.,

  module_eval(<<'.,.,', 'parser.y', 183)
  def _reduce_72(val, _values, result)
- result = val[0].to_i
+ result = val[0][3].to_i
  result
  end
  .,.,

  module_eval(<<'.,.,', 'parser.y', 184)
  def _reduce_73(val, _values, result)
- result = val[0].to_s
+ result = val[0][3]
  result
  end
  .,.,
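The generated reduce actions follow the same pattern: wherever they previously called a method on a `Token`, they now index into the tuple, e.g. `val[1][1]` for a definition line or `val[0][3]` for a literal's source text. A small illustration of the literal rules above; the token values are made up:

```ruby
# Illustrative token tuples as the lexer might emit them for literals
# (values invented for the example):
int_token   = [:INT, 1, 9, "42", nil]
float_token = [:FLOAT, 1, 12, "3.14", int_token]

# Old reduce actions: val[0].to_i / val[0].to_f coerced the Token object.
# New reduce actions: coerce the raw source text stored at index 3.
int_token[3].to_i   # => 42
float_token[3].to_f # => 3.14
```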
@@ -1597,7 +1598,7 @@ module_eval(<<'.,.,', 'parser.y', 277)

  module_eval(<<'.,.,', 'parser.y', 286)
  def _reduce_114(val, _values, result)
- result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0].line, directives: val[1], **val[3])
+ result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0][1], directives: val[1], **val[3])
  result
  end
  .,.,
@@ -1613,7 +1614,7 @@ module_eval(<<'.,.,', 'parser.y', 290)

  module_eval(<<'.,.,', 'parser.y', 293)
  def _reduce_117(val, _values, result)
- result = { val[0].to_s.to_sym => val[2] }
+ result = { val[0][3].to_sym => val[2] }
  result
  end
  .,.,
@@ -1766,7 +1767,7 @@ module_eval(<<'.,.,', 'parser.y', 343)

  module_eval(<<'.,.,', 'parser.y', 353)
  def _reduce_151(val, _values, result)
- result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1774,7 +1775,7 @@ module_eval(<<'.,.,', 'parser.y', 353)

  module_eval(<<'.,.,', 'parser.y', 358)
  def _reduce_152(val, _values, result)
- result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1840,7 +1841,7 @@ module_eval(<<'.,.,', 'parser.y', 376)

  module_eval(<<'.,.,', 'parser.y', 380)
  def _reduce_162(val, _values, result)
- result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1876,7 +1877,7 @@ module_eval(<<'.,.,', 'parser.y', 389)

  module_eval(<<'.,.,', 'parser.y', 393)
  def _reduce_167(val, _values, result)
- result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1919,7 +1920,7 @@ module_eval(<<'.,.,', 'parser.y', 403)

  module_eval(<<'.,.,', 'parser.y', 407)
  def _reduce_173(val, _values, result)
- result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1941,7 +1942,7 @@ module_eval(<<'.,.,', 'parser.y', 412)

  module_eval(<<'.,.,', 'parser.y', 416)
  def _reduce_176(val, _values, result)
- result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1949,7 +1950,7 @@ module_eval(<<'.,.,', 'parser.y', 416)

  module_eval(<<'.,.,', 'parser.y', 421)
  def _reduce_177(val, _values, result)
- result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1957,7 +1958,7 @@ module_eval(<<'.,.,', 'parser.y', 421)

  module_eval(<<'.,.,', 'parser.y', 426)
  def _reduce_178(val, _values, result)
- result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1965,7 +1966,7 @@ module_eval(<<'.,.,', 'parser.y', 426)

  module_eval(<<'.,.,', 'parser.y', 431)
  def _reduce_179(val, _values, result)
- result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])

  result
  end
@@ -1977,14 +1978,14 @@ module_eval(<<'.,.,', 'parser.y', 431)

  module_eval(<<'.,.,', 'parser.y', 439)
  def _reduce_182(val, _values, result)
- result = [make_node(:DirectiveLocation, name: val[0].to_s, position_source: val[0])]
+ result = [make_node(:DirectiveLocation, name: val[0][3], position_source: val[0])]
  result
  end
  .,.,

  module_eval(<<'.,.,', 'parser.y', 440)
  def _reduce_183(val, _values, result)
- val[0] << make_node(:DirectiveLocation, name: val[2].to_s, position_source: val[2])
+ val[0] << make_node(:DirectiveLocation, name: val[2][3], position_source: val[2])
  result
  end
  .,.,
data/lib/graphql/language/parser.y:

@@ -162,7 +162,7 @@ rule
  | schema_keyword

  enum_value_definition:
- description_opt enum_name directives_list_opt { result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1]) }
+ description_opt enum_name directives_list_opt { result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) }

  enum_value_definitions:
  enum_value_definition { result = [val[0]] }
@@ -180,9 +180,9 @@ rule
  name COLON input_value { result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0])}

  literal_value:
- FLOAT { result = val[0].to_f }
- | INT { result = val[0].to_i }
- | STRING { result = val[0].to_s }
+ FLOAT { result = val[0][3].to_f }
+ | INT { result = val[0][3].to_i }
+ | STRING { result = val[0][3] }
  | TRUE { result = true }
  | FALSE { result = false }
  | null_value
@@ -284,14 +284,14 @@ rule
  | directive_definition

  schema_definition:
- SCHEMA directives_list_opt LCURLY operation_type_definition_list RCURLY { result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0].line, directives: val[1], **val[3]) }
+ SCHEMA directives_list_opt LCURLY operation_type_definition_list RCURLY { result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0][1], directives: val[1], **val[3]) }

  operation_type_definition_list:
  operation_type_definition
  | operation_type_definition_list operation_type_definition { result = val[0].merge(val[1]) }

  operation_type_definition:
- operation_type COLON name { result = { val[0].to_s.to_sym => val[2] } }
+ operation_type COLON name { result = { val[0][3].to_sym => val[2] } }

  type_definition:
  scalar_type_definition
@@ -351,12 +351,12 @@ rule

  scalar_type_definition:
  description_opt SCALAR name directives_list_opt {
- result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  object_type_definition:
  description_opt TYPE name implements_opt directives_list_opt field_definition_list_opt {
- result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  implements_opt:
@@ -378,7 +378,7 @@ rule

  input_value_definition:
  description_opt name COLON type default_value_opt directives_list_opt {
- result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  input_value_definition_list:
@@ -391,7 +391,7 @@ rule

  field_definition:
  description_opt name arguments_definitions_opt COLON type directives_list_opt {
- result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  field_definition_list_opt:
@@ -405,7 +405,7 @@ rule

  interface_type_definition:
  description_opt INTERFACE name implements_opt directives_list_opt field_definition_list_opt {
- result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  union_members:
@@ -414,22 +414,22 @@ rule

  union_type_definition:
  description_opt UNION name directives_list_opt EQUALS union_members {
- result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  enum_type_definition:
  description_opt ENUM name directives_list_opt LCURLY enum_value_definitions RCURLY {
- result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  input_object_type_definition:
  description_opt INPUT name directives_list_opt LCURLY input_value_definition_list RCURLY {
- result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  directive_definition:
  description_opt DIRECTIVE DIR_SIGN name arguments_definitions_opt directive_repeatable_opt ON directive_locations {
- result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+ result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
  }

  directive_repeatable_opt:
@@ -437,8 +437,8 @@ rule
  | REPEATABLE

  directive_locations:
- name { result = [make_node(:DirectiveLocation, name: val[0].to_s, position_source: val[0])] }
- | directive_locations PIPE name { val[0] << make_node(:DirectiveLocation, name: val[2].to_s, position_source: val[2]) }
+ name { result = [make_node(:DirectiveLocation, name: val[0][3], position_source: val[0])] }
+ | directive_locations PIPE name { val[0] << make_node(:DirectiveLocation, name: val[2][3], position_source: val[2]) }
  end

  ---- header ----
@@ -448,22 +448,22 @@ end

  EMPTY_ARRAY = [].freeze

- def initialize(query_string, filename:, tracer: Tracing::NullTracer)
+ def initialize(query_string, filename:, trace: Tracing::NullTrace)
  raise GraphQL::ParseError.new("No query string was present", nil, nil, query_string) if query_string.nil?
  @query_string = query_string
  @filename = filename
- @tracer = tracer
+ @trace = trace
  @reused_next_token = [nil, nil]
  end

  def parse_document
  @document ||= begin
  # Break the string into tokens
- @tracer.trace("lex", {query_string: @query_string}) do
+ @trace.lex(query_string: @query_string) do
  @tokens ||= GraphQL.scan(@query_string)
  end
  # From the tokens, build an AST
- @tracer.trace("parse", {query_string: @query_string}) do
+ @trace.parse(query_string: @query_string) do
  if @tokens.empty?
  raise GraphQL::ParseError.new("Unexpected end of document", nil, nil, @query_string)
  else
@@ -476,17 +476,17 @@ end
  class << self
  attr_accessor :cache

- def parse(query_string, filename: nil, tracer: GraphQL::Tracing::NullTracer)
- new(query_string, filename: filename, tracer: tracer).parse_document
+ def parse(query_string, filename: nil, trace: GraphQL::Tracing::NullTrace)
+ new(query_string, filename: filename, trace: trace).parse_document
  end

- def parse_file(filename, tracer: GraphQL::Tracing::NullTracer)
+ def parse_file(filename, trace: GraphQL::Tracing::NullTrace)
  if cache
  cache.fetch(filename) do
- parse(File.read(filename), filename: filename, tracer: tracer)
+ parse(File.read(filename), filename: filename, trace: trace)
  end
  else
- parse(File.read(filename), filename: filename, tracer: tracer)
+ parse(File.read(filename), filename: filename, trace: trace)
  end
  end
  end
@@ -498,7 +498,7 @@ def next_token
  if lexer_token.nil?
  nil
  else
- @reused_next_token[0] = lexer_token.name
+ @reused_next_token[0] = lexer_token[0]
  @reused_next_token[1] = lexer_token
  @reused_next_token
  end
@@ -509,13 +509,13 @@ def get_description(token)

  loop do
  prev_token = token
- token = token.prev_token
+ token = token[4]

  break if token.nil?
- break if token.name != :COMMENT
- break if prev_token.line != token.line + 1
+ break if token[0] != :COMMENT
+ break if prev_token[1] != token[1] + 1

- comments.unshift(token.to_s.sub(/^#\s*/, ""))
+ comments.unshift(token[3].sub(/^#\s*/, ""))
  end

  return nil if comments.empty?
@@ -531,11 +531,12 @@ def on_error(parser_token_id, lexer_token, vstack)
  if parser_token_name.nil?
  raise GraphQL::ParseError.new("Parse Error on unknown token: {token_id: #{parser_token_id}, lexer_token: #{lexer_token}} from #{@query_string}", nil, nil, @query_string, filename: @filename)
  else
- line, col = lexer_token.line_and_column
- if lexer_token.name == :BAD_UNICODE_ESCAPE
- raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+ line = lexer_token[1]
+ col = lexer_token[2]
+ if lexer_token[0] == :BAD_UNICODE_ESCAPE
+ raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
  else
- raise GraphQL::ParseError.new("Parse error on #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+ raise GraphQL::ParseError.new("Parse error on #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
  end
  end
  end
@@ -543,8 +544,8 @@ end

  def make_node(node_name, assigns)
  assigns.each do |key, value|
- if key != :position_source && value.is_a?(GraphQL::Language::Token)
- assigns[key] = value.to_s
+ if key != :position_source && value.is_a?(Array) && value[0].is_a?(Symbol)
+ assigns[key] = value[3]
  end
  end

data/lib/graphql/pagination/active_record_relation_connection.rb:

@@ -7,14 +7,6 @@ module GraphQL
  class ActiveRecordRelationConnection < Pagination::RelationConnection
  private

- def relation_larger_than(relation, initial_offset, size)
- if already_loaded?(relation)
- (relation.size + initial_offset) > size
- else
- set_offset(sliced_nodes, initial_offset + size).exists?
- end
- end
-
  def relation_count(relation)
  int_or_hash = if already_loaded?(relation)
  relation.size
data/lib/graphql/query/context.rb:

@@ -91,22 +91,31 @@ module GraphQL
  end

  class ScopedContext
+ NO_PATH = [].freeze
+ NO_CONTEXT = {}.freeze
+
  def initialize(query_context)
  @query_context = query_context
- @scoped_contexts = {}
- @no_path = [].freeze
+ @scoped_contexts = nil
+ @all_keys = nil
  end

  def merged_context
- merged_ctx = {}
- each_present_path_ctx do |path_ctx|
- merged_ctx = path_ctx.merge(merged_ctx)
+ if @scoped_contexts.nil?
+ NO_CONTEXT
+ else
+ merged_ctx = {}
+ each_present_path_ctx do |path_ctx|
+ merged_ctx = path_ctx.merge(merged_ctx)
+ end
+ merged_ctx
  end
- merged_ctx
  end

  def merge!(hash)
- ctx = @scoped_contexts
+ @all_keys ||= Set.new
+ @all_keys.merge(hash.keys)
+ ctx = @scoped_contexts ||= {}
  current_path.each do |path_part|
  ctx = ctx[path_part] ||= { parent: ctx }
  end
@@ -114,15 +123,12 @@ module GraphQL
  this_scoped_ctx.merge!(hash)
  end

- def current_path
- thread_info = Thread.current[:__graphql_runtime_info]
- (thread_info && thread_info[:current_path]) || @no_path
- end
-
  def key?(key)
- each_present_path_ctx do |path_ctx|
- if path_ctx.key?(key)
- return true
+ if @all_keys && @all_keys.include?(key)
+ each_present_path_ctx do |path_ctx|
+ if path_ctx.key?(key)
+ return true
+ end
  end
  end
  false
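These ScopedContext changes look like allocation and lookup optimizations: the nested per-path hash and the key set are only built once `merge!` is actually called, `merged_context` returns a shared frozen empty hash until then, and `key?` can answer "no" without walking the path tree when the key was never merged at any scope. A standalone sketch of that lookup strategy, not the gem's class; names and structure are illustrative only:

```ruby
require "set"

# Sketch: values stored per response path, plus a flat Set of every key
# ever merged so `key?` can bail out without walking the path tree.
class PathScopedStore
  def initialize
    @by_path = nil   # built lazily, like @scoped_contexts above
    @all_keys = nil  # built lazily, like @all_keys above
  end

  def merge!(path, hash)
    (@all_keys ||= Set.new).merge(hash.keys)
    node = (@by_path ||= {})
    path.each { |part| node = (node[part] ||= { parent: node }) }
    (node[:scoped_context] ||= {}).merge!(hash)
  end

  def key?(path, key)
    return false unless @all_keys&.include?(key) # fast negative answer
    node = @by_path
    path.each do |part|
      break unless node.key?(part)
      node = node[part]
    end
    # Walk back up toward the root, checking each scope that has values.
    while node
      return true if node[:scoped_context]&.key?(key)
      node = node[:parent]
    end
    false
  end
end

store = PathScopedStore.new
store.merge!(["user", "posts"], { locale: "en" })
store.key?(["user", "posts", 0, "title"], :locale) # => true
store.key?(["user"], :missing)                     # => false via the Set short-circuit
```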
@@ -137,6 +143,10 @@ module GraphQL
  nil
  end

+ def current_path
+ @query_context.current_path || NO_PATH
+ end
+
  def dig(key, *other_keys)
  each_present_path_ctx do |path_ctx|
  if path_ctx.key?(key)
@@ -157,19 +167,23 @@ module GraphQL
  # but look up the tree for previously-assigned scoped values
  def each_present_path_ctx
  ctx = @scoped_contexts
- current_path.each do |path_part|
- if ctx.key?(path_part)
- ctx = ctx[path_part]
- else
- break
+ if ctx.nil?
+ # no-op
+ else
+ current_path.each do |path_part|
+ if ctx.key?(path_part)
+ ctx = ctx[path_part]
+ else
+ break
+ end
  end
- end

- while ctx
- if (scoped_ctx = ctx[:scoped_context])
- yield(scoped_ctx)
+ while ctx
+ if (scoped_ctx = ctx[:scoped_context])
+ yield(scoped_ctx)
+ end
+ ctx = ctx[:parent]
  end
- ctx = ctx[:parent]
  end
  end
  end
@@ -209,14 +223,30 @@ module GraphQL
  elsif @provided_values.key?(key)
  @provided_values[key]
  elsif RUNTIME_METADATA_KEYS.include?(key)
- thread_info = Thread.current[:__graphql_runtime_info]
- thread_info && thread_info[key]
+ if key == :current_path
+ current_path
+ else
+ thread_info = Thread.current[:__graphql_runtime_info]
+ thread_info && thread_info[key]
+ end
  else
  # not found
  nil
  end
  end

+ def current_path
+ thread_info = Thread.current[:__graphql_runtime_info]
+ path = thread_info &&
+ (result = thread_info[:current_result]) &&
+ (result.path)
+ if path && (rn = thread_info[:current_result_name])
+ path = path.dup
+ path.push(rn)
+ end
+ path
+ end
+
  def delete(key)
  if @scoped_context.key?(key)
  @scoped_context.delete(key)
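Finally, `context[:current_path]` is no longer read straight from the runtime's thread-local hash; the new `current_path` helper rebuilds it from the current result object's path plus the name of the result currently being written. From the caller's side the metadata key reads the same as before; a hedged usage sketch, with invented schema and field names:

```ruby
require "graphql"

class QueryType < GraphQL::Schema::Object
  field :greeting, String, null: false

  def greeting
    # Runtime metadata lookup; with the change above, this value is built
    # from the current result's path plus the field being resolved.
    path = context[:current_path]
    "resolved at #{path.inspect}"
  end
end

class MySchema < GraphQL::Schema
  query(QueryType)
end

puts MySchema.execute("{ greeting }")["data"]["greeting"]
# Expected to print something like: resolved at ["greeting"]
```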