graphql 2.0.18 → 2.0.19

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 7f43155bcb78f5842e409f8c001b7d031578604d2e7d962457cc7fd92e1d68d4
-  data.tar.gz: e86aa4c3071f63d605f9e2e3833ba967282f906c3ff0e1252db71b063fe69cd9
+  metadata.gz: 0e3f276db828fe3908e759e87f8cc5d33ae761c3ea80bd53b92b16a971ef507a
+  data.tar.gz: fc59e806f45125da3065ed4d7758170f88231377664b9a130e0896d6b2790fa3
 SHA512:
-  metadata.gz: 79c16409d45a11f1bba6623e1d97877984bfe10577ab5241ecd3ebe515fefb6cd42940f336640c60cf19f94c61d902692f26b0497699e4ca9e912f2532fa6168
-  data.tar.gz: a9f5e954d7988a440c083cd0afbb6f5ff72ab77d30d4fab708f9f501d2ce8af49b13fde571e50788c9ba79fd943d38e4c207e4ac9ddccd93c80d2c0f9ef8f997
+  metadata.gz: 580e967d89228c0174072f5857cb646ea7495ee5a00587249ffc3034ffe9b536f751c09b8112e443b0c93efcd868db01c584497364897d23c7e798ee3f496d64
+  data.tar.gz: 43db761134f0ea83311f6a8b62b3b34b9dccdc335869c9067f43dcc5b9fc7f4023b77b8bb70cf203435ac25711d16d0ce5aa620478d351950d0db9f2e7bee9c7
@@ -129,13 +129,13 @@ module GraphQL
     end
 
     def self.emit(token_name, ts, te, meta, token_value)
-      meta[:tokens] << token = GraphQL::Language::Token.new(
+      meta[:tokens] << token = [
        token_name,
-       token_value,
        meta[:line],
        meta[:col],
+       token_value,
        meta[:previous_token],
-      )
+      ]
      meta[:previous_token] = token
      # Bump the column counter for the next token
      meta[:col] += te - ts
@@ -168,13 +168,13 @@ module GraphQL
     end
 
     def self.record_comment(ts, te, meta, str)
-      token = GraphQL::Language::Token.new(
+      token = [
        :COMMENT,
-       str,
        meta[:line],
        meta[:col],
+       str,
        meta[:previous_token],
-      )
+      ]
 
      meta[:previous_token] = token
 
@@ -28,8 +28,8 @@ module GraphQL
       def initialize(options = {})
         if options.key?(:position_source)
           position_source = options.delete(:position_source)
-          @line = position_source.line
-          @col = position_source.col
+          @line = position_source[1]
+          @col = position_source[2]
         end
 
         @filename = options.delete(:filename)
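
Note: the three hunks above replace GraphQL::Language::Token instances with plain arrays. Judging from the argument order in `emit` and the index reads throughout this diff (`position_source[1]`, `position_source[2]`, `token[3]`, `token[4]`), the layout appears to be `[name, line, col, value, previous_token]`. A small illustration of that assumed layout, with made-up values:

    # Assumed layout after this change: [name, line, col, value, previous_token]
    token = [:IDENTIFIER, 3, 7, "postTitle", nil]

    token[0] # => :IDENTIFIER  (formerly token.name)
    token[1] # => 3            (formerly token.line)
    token[2] # => 7            (formerly token.col)
    token[3] # => "postTitle"  (formerly token.to_s)
    token[4] # => nil          (formerly token.prev_token)
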
@@ -1,6 +1,6 @@
 #
 # DO NOT MODIFY!!!!
-# This file is automatically generated by Racc 1.6.0
+# This file is automatically generated by Racc 1.6.2
 # from Racc grammar file "".
 #
 
@@ -66,7 +66,7 @@ def next_token
   if lexer_token.nil?
     nil
   else
-    @reused_next_token[0] = lexer_token.name
+    @reused_next_token[0] = lexer_token[0]
     @reused_next_token[1] = lexer_token
     @reused_next_token
   end
@@ -77,13 +77,13 @@ def get_description(token)
 
   loop do
     prev_token = token
-    token = token.prev_token
+    token = token[4]
 
     break if token.nil?
-    break if token.name != :COMMENT
-    break if prev_token.line != token.line + 1
+    break if token[0] != :COMMENT
+    break if prev_token[1] != token[1] + 1
 
-    comments.unshift(token.to_s.sub(/^#\s*/, ""))
+    comments.unshift(token[3].sub(/^#\s*/, ""))
   end
 
   return nil if comments.empty?
@@ -99,11 +99,12 @@ def on_error(parser_token_id, lexer_token, vstack)
   if parser_token_name.nil?
     raise GraphQL::ParseError.new("Parse Error on unknown token: {token_id: #{parser_token_id}, lexer_token: #{lexer_token}} from #{@query_string}", nil, nil, @query_string, filename: @filename)
   else
-    line, col = lexer_token.line_and_column
-    if lexer_token.name == :BAD_UNICODE_ESCAPE
-      raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+    line = lexer_token[1]
+    col = lexer_token[2]
+    if lexer_token[0] == :BAD_UNICODE_ESCAPE
+      raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
     else
-      raise GraphQL::ParseError.new("Parse error on #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+      raise GraphQL::ParseError.new("Parse error on #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
     end
   end
 end
@@ -111,8 +112,8 @@ end
 
 def make_node(node_name, assigns)
   assigns.each do |key, value|
-    if key != :position_source && value.is_a?(GraphQL::Language::Token)
-      assigns[key] = value.to_s
+    if key != :position_source && value.is_a?(Array) && value[0].is_a?(Symbol)
+      assigns[key] = value[3]
     end
   end
 
@@ -1280,7 +1281,7 @@ module_eval(<<'.,.,', 'parser.y', 119)
 
 module_eval(<<'.,.,', 'parser.y', 164)
   def _reduce_63(val, _values, result)
-    result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     result
   end
 .,.,
@@ -1336,21 +1337,21 @@ module_eval(<<'.,.,', 'parser.y', 179)
 
 module_eval(<<'.,.,', 'parser.y', 182)
   def _reduce_71(val, _values, result)
-    result = val[0].to_f
+    result = val[0][3].to_f
     result
   end
 .,.,
 
 module_eval(<<'.,.,', 'parser.y', 183)
   def _reduce_72(val, _values, result)
-    result = val[0].to_i
+    result = val[0][3].to_i
     result
   end
 .,.,
 
 module_eval(<<'.,.,', 'parser.y', 184)
   def _reduce_73(val, _values, result)
-    result = val[0].to_s
+    result = val[0][3]
     result
   end
 .,.,
@@ -1597,7 +1598,7 @@ module_eval(<<'.,.,', 'parser.y', 277)
 
 module_eval(<<'.,.,', 'parser.y', 286)
   def _reduce_114(val, _values, result)
-    result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0].line, directives: val[1], **val[3])
+    result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0][1], directives: val[1], **val[3])
     result
   end
 .,.,
@@ -1613,7 +1614,7 @@ module_eval(<<'.,.,', 'parser.y', 290)
 
 module_eval(<<'.,.,', 'parser.y', 293)
   def _reduce_117(val, _values, result)
-    result = { val[0].to_s.to_sym => val[2] }
+    result = { val[0][3].to_sym => val[2] }
     result
   end
 .,.,
@@ -1766,7 +1767,7 @@ module_eval(<<'.,.,', 'parser.y', 343)
 
 module_eval(<<'.,.,', 'parser.y', 353)
   def _reduce_151(val, _values, result)
-    result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1774,7 +1775,7 @@ module_eval(<<'.,.,', 'parser.y', 353)
 
 module_eval(<<'.,.,', 'parser.y', 358)
   def _reduce_152(val, _values, result)
-    result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1840,7 +1841,7 @@ module_eval(<<'.,.,', 'parser.y', 376)
 
 module_eval(<<'.,.,', 'parser.y', 380)
   def _reduce_162(val, _values, result)
-    result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1876,7 +1877,7 @@ module_eval(<<'.,.,', 'parser.y', 389)
 
 module_eval(<<'.,.,', 'parser.y', 393)
   def _reduce_167(val, _values, result)
-    result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1919,7 +1920,7 @@ module_eval(<<'.,.,', 'parser.y', 403)
 
 module_eval(<<'.,.,', 'parser.y', 407)
   def _reduce_173(val, _values, result)
-    result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1941,7 +1942,7 @@ module_eval(<<'.,.,', 'parser.y', 412)
 
 module_eval(<<'.,.,', 'parser.y', 416)
   def _reduce_176(val, _values, result)
-    result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1949,7 +1950,7 @@ module_eval(<<'.,.,', 'parser.y', 416)
 
 module_eval(<<'.,.,', 'parser.y', 421)
   def _reduce_177(val, _values, result)
-    result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1957,7 +1958,7 @@ module_eval(<<'.,.,', 'parser.y', 421)
 
 module_eval(<<'.,.,', 'parser.y', 426)
   def _reduce_178(val, _values, result)
-    result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1965,7 +1966,7 @@ module_eval(<<'.,.,', 'parser.y', 426)
 
 module_eval(<<'.,.,', 'parser.y', 431)
   def _reduce_179(val, _values, result)
-    result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+    result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
 
     result
   end
@@ -1977,14 +1978,14 @@ module_eval(<<'.,.,', 'parser.y', 431)
 
 module_eval(<<'.,.,', 'parser.y', 439)
   def _reduce_182(val, _values, result)
-    result = [make_node(:DirectiveLocation, name: val[0].to_s, position_source: val[0])]
+    result = [make_node(:DirectiveLocation, name: val[0][3], position_source: val[0])]
     result
   end
 .,.,
 
 module_eval(<<'.,.,', 'parser.y', 440)
   def _reduce_183(val, _values, result)
-    val[0] << make_node(:DirectiveLocation, name: val[2].to_s, position_source: val[2])
+    val[0] << make_node(:DirectiveLocation, name: val[2][3], position_source: val[2])
     result
   end
 .,.,
@@ -162,7 +162,7 @@ rule
   | schema_keyword
 
   enum_value_definition:
-    description_opt enum_name directives_list_opt { result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1]) }
+    description_opt enum_name directives_list_opt { result = make_node(:EnumValueDefinition, name: val[1], directives: val[2], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1]) }
 
   enum_value_definitions:
     enum_value_definition { result = [val[0]] }
@@ -180,9 +180,9 @@ rule
     name COLON input_value { result = make_node(:Argument, name: val[0], value: val[2], position_source: val[0])}
 
   literal_value:
-    FLOAT { result = val[0].to_f }
-    | INT { result = val[0].to_i }
-    | STRING { result = val[0].to_s }
+    FLOAT { result = val[0][3].to_f }
+    | INT { result = val[0][3].to_i }
+    | STRING { result = val[0][3] }
     | TRUE { result = true }
     | FALSE { result = false }
     | null_value
@@ -284,14 +284,14 @@ rule
   | directive_definition
 
   schema_definition:
-    SCHEMA directives_list_opt LCURLY operation_type_definition_list RCURLY { result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0].line, directives: val[1], **val[3]) }
+    SCHEMA directives_list_opt LCURLY operation_type_definition_list RCURLY { result = make_node(:SchemaDefinition, position_source: val[0], definition_line: val[0][1], directives: val[1], **val[3]) }
 
   operation_type_definition_list:
     operation_type_definition
     | operation_type_definition_list operation_type_definition { result = val[0].merge(val[1]) }
 
   operation_type_definition:
-    operation_type COLON name { result = { val[0].to_s.to_sym => val[2] } }
+    operation_type COLON name { result = { val[0][3].to_sym => val[2] } }
 
   type_definition:
     scalar_type_definition
@@ -351,12 +351,12 @@ rule
 
   scalar_type_definition:
     description_opt SCALAR name directives_list_opt {
-      result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:ScalarTypeDefinition, name: val[2], directives: val[3], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   object_type_definition:
     description_opt TYPE name implements_opt directives_list_opt field_definition_list_opt {
-      result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:ObjectTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   implements_opt:
@@ -378,7 +378,7 @@ rule
 
   input_value_definition:
     description_opt name COLON type default_value_opt directives_list_opt {
-      result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:InputValueDefinition, name: val[1], type: val[3], default_value: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   input_value_definition_list:
@@ -391,7 +391,7 @@ rule
 
   field_definition:
     description_opt name arguments_definitions_opt COLON type directives_list_opt {
-      result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:FieldDefinition, name: val[1], arguments: val[2], type: val[4], directives: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   field_definition_list_opt:
@@ -405,7 +405,7 @@ rule
 
   interface_type_definition:
     description_opt INTERFACE name implements_opt directives_list_opt field_definition_list_opt {
-      result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:InterfaceTypeDefinition, name: val[2], interfaces: val[3], directives: val[4], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   union_members:
@@ -414,22 +414,22 @@ rule
 
   union_type_definition:
     description_opt UNION name directives_list_opt EQUALS union_members {
-      result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:UnionTypeDefinition, name: val[2], directives: val[3], types: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   enum_type_definition:
     description_opt ENUM name directives_list_opt LCURLY enum_value_definitions RCURLY {
-      result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:EnumTypeDefinition, name: val[2], directives: val[3], values: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   input_object_type_definition:
     description_opt INPUT name directives_list_opt LCURLY input_value_definition_list RCURLY {
-      result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:InputObjectTypeDefinition, name: val[2], directives: val[3], fields: val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   directive_definition:
     description_opt DIRECTIVE DIR_SIGN name arguments_definitions_opt directive_repeatable_opt ON directive_locations {
-      result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1].line, position_source: val[0] || val[1])
+      result = make_node(:DirectiveDefinition, name: val[3], arguments: val[4], locations: val[7], repeatable: !!val[5], description: val[0] || get_description(val[1]), definition_line: val[1][1], position_source: val[0] || val[1])
     }
 
   directive_repeatable_opt:
@@ -437,8 +437,8 @@ rule
   | REPEATABLE
 
   directive_locations:
-    name { result = [make_node(:DirectiveLocation, name: val[0].to_s, position_source: val[0])] }
-    | directive_locations PIPE name { val[0] << make_node(:DirectiveLocation, name: val[2].to_s, position_source: val[2]) }
+    name { result = [make_node(:DirectiveLocation, name: val[0][3], position_source: val[0])] }
+    | directive_locations PIPE name { val[0] << make_node(:DirectiveLocation, name: val[2][3], position_source: val[2]) }
 end
 
 ---- header ----
@@ -498,7 +498,7 @@ def next_token
   if lexer_token.nil?
     nil
   else
-    @reused_next_token[0] = lexer_token.name
+    @reused_next_token[0] = lexer_token[0]
     @reused_next_token[1] = lexer_token
     @reused_next_token
   end
@@ -509,13 +509,13 @@ def get_description(token)
 
   loop do
     prev_token = token
-    token = token.prev_token
+    token = token[4]
 
     break if token.nil?
-    break if token.name != :COMMENT
-    break if prev_token.line != token.line + 1
+    break if token[0] != :COMMENT
+    break if prev_token[1] != token[1] + 1
 
-    comments.unshift(token.to_s.sub(/^#\s*/, ""))
+    comments.unshift(token[3].sub(/^#\s*/, ""))
   end
 
   return nil if comments.empty?
@@ -531,11 +531,12 @@ def on_error(parser_token_id, lexer_token, vstack)
   if parser_token_name.nil?
     raise GraphQL::ParseError.new("Parse Error on unknown token: {token_id: #{parser_token_id}, lexer_token: #{lexer_token}} from #{@query_string}", nil, nil, @query_string, filename: @filename)
   else
-    line, col = lexer_token.line_and_column
-    if lexer_token.name == :BAD_UNICODE_ESCAPE
-      raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+    line = lexer_token[1]
+    col = lexer_token[2]
+    if lexer_token[0] == :BAD_UNICODE_ESCAPE
+      raise GraphQL::ParseError.new("Parse error on bad Unicode escape sequence: #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
     else
-      raise GraphQL::ParseError.new("Parse error on #{lexer_token.to_s.inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
+      raise GraphQL::ParseError.new("Parse error on #{lexer_token[3].inspect} (#{parser_token_name}) at [#{line}, #{col}]", line, col, @query_string, filename: @filename)
     end
   end
 end
@@ -543,8 +544,8 @@ end
 
 def make_node(node_name, assigns)
   assigns.each do |key, value|
-    if key != :position_source && value.is_a?(GraphQL::Language::Token)
-      assigns[key] = value.to_s
+    if key != :position_source && value.is_a?(Array) && value[0].is_a?(Symbol)
+      assigns[key] = value[3]
     end
   end
 
@@ -72,18 +72,6 @@ module GraphQL
     # @return [Array<String, Integer>] The current position in the result
     attr_reader :path
 
-    module EmptyScopedContext
-      EMPTY_HASH = {}.freeze
-
-      def self.key?(k)
-        false
-      end
-
-      def self.merged_context
-        EMPTY_HASH
-      end
-    end
-
     # Make a new context which delegates key lookup to `values`
     # @param query [GraphQL::Query] the query who owns this context
     # @param values [Hash] A hash of arbitrary values which will be accessible at query-time
@@ -99,28 +87,35 @@ module GraphQL
       @path = []
       @value = nil
       @context = self # for SharedMethods TODO delete sharedmethods
-      @scoped_context = EmptyScopedContext
+      @scoped_context = ScopedContext.new(self)
     end
 
     class ScopedContext
+      NO_PATH = [].freeze
+      NO_CONTEXT = {}.freeze
+
      def initialize(query_context)
        @query_context = query_context
-        @scoped_contexts = {}
-        @all_keys = Set.new
-        @no_path = [].freeze
+        @scoped_contexts = nil
+        @all_keys = nil
      end
 
      def merged_context
-        merged_ctx = {}
-        each_present_path_ctx do |path_ctx|
-          merged_ctx = path_ctx.merge(merged_ctx)
+        if @scoped_contexts.nil?
+          NO_CONTEXT
+        else
+          merged_ctx = {}
+          each_present_path_ctx do |path_ctx|
+            merged_ctx = path_ctx.merge(merged_ctx)
+          end
+          merged_ctx
        end
-        merged_ctx
      end
 
      def merge!(hash)
+        @all_keys ||= Set.new
        @all_keys.merge(hash.keys)
-        ctx = @scoped_contexts
+        ctx = @scoped_contexts ||= {}
        current_path.each do |path_part|
          ctx = ctx[path_part] ||= { parent: ctx }
        end
@@ -129,7 +124,7 @@ module GraphQL
      end
 
      def key?(key)
-        if @all_keys.include?(key)
+        if @all_keys && @all_keys.include?(key)
          each_present_path_ctx do |path_ctx|
            if path_ctx.key?(key)
              return true
@@ -149,7 +144,7 @@ module GraphQL
      end
 
      def current_path
-        @query_context.current_path || @no_path
+        @query_context.current_path || NO_PATH
      end
 
      def dig(key, *other_keys)
@@ -172,19 +167,23 @@ module GraphQL
      # but look up the tree for previously-assigned scoped values
      def each_present_path_ctx
        ctx = @scoped_contexts
-        current_path.each do |path_part|
-          if ctx.key?(path_part)
-            ctx = ctx[path_part]
-          else
-            break
+        if ctx.nil?
+          # no-op
+        else
+          current_path.each do |path_part|
+            if ctx.key?(path_part)
+              ctx = ctx[path_part]
+            else
+              break
+            end
          end
-        end
 
-        while ctx
-          if (scoped_ctx = ctx[:scoped_context])
-            yield(scoped_ctx)
+          while ctx
+            if (scoped_ctx = ctx[:scoped_context])
+              yield(scoped_ctx)
+            end
+            ctx = ctx[:parent]
          end
-          ctx = ctx[:parent]
        end
      end
    end
@@ -329,9 +328,6 @@ module GraphQL
    end
 
    def scoped_merge!(hash)
-      if @scoped_context == EmptyScopedContext
-        @scoped_context = ScopedContext.new(self)
-      end
      @scoped_context.merge!(hash)
    end
 
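
Note: these context hunks remove the EmptyScopedContext placeholder. Every Query::Context now gets a real ScopedContext up front, but its backing hash and key set are only allocated on the first `merge!`; until then `merged_context` returns the frozen `NO_CONTEXT` hash and `key?` short-circuits on the nil `@all_keys`. A minimal, hypothetical schema exercising that path (field and key names are invented):

    require "graphql"

    class QueryType < GraphQL::Schema::Object
      graphql_name "Query"
      field :viewer_id, Integer, null: true

      def viewer_id
        # First scoped write allocates the scoped-context storage lazily:
        context.scoped_merge!(viewer_id: 42)
        context[:viewer_id] # scoped values are visible at this path and below
      end
    end

    class MySchema < GraphQL::Schema
      query(QueryType)
    end

    MySchema.execute("{ viewerId }").to_h
    # => {"data"=>{"viewerId"=>42}} (expected)
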
@@ -81,7 +81,7 @@ module GraphQL
 
          error
        else
-          yield
+          super
        end
      end
    end
@@ -147,7 +147,12 @@ module GraphQL
      if new_class
        @trace_class = new_class
      elsif !defined?(@trace_class)
-        @trace_class = Class.new(GraphQL::Tracing::Trace)
+        parent_trace_class = if superclass.respond_to?(:trace_class)
+          superclass.trace_class
+        else
+          GraphQL::Tracing::Trace
+        end
+        @trace_class = Class.new(parent_trace_class)
      end
      @trace_class
    end
@@ -955,14 +960,17 @@ module GraphQL
    # @param options [Hash] Keywords that will be passed to the tracing class during `#initialize`
    # @return [void]
    def trace_with(trace_mod, **options)
-      @trace_options ||= {}
-      @trace_options.merge!(options)
+      trace_options.merge!(options)
      trace_class.include(trace_mod)
    end
 
+    def trace_options
+      @trace_options ||= superclass.respond_to?(:trace_options) ? superclass.trace_options.dup : {}
+    end
+
    def new_trace(**options)
      if defined?(@trace_options)
-        options = @trace_options.merge(options)
+        options = trace_options.merge(options)
      end
      trace_class.new(**options)
    end
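
Note: together, the `trace_class` and new `trace_options` changes make tracing configuration inheritable: a subclass's trace class now derives from its parent's, and trace options are copied down from the superclass on first access. A hedged sketch of what that enables (the schema and module names are invented; `execute_query` is one of the standard trace hooks):

    require "graphql"

    module TimingTrace
      def execute_query(query:)
        started = Time.now
        result = super
        puts "GraphQL query took #{Time.now - started}s"
        result
      end
    end

    class BaseSchema < GraphQL::Schema
      trace_with(TimingTrace)
    end

    class ChildSchema < BaseSchema
    end

    # With this release, ChildSchema's trace class subclasses BaseSchema's,
    # so the module included via trace_with is expected to apply to subclasses too:
    ChildSchema.trace_class.ancestors.include?(TimingTrace) # => true (expected)
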
@@ -35,7 +35,7 @@ module GraphQL
  # # Alternatively, you can call the class methods followed by your edge type
  # # edges_nullable true
  # # edge_nullable true
- # # nodes_nullable true
+ # # node_nullable true
  # # has_nodes_field true
  # # edge_type Types::PostEdge
  # end
@@ -1,4 +1,4 @@
 # frozen_string_literal: true
 module GraphQL
-  VERSION = "2.0.18"
+  VERSION = "2.0.19"
 end
data/lib/graphql.rb CHANGED
@@ -58,8 +58,12 @@ This is probably a bug in GraphQL-Ruby, please report this error on GitHub: http
    GraphQL::Language::Parser.parse(string, filename: filename, trace: trace)
  end
 
-  # @return [Array<GraphQL::Language::Token>]
+  # @return [Array<Array>]
  def self.scan(graphql_string)
+    scan_with_ruby(graphql_string)
+  end
+
+  def self.scan_with_ruby(graphql_string)
    GraphQL::Language::Lexer.tokenize(graphql_string)
  end
 
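
Note: `GraphQL.scan` keeps its public signature (now delegating through `scan_with_ruby`), but per the updated `@return` tag it yields the array-based tokens described above rather than Token objects. A quick, hedged example of what a caller sees:

    require "graphql"

    tokens = GraphQL.scan("{ viewer }")
    first = tokens.first
    # Each entry is now a plain array; assuming the
    # [name, line, col, value, previous_token] layout from the lexer change above:
    first[0] # => :LCURLY (token name)
    first[3] # => "{"     (matched string)
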
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: graphql
 version: !ruby/object:Gem::Version
-  version: 2.0.18
+  version: 2.0.19
 platform: ruby
 authors:
 - Robert Mosolgo
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-03-09 00:00:00.000000000 Z
+date: 2023-03-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: benchmark-ips
@@ -112,16 +112,30 @@ dependencies:
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '12'
+        version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '12'
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: rake-compiler
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: rubocop
   requirement: !ruby/object:Gem::Requirement
@@ -317,7 +331,6 @@ files:
 - lib/graphql/execution/multiplex.rb
 - lib/graphql/execution_error.rb
 - lib/graphql/filter.rb
-- lib/graphql/graphql_ext.bundle
 - lib/graphql/integer_decoding_error.rb
 - lib/graphql/integer_encoding_error.rb
 - lib/graphql/introspection.rb
Binary file changed (diff not shown)