graphql 2.0.30 → 2.3.6
- checksums.yaml +4 -4
- data/lib/generators/graphql/install/mutation_root_generator.rb +2 -2
- data/lib/generators/graphql/install/templates/base_mutation.erb +2 -0
- data/lib/generators/graphql/install/templates/mutation_type.erb +2 -0
- data/lib/generators/graphql/install_generator.rb +3 -0
- data/lib/generators/graphql/templates/base_argument.erb +2 -0
- data/lib/generators/graphql/templates/base_connection.erb +2 -0
- data/lib/generators/graphql/templates/base_edge.erb +2 -0
- data/lib/generators/graphql/templates/base_enum.erb +2 -0
- data/lib/generators/graphql/templates/base_field.erb +2 -0
- data/lib/generators/graphql/templates/base_input_object.erb +2 -0
- data/lib/generators/graphql/templates/base_interface.erb +2 -0
- data/lib/generators/graphql/templates/base_object.erb +2 -0
- data/lib/generators/graphql/templates/base_resolver.erb +6 -0
- data/lib/generators/graphql/templates/base_scalar.erb +2 -0
- data/lib/generators/graphql/templates/base_union.erb +2 -0
- data/lib/generators/graphql/templates/graphql_controller.erb +2 -0
- data/lib/generators/graphql/templates/loader.erb +2 -0
- data/lib/generators/graphql/templates/mutation.erb +2 -0
- data/lib/generators/graphql/templates/node_type.erb +2 -0
- data/lib/generators/graphql/templates/query_type.erb +2 -0
- data/lib/generators/graphql/templates/schema.erb +5 -0
- data/lib/graphql/analysis/analyzer.rb +89 -0
- data/lib/graphql/analysis/field_usage.rb +82 -0
- data/lib/graphql/analysis/max_query_complexity.rb +20 -0
- data/lib/graphql/analysis/max_query_depth.rb +20 -0
- data/lib/graphql/analysis/query_complexity.rb +183 -0
- data/lib/graphql/analysis/query_depth.rb +58 -0
- data/lib/graphql/analysis/visitor.rb +282 -0
- data/lib/graphql/analysis.rb +92 -1
- data/lib/graphql/backtrace/inspect_result.rb +0 -12
- data/lib/graphql/backtrace/trace.rb +12 -15
- data/lib/graphql/coercion_error.rb +1 -9
- data/lib/graphql/dataloader/async_dataloader.rb +88 -0
- data/lib/graphql/dataloader/null_dataloader.rb +1 -1
- data/lib/graphql/dataloader/request.rb +5 -0
- data/lib/graphql/dataloader/source.rb +11 -3
- data/lib/graphql/dataloader.rb +112 -142
- data/lib/graphql/duration_encoding_error.rb +16 -0
- data/lib/graphql/execution/interpreter/argument_value.rb +5 -1
- data/lib/graphql/execution/interpreter/runtime/graphql_result.rb +175 -0
- data/lib/graphql/execution/interpreter/runtime.rb +163 -365
- data/lib/graphql/execution/interpreter.rb +92 -158
- data/lib/graphql/execution/lookahead.rb +88 -21
- data/lib/graphql/introspection/dynamic_fields.rb +1 -1
- data/lib/graphql/introspection/entry_points.rb +11 -5
- data/lib/graphql/introspection/schema_type.rb +3 -1
- data/lib/graphql/language/block_string.rb +34 -18
- data/lib/graphql/language/definition_slice.rb +1 -1
- data/lib/graphql/language/document_from_schema_definition.rb +38 -38
- data/lib/graphql/language/lexer.rb +305 -193
- data/lib/graphql/language/nodes.rb +113 -66
- data/lib/graphql/language/parser.rb +787 -1986
- data/lib/graphql/language/printer.rb +303 -146
- data/lib/graphql/language/sanitized_printer.rb +20 -22
- data/lib/graphql/language/static_visitor.rb +167 -0
- data/lib/graphql/language/visitor.rb +20 -81
- data/lib/graphql/language.rb +61 -0
- data/lib/graphql/load_application_object_failed_error.rb +5 -1
- data/lib/graphql/pagination/array_connection.rb +6 -6
- data/lib/graphql/pagination/connection.rb +28 -1
- data/lib/graphql/pagination/mongoid_relation_connection.rb +1 -2
- data/lib/graphql/query/context/scoped_context.rb +101 -0
- data/lib/graphql/query/context.rb +66 -131
- data/lib/graphql/query/null_context.rb +4 -11
- data/lib/graphql/query/validation_pipeline.rb +4 -4
- data/lib/graphql/query/variables.rb +3 -3
- data/lib/graphql/query.rb +17 -26
- data/lib/graphql/railtie.rb +9 -6
- data/lib/graphql/rake_task.rb +3 -12
- data/lib/graphql/rubocop/graphql/base_cop.rb +1 -1
- data/lib/graphql/schema/addition.rb +21 -11
- data/lib/graphql/schema/argument.rb +43 -8
- data/lib/graphql/schema/base_64_encoder.rb +3 -5
- data/lib/graphql/schema/build_from_definition.rb +9 -12
- data/lib/graphql/schema/directive/one_of.rb +12 -0
- data/lib/graphql/schema/directive/specified_by.rb +14 -0
- data/lib/graphql/schema/directive.rb +3 -1
- data/lib/graphql/schema/enum.rb +3 -3
- data/lib/graphql/schema/field/connection_extension.rb +1 -15
- data/lib/graphql/schema/field/scope_extension.rb +8 -1
- data/lib/graphql/schema/field.rb +49 -35
- data/lib/graphql/schema/has_single_input_argument.rb +157 -0
- data/lib/graphql/schema/input_object.rb +4 -4
- data/lib/graphql/schema/interface.rb +10 -10
- data/lib/graphql/schema/introspection_system.rb +4 -2
- data/lib/graphql/schema/late_bound_type.rb +4 -0
- data/lib/graphql/schema/list.rb +2 -2
- data/lib/graphql/schema/loader.rb +2 -3
- data/lib/graphql/schema/member/base_dsl_methods.rb +2 -1
- data/lib/graphql/schema/member/has_arguments.rb +63 -73
- data/lib/graphql/schema/member/has_directives.rb +1 -1
- data/lib/graphql/schema/member/has_fields.rb +8 -5
- data/lib/graphql/schema/member/has_interfaces.rb +23 -9
- data/lib/graphql/schema/member/relay_shortcuts.rb +1 -1
- data/lib/graphql/schema/member/scoped.rb +19 -0
- data/lib/graphql/schema/member/type_system_helpers.rb +1 -2
- data/lib/graphql/schema/member/validates_input.rb +3 -3
- data/lib/graphql/schema/mutation.rb +7 -0
- data/lib/graphql/schema/object.rb +8 -0
- data/lib/graphql/schema/printer.rb +8 -7
- data/lib/graphql/schema/relay_classic_mutation.rb +6 -128
- data/lib/graphql/schema/resolver.rb +27 -13
- data/lib/graphql/schema/scalar.rb +3 -3
- data/lib/graphql/schema/subscription.rb +11 -4
- data/lib/graphql/schema/union.rb +1 -1
- data/lib/graphql/schema/unique_within_type.rb +1 -1
- data/lib/graphql/schema/warden.rb +96 -95
- data/lib/graphql/schema.rb +323 -102
- data/lib/graphql/static_validation/all_rules.rb +1 -1
- data/lib/graphql/static_validation/base_visitor.rb +1 -1
- data/lib/graphql/static_validation/literal_validator.rb +2 -3
- data/lib/graphql/static_validation/rules/fields_will_merge.rb +2 -2
- data/lib/graphql/static_validation/rules/required_arguments_are_present.rb +1 -1
- data/lib/graphql/static_validation/rules/required_input_object_attributes_are_present.rb +2 -2
- data/lib/graphql/static_validation/validation_context.rb +5 -5
- data/lib/graphql/static_validation/validator.rb +3 -0
- data/lib/graphql/static_validation.rb +0 -1
- data/lib/graphql/subscriptions/action_cable_subscriptions.rb +4 -3
- data/lib/graphql/subscriptions/broadcast_analyzer.rb +1 -1
- data/lib/graphql/subscriptions/event.rb +8 -2
- data/lib/graphql/subscriptions/serialize.rb +2 -0
- data/lib/graphql/subscriptions.rb +15 -13
- data/lib/graphql/testing/helpers.rb +151 -0
- data/lib/graphql/testing.rb +2 -0
- data/lib/graphql/tracing/appoptics_trace.rb +2 -2
- data/lib/graphql/tracing/appoptics_tracing.rb +2 -2
- data/lib/graphql/tracing/legacy_hooks_trace.rb +74 -0
- data/lib/graphql/tracing/platform_tracing.rb +3 -1
- data/lib/graphql/tracing/{prometheus_tracing → prometheus_trace}/graphql_collector.rb +3 -1
- data/lib/graphql/tracing/prometheus_trace.rb +9 -9
- data/lib/graphql/tracing/sentry_trace.rb +112 -0
- data/lib/graphql/tracing/trace.rb +1 -0
- data/lib/graphql/tracing.rb +3 -1
- data/lib/graphql/type_kinds.rb +1 -1
- data/lib/graphql/types/iso_8601_duration.rb +77 -0
- data/lib/graphql/types/relay/connection_behaviors.rb +32 -2
- data/lib/graphql/types/relay/edge_behaviors.rb +7 -0
- data/lib/graphql/types.rb +1 -0
- data/lib/graphql/version.rb +1 -1
- data/lib/graphql.rb +13 -13
- data/readme.md +12 -2
- metadata +33 -26
- data/lib/graphql/analysis/ast/analyzer.rb +0 -84
- data/lib/graphql/analysis/ast/field_usage.rb +0 -57
- data/lib/graphql/analysis/ast/max_query_complexity.rb +0 -22
- data/lib/graphql/analysis/ast/max_query_depth.rb +0 -22
- data/lib/graphql/analysis/ast/query_complexity.rb +0 -230
- data/lib/graphql/analysis/ast/query_depth.rb +0 -55
- data/lib/graphql/analysis/ast/visitor.rb +0 -276
- data/lib/graphql/analysis/ast.rb +0 -81
- data/lib/graphql/deprecation.rb +0 -9
- data/lib/graphql/filter.rb +0 -59
- data/lib/graphql/language/parser.y +0 -560
- data/lib/graphql/schema/base_64_bp.rb +0 -26
- data/lib/graphql/static_validation/type_stack.rb +0 -216
- data/lib/graphql/subscriptions/instrumentation.rb +0 -28
data/lib/graphql/language/lexer.rb
@@ -1,39 +1,246 @@
 # frozen_string_literal: true
-
-require "strscan"
-
 module GraphQL
   module Language
+
     class Lexer
- … (29 lines of the previous lexer implementation, not shown in this rendering)
+      def initialize(graphql_str, filename: nil, max_tokens: nil)
+        if !(graphql_str.encoding == Encoding::UTF_8 || graphql_str.ascii_only?)
+          graphql_str = graphql_str.dup.force_encoding(Encoding::UTF_8)
+        end
+        @string = graphql_str
+        @filename = filename
+        @scanner = StringScanner.new(graphql_str)
+        @pos = nil
+        @max_tokens = max_tokens || Float::INFINITY
+        @tokens_count = 0
+      end
+
+      def eos?
+        @scanner.eos?
+      end
+
+      attr_reader :pos
+
+      def advance
+        @scanner.skip(IGNORE_REGEXP)
+        return false if @scanner.eos?
+        @tokens_count += 1
+        if @tokens_count > @max_tokens
+          raise_parse_error("This query is too large to execute.")
+        end
+        @pos = @scanner.pos
+        next_byte = @string.getbyte(@pos)
+        next_byte_is_for = FIRST_BYTES[next_byte]
+        case next_byte_is_for
+        when ByteFor::PUNCTUATION
+          @scanner.pos += 1
+          PUNCTUATION_NAME_FOR_BYTE[next_byte]
+        when ByteFor::NAME
+          if len = @scanner.skip(KEYWORD_REGEXP)
+            case len
+            when 2
+              :ON
+            when 12
+              :SUBSCRIPTION
+            else
+              pos = @pos
+
+              # Use bytes 2 and 3 as a unique identifier for this keyword
+              bytes = (@string.getbyte(pos + 2) << 8) | @string.getbyte(pos + 1)
+              KEYWORD_BY_TWO_BYTES[_hash(bytes)]
+            end
+          else
+            @scanner.skip(IDENTIFIER_REGEXP)
+            :IDENTIFIER
+          end
+        when ByteFor::IDENTIFIER
+          @scanner.skip(IDENTIFIER_REGEXP)
+          :IDENTIFIER
+        when ByteFor::NUMBER
+          @scanner.skip(NUMERIC_REGEXP)
+
+          if GraphQL.reject_numbers_followed_by_names
+            new_pos = @scanner.pos
+            peek_byte = @string.getbyte(new_pos)
+            next_first_byte = FIRST_BYTES[peek_byte]
+            if next_first_byte == ByteFor::NAME || next_first_byte == ByteFor::IDENTIFIER
+              number_part = token_value
+              name_part = @scanner.scan(IDENTIFIER_REGEXP)
+              raise_parse_error("Name after number is not allowed (in `#{number_part}#{name_part}`)")
+            end
+          end
+          # Check for a matched decimal:
+          @scanner[1] ? :FLOAT : :INT
+        when ByteFor::ELLIPSIS
+          if @string.getbyte(@pos + 1) != 46 || @string.getbyte(@pos + 2) != 46
+            raise_parse_error("Expected `...`, actual: #{@string[@pos..@pos + 2].inspect}")
+          end
+          @scanner.pos += 3
+          :ELLIPSIS
+        when ByteFor::STRING
+          if @scanner.skip(BLOCK_STRING_REGEXP) || @scanner.skip(QUOTED_STRING_REGEXP)
+            :STRING
+          else
+            raise_parse_error("Expected string or block string, but it was malformed")
+          end
+        else
+          @scanner.pos += 1
+          :UNKNOWN_CHAR
+        end
+      rescue ArgumentError => err
+        if err.message == "invalid byte sequence in UTF-8"
+          raise_parse_error("Parse error on bad Unicode escape sequence", nil, nil)
+        end
+      end
+
+      def token_value
+        @string.byteslice(@scanner.pos - @scanner.matched_size, @scanner.matched_size)
+      rescue StandardError => err
+        raise GraphQL::Error, "(token_value failed: #{err.class}: #{err.message})"
+      end
+
+      def debug_token_value(token_name)
+        if token_name && Lexer::Punctuation.const_defined?(token_name)
+          Lexer::Punctuation.const_get(token_name)
+        elsif token_name == :ELLIPSIS
+          "..."
+        elsif token_name == :STRING
+          string_value
+        elsif @scanner.matched_size.nil?
+          @scanner.peek(1)
+        else
+          token_value
+        end
+      end
+
+      ESCAPES = /\\["\\\/bfnrt]/
+      ESCAPES_REPLACE = {
+        '\\"' => '"',
+        "\\\\" => "\\",
+        "\\/" => '/',
+        "\\b" => "\b",
+        "\\f" => "\f",
+        "\\n" => "\n",
+        "\\r" => "\r",
+        "\\t" => "\t",
+      }
+      UTF_8 = /\\u(?:([\dAa-f]{4})|\{([\da-f]{4,})\})(?:\\u([\dAa-f]{4}))?/i
+      VALID_STRING = /\A(?:[^\\]|#{ESCAPES}|#{UTF_8})*\z/o
+      ESCAPED = /(?:#{ESCAPES}|#{UTF_8})/o
+
+      def string_value
+        str = token_value
+        is_block = str.start_with?('"""')
+        if is_block
+          str.gsub!(/\A"""|"""\z/, '')
+          return Language::BlockString.trim_whitespace(str)
+        else
+          str.gsub!(/\A"|"\z/, '')
+
+          if !str.valid_encoding? || !str.match?(VALID_STRING)
+            raise_parse_error("Bad unicode escape in #{str.inspect}")
+          else
+            Lexer.replace_escaped_characters_in_place(str)
+
+            if !str.valid_encoding?
+              raise_parse_error("Bad unicode escape in #{str.inspect}")
+            else
+              str
+            end
+          end
+        end
+      end
+
+      def line_number
+        @scanner.string[0..@pos].count("\n") + 1
+      end
+
+      def column_number
+        @scanner.string[0..@pos].split("\n").last.length
+      end
+
+      def raise_parse_error(message, line = line_number, col = column_number)
+        raise GraphQL::ParseError.new(message, line, col, @string, filename: @filename)
+      end
+
+      IGNORE_REGEXP = %r{
+        (?:
+          [, \c\r\n\t]+ |
+          \#.*$
+        )*
+      }x
+      IDENTIFIER_REGEXP = /[_A-Za-z][_0-9A-Za-z]*/
+      INT_REGEXP = /-?(?:[0]|[1-9][0-9]*)/
+      FLOAT_DECIMAL_REGEXP = /[.][0-9]+/
+      FLOAT_EXP_REGEXP = /[eE][+-]?[0-9]+/
+      # TODO: FLOAT_EXP_REGEXP should not be allowed to follow INT_REGEXP, integers are not allowed to have exponent parts.
+      NUMERIC_REGEXP = /#{INT_REGEXP}(#{FLOAT_DECIMAL_REGEXP}#{FLOAT_EXP_REGEXP}|#{FLOAT_DECIMAL_REGEXP}|#{FLOAT_EXP_REGEXP})?/
+
+      KEYWORDS = [
+        "on",
+        "fragment",
+        "true",
+        "false",
+        "null",
+        "query",
+        "mutation",
+        "subscription",
+        "schema",
+        "scalar",
+        "type",
+        "extend",
+        "implements",
+        "interface",
+        "union",
+        "enum",
+        "input",
+        "directive",
+        "repeatable"
+      ].freeze
+
+      KEYWORD_REGEXP = /#{Regexp.union(KEYWORDS.sort)}\b/
+      KEYWORD_BY_TWO_BYTES = [
+        :INTERFACE,
+        :MUTATION,
+        :EXTEND,
+        :FALSE,
+        :ENUM,
+        :TRUE,
+        :NULL,
+        nil,
+        nil,
+        nil,
+        nil,
+        nil,
+        nil,
+        nil,
+        :QUERY,
+        nil,
+        nil,
+        :REPEATABLE,
+        :IMPLEMENTS,
+        :INPUT,
+        :TYPE,
+        :SCHEMA,
+        nil,
+        nil,
+        nil,
+        :DIRECTIVE,
+        :UNION,
+        nil,
+        nil,
+        :SCALAR,
+        nil,
+        :FRAGMENT
+      ]
+
+      # This produces a unique integer for bytes 2 and 3 of each keyword string
+      # See https://tenderlovemaking.com/2023/09/02/fast-tokenizers-with-stringscanner.html
+      def _hash key
+        (key * 18592990) >> 27 & 0x1f
+      end
+
+      module Punctuation
         LCURLY = '{'
         RCURLY = '}'
         LPAREN = '('
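Note on the keyword lookup added above: instead of a regex/lookup-table pair, the new lexer packs bytes 2 and 3 of a matched keyword into an integer, runs it through `_hash`, and uses the result as an index into `KEYWORD_BY_TWO_BYTES`. A minimal standalone sketch of that scheme (the helper name `two_byte_hash` is ours; the constant and logic mirror the diff above):

# Standalone Ruby sketch of the two-byte keyword hash used in Lexer#advance.
def two_byte_hash(key)
  (key * 18592990) >> 27 & 0x1f
end

%w[query interface fragment].each do |kw|
  # Same packing as `(@string.getbyte(pos + 2) << 8) | @string.getbyte(pos + 1)`
  bytes = (kw.getbyte(2) << 8) | kw.getbyte(1)
  puts "#{kw.inspect} -> slot #{two_byte_hash(bytes)}"
end
# Expected slots: "query" -> 14, "interface" -> 0, "fragment" -> 31,
# which hold :QUERY, :INTERFACE and :FRAGMENT in KEYWORD_BY_TWO_BYTES.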
@@ -43,36 +250,30 @@ module GraphQL
         COLON = ':'
         VAR_SIGN = '$'
         DIR_SIGN = '@'
-        ELLIPSIS = '...'
         EQUALS = '='
         BANG = '!'
         PIPE = '|'
         AMP = '&'
       end

-
+      # A sparse array mapping the bytes for each punctuation
+      # to a symbol name for that punctuation
+      PUNCTUATION_NAME_FOR_BYTE = Punctuation.constants.each_with_object([]) { |name, arr|
+        punct = Punctuation.const_get(name)
+        arr[punct.ord] = name
+      }

       QUOTE = '"'
       UNICODE_DIGIT = /[0-9A-Za-z]/
       FOUR_DIGIT_UNICODE = /#{UNICODE_DIGIT}{4}/
-      N_DIGIT_UNICODE = %r{#{LCURLY}#{UNICODE_DIGIT}{4,}#{RCURLY}}x
+      N_DIGIT_UNICODE = %r{#{Punctuation::LCURLY}#{UNICODE_DIGIT}{4,}#{Punctuation::RCURLY}}x
       UNICODE_ESCAPE = %r{\\u(?:#{FOUR_DIGIT_UNICODE}|#{N_DIGIT_UNICODE})}
-      # # https://graphql.github.io/graphql-spec/June2018/#sec-String-Value
       STRING_ESCAPE = %r{[\\][\\/bfnrt]}
       BLOCK_QUOTE = '"""'
       ESCAPED_QUOTE = /\\"/;
-      STRING_CHAR = /#{ESCAPED_QUOTE}|[^"
- … (2 lines not shown in this rendering)
-        key = Literals.const_get(n)
-        key = key.is_a?(Regexp) ? key.source.gsub(/(\\b|\\)/, '') : key
-        o[key] = n
-      }
-
-      LIT = Regexp.union(Literals.constants.map { |n| Literals.const_get(n) })
-
-      QUOTED_STRING = %r{#{QUOTE} (?:#{STRING_CHAR})* #{QUOTE}}x
-      BLOCK_STRING = %r{
+      STRING_CHAR = /#{ESCAPED_QUOTE}|[^"\\\n\r]|#{UNICODE_ESCAPE}|#{STRING_ESCAPE}/
+      QUOTED_STRING_REGEXP = %r{#{QUOTE} (?:#{STRING_CHAR})* #{QUOTE}}x
+      BLOCK_STRING_REGEXP = %r{
         #{BLOCK_QUOTE}
         (?: [^"\\] | # Any characters that aren't a quote or slash
         (?<!") ["]{1,2} (?!") | # Any quotes that don't have quotes next to them
@@ -84,169 +285,80 @@ module GraphQL
         #{BLOCK_QUOTE}
       }xm

-      #
- … (8 lines not shown in this rendering)
+      # Use this array to check, for a given byte that will start a token,
+      # what kind of token might it start?
+      FIRST_BYTES = Array.new(255)
+
+      module ByteFor
+        NUMBER = 0 # int or float
+        NAME = 1 # identifier or keyword
+        STRING = 2
+        ELLIPSIS = 3
+        IDENTIFIER = 4 # identifier, *not* a keyword
+        PUNCTUATION = 5
       end

- … (8 lines not shown in this rendering)
+      (0..9).each { |i| FIRST_BYTES[i.to_s.ord] = ByteFor::NUMBER }
+      FIRST_BYTES["-".ord] = ByteFor::NUMBER
+      # Some of these may be overwritten below, if keywords start with the same character
+      ("A".."Z").each { |char| FIRST_BYTES[char.ord] = ByteFor::IDENTIFIER }
+      ("a".."z").each { |char| FIRST_BYTES[char.ord] = ByteFor::IDENTIFIER }
+      FIRST_BYTES['_'.ord] = ByteFor::IDENTIFIER
+      FIRST_BYTES['.'.ord] = ByteFor::ELLIPSIS
+      FIRST_BYTES['"'.ord] = ByteFor::STRING
+      KEYWORDS.each { |kw| FIRST_BYTES[kw.getbyte(0)] = ByteFor::NAME }
+      Punctuation.constants.each do |punct_name|
+        punct = Punctuation.const_get(punct_name)
+        FIRST_BYTES[punct.ord] = ByteFor::PUNCTUATION
       end

-      def self.tokenize(string)
-        value = string.dup.force_encoding(Encoding::UTF_8)
-
-        scanner = if value.valid_encoding?
-          new value
-        else
-          BadEncoding.new value
-        end
-
-        toks = []
-
-        while tok = scanner.next_token
-          toks << tok
-        end
-
-        toks
-      end
-
-      def next_token
-        return if @scan.eos?
-
-        pos = @scan.pos
-
-        case
-        when str = @scan.scan(FLOAT) then emit(:FLOAT, pos, @scan.pos, str)
-        when str = @scan.scan(INT) then emit(:INT, pos, @scan.pos, str)
-        when str = @scan.scan(LIT) then emit(LIT_NAME_LUT[str], pos, @scan.pos, -str)
-        when str = @scan.scan(IDENTIFIER) then emit(:IDENTIFIER, pos, @scan.pos, str)
-        when str = @scan.scan(BLOCK_STRING) then emit_block(pos, @scan.pos, str.gsub(/\A#{BLOCK_QUOTE}|#{BLOCK_QUOTE}\z/, ''))
-        when str = @scan.scan(QUOTED_STRING) then emit_string(pos, @scan.pos, str.gsub(/^"|"$/, ''))
-        when str = @scan.scan(COMMENT) then record_comment(pos, @scan.pos, str)
-        when str = @scan.scan(NEWLINE)
-          @line += 1
-          @col = 1
-          next_token
-        when @scan.scan(BLANK)
-          @col += @scan.pos - pos
-          next_token
-        when str = @scan.scan(UNKNOWN_CHAR) then emit(:UNKNOWN_CHAR, pos, @scan.pos, str)
-        else
-          # This should never happen since `UNKNOWN_CHAR` ensures we make progress
-          raise "Unknown string?"
-        end
-      end
-
-      def emit(token_name, ts, te, token_value)
-        token = [
-          token_name,
-          @line,
-          @col,
-          token_value,
-          @previous_token,
-        ]
-        @previous_token = token
-        # Bump the column counter for the next token
-        @col += te - ts
-        token
-      end

       # Replace any escaped unicode or whitespace with the _actual_ characters
       # To avoid allocating more strings, this modifies the string passed into it
       def self.replace_escaped_characters_in_place(raw_string)
-        raw_string.gsub!(
- … (7 lines not shown in this rendering)
-        (
- … (3 lines not shown in this rendering)
+        raw_string.gsub!(ESCAPED) do |matched_str|
+          if (point_str_1 = $1 || $2)
+            codepoint_1 = point_str_1.to_i(16)
+            if (codepoint_2 = $3)
+              codepoint_2 = codepoint_2.to_i(16)
+              if (codepoint_1 >= 0xD800 && codepoint_1 <= 0xDBFF) && # leading surrogate
+                  (codepoint_2 >= 0xDC00 && codepoint_2 <= 0xDFFF) # trailing surrogate
+                # A surrogate pair
+                combined = ((codepoint_1 - 0xD800) * 0x400) + (codepoint_2 - 0xDC00) + 0x10000
+                [combined].pack('U'.freeze)
+              else
+                # Two separate code points
+                [codepoint_1].pack('U'.freeze) + [codepoint_2].pack('U'.freeze)
+              end
             else
-
-              [codepoint_1].pack('U'.freeze) + [codepoint_2].pack('U'.freeze)
+              [codepoint_1].pack('U'.freeze)
             end
           else
-            [
+            ESCAPES_REPLACE[matched_str]
           end
         end
         nil
       end

- … (16 lines not shown in this rendering)
-      ESCAPES_REPLACE = {
-        '\\"' => '"',
-        "\\\\" => "\\",
-        "\\/" => '/',
-        "\\b" => "\b",
-        "\\f" => "\f",
-        "\\n" => "\n",
-        "\\r" => "\r",
-        "\\t" => "\t",
-      }
-      UTF_8 = /\\u(?:([\dAa-f]{4})|\{([\da-f]{4,})\})(?:\\u([\dAa-f]{4}))?/i
-      VALID_STRING = /\A(?:[^\\]|#{ESCAPES}|#{UTF_8})*\z/o
-
-      def emit_block(ts, te, value)
-        line_incr = value.count("\n")
-        value = GraphQL::Language::BlockString.trim_whitespace(value)
-        tok = emit_string(ts, te, value)
-        @line += line_incr
-        tok
-      end
-
-      def emit_string(ts, te, value)
-        if !value.valid_encoding? || !value.match?(VALID_STRING)
-          emit(:BAD_UNICODE_ESCAPE, ts, te, value)
-        else
-          self.class.replace_escaped_characters_in_place(value)
-
-          if !value.valid_encoding?
-            emit(:BAD_UNICODE_ESCAPE, ts, te, value)
-          else
-            emit(:STRING, ts, te, value)
-          end
+      # This is not used during parsing because the parser
+      # doesn't actually need tokens.
+      def self.tokenize(string)
+        lexer = GraphQL::Language::Lexer.new(string)
+        tokens = []
+        prev_token = nil
+        while (token_name = lexer.advance)
+          new_token = [
+            token_name,
+            lexer.line_number,
+            lexer.column_number,
+            lexer.debug_token_value(token_name),
+            prev_token,
+          ]
+          tokens << new_token
+          prev_token = new_token
         end
+        tokens
       end
-
-      private
-
-      def scanner(value)
-        StringScanner.new value
-      end
-
     end
   end
 end
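For reference, a minimal sketch of driving the rewritten lexer directly, based only on the methods added in this diff and assuming graphql 2.3.6 is installed (the parser calls these internally; `Lexer.tokenize` wraps the same loop):

require "graphql"

lexer = GraphQL::Language::Lexer.new("query { user(id: 1) { name } }")
while (token_name = lexer.advance)
  puts "#{token_name} #{lexer.debug_token_value(token_name).inspect}"
end

# Or collect the legacy-style token tuples in one call:
tokens = GraphQL::Language::Lexer.tokenize("{ __typename }")
tokens.each { |(name, line, col, value, _prev)| puts [name, line, col, value].inspect }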