graphlyte 0.3.0 → 1.0.0

Files changed (41)
  1. checksums.yaml +4 -4
  2. data/lib/graphlyte/data.rb +68 -0
  3. data/lib/graphlyte/document.rb +131 -0
  4. data/lib/graphlyte/dsl.rb +86 -0
  5. data/lib/graphlyte/editor.rb +288 -0
  6. data/lib/graphlyte/editors/annotate_types.rb +75 -0
  7. data/lib/graphlyte/editors/canonicalize.rb +26 -0
  8. data/lib/graphlyte/editors/collect_variable_references.rb +36 -0
  9. data/lib/graphlyte/editors/infer_signature.rb +36 -0
  10. data/lib/graphlyte/editors/inline_fragments.rb +37 -0
  11. data/lib/graphlyte/editors/remove_unneeded_spreads.rb +64 -0
  12. data/lib/graphlyte/editors/select_operation.rb +116 -0
  13. data/lib/graphlyte/editors/with_variables.rb +106 -0
  14. data/lib/graphlyte/errors.rb +33 -0
  15. data/lib/graphlyte/lexer.rb +392 -0
  16. data/lib/graphlyte/lexing/location.rb +43 -0
  17. data/lib/graphlyte/lexing/token.rb +31 -0
  18. data/lib/graphlyte/parser.rb +269 -0
  19. data/lib/graphlyte/parsing/backtracking_parser.rb +160 -0
  20. data/lib/graphlyte/refinements/string_refinement.rb +14 -8
  21. data/lib/graphlyte/refinements/syntax_refinements.rb +62 -0
  22. data/lib/graphlyte/schema.rb +165 -0
  23. data/lib/graphlyte/schema_query.rb +82 -65
  24. data/lib/graphlyte/selection_builder.rb +189 -0
  25. data/lib/graphlyte/selector.rb +75 -0
  26. data/lib/graphlyte/serializer.rb +223 -0
  27. data/lib/graphlyte/syntax.rb +369 -0
  28. data/lib/graphlyte.rb +24 -42
  29. metadata +88 -18
  30. data/lib/graphlyte/arguments/set.rb +0 -88
  31. data/lib/graphlyte/arguments/value.rb +0 -32
  32. data/lib/graphlyte/builder.rb +0 -53
  33. data/lib/graphlyte/directive.rb +0 -21
  34. data/lib/graphlyte/field.rb +0 -65
  35. data/lib/graphlyte/fieldset.rb +0 -36
  36. data/lib/graphlyte/fragment.rb +0 -17
  37. data/lib/graphlyte/inline_fragment.rb +0 -29
  38. data/lib/graphlyte/query.rb +0 -148
  39. data/lib/graphlyte/schema/parser.rb +0 -674
  40. data/lib/graphlyte/schema/types/base.rb +0 -54
  41. data/lib/graphlyte/types.rb +0 -9
data/lib/graphlyte/lexer.rb
@@ -0,0 +1,392 @@
+ # frozen_string_literal: true
+
+ require 'forwardable'
+
+ require_relative './lexing/token'
+ require_relative './lexing/location'
+
+ # See: https://github.com/graphql/graphql-spec/blob/main/spec/Appendix%20B%20--%20Grammar%20Summary.md
+ #
+ # This module implements tokenization of
+ # [Lexical Tokens](https://github.com/graphql/graphql-spec/blob/main/spec/Appendix%20B%20--%20Grammar%20Summary.md#lexical-tokens)
+ # as per the GraphQL spec.
+ #
+ # Usage:
+ #
+ # > Graphlyte::Lexer.lex(source)
+ #
+ module Graphlyte
+   LexError = Class.new(StandardError)
+
+   # A terminal production. May or may not produce a lexical token.
+   class Production
+     attr_reader :token
+
+     def initialize(token)
+       @token = token
+     end
+   end
+
+   # Transform a string into a stream of tokens - i.e. lexing
+   class Lexer
+     LINEFEED = "\u000a"
+     CARRIAGE_RETURN = "\u000d"
+     NEW_LINE = [LINEFEED, CARRIAGE_RETURN].freeze
+     HORIZONTAL_TAB = "\u0009"
+     SPACE = "\u0020"
+     WHITESPACE = [HORIZONTAL_TAB, SPACE].freeze
+     COMMENT_CHAR = '#'
+     DOUBLE_QUOTE = '"'
+     BLOCK_QUOTE = '"""'
+     BACK_QUOTE = '\\'
+     COMMA = ','
+     UNICODE_BOM = "\ufeff"
+     IGNORED = [UNICODE_BOM, COMMA, *WHITESPACE].freeze
+     PUNCTUATOR = ['!', '$', '&', '(', ')', '...', ':', '=', '@', '[', ']', '{', '|', '}'].freeze
+     LETTERS = %w[
+       A B C D E F G H I J K L M
+       N O P Q R S T U V W X Y Z
+       a b c d e f g h i j k l m
+       n o p q r s t u v w x y z
+     ].freeze
+
+     DIGITS = %w[0 1 2 3 4 5 6 7 8 9].freeze
+
+     attr_reader :source, :tokens
+     attr_accessor :line, :column, :index, :lexeme_start_p
+
+     def initialize(source)
+       @source = source
+       @tokens = []
+       @line = 1
+       @column = 1
+       @index = 0
+       @lexeme_start_p = Lexing::Position.new(0, 0)
+     end
+
+     def self.lex(source)
+       lexer = new(source)
+       lexer.tokenize!
+
+       lexer.tokens
+     end
+
+     def tokenize!
+       while source_uncompleted?
+         self.lexeme_start_p = current_position
+
+         token = next_token
+
+         tokens << token if token
+       end
+
+       tokens << Lexing::Token.new(:EOF, nil, after_source_end_location)
+     end
+
+     def after_source_end_location
+       Lexing::Location.eof
+     end
+
+     def source_uncompleted?
+       index < source.length
+     end
+
+     def eof?
+       !source_uncompleted?
+     end
+
+     def lookahead(offset = 1)
+       lookahead_p = (index - 1) + offset
+       return "\0" if lookahead_p >= source.length
+
+       source[lookahead_p]
+     end
+
+     def match(str)
+       str.chars.each_with_index.all? do |char, offset|
+         lookahead(offset + 1) == char
+       end
+     end
+
+     def lex_error(msg)
+       raise LexError, "#{msg} at #{line}:#{column}"
+     end
+
+     def one_of(strings)
+       strings.each do |s|
+         return s if consume(s)
+       end
+
+       nil
+     end
+
+     def string
+       if lookahead == DOUBLE_QUOTE && lookahead(2) != DOUBLE_QUOTE
+         consume
+         '' # The empty string
+       elsif consume('""') # Block string
+         block_string_content
+       else
+         string_content
+       end
+     end
+
+     def string_content
+       chars = []
+       while (char = string_character)
+         chars << char
+       end
+
+       lex_error('Unterminated string') unless consume(DOUBLE_QUOTE)
+
+       chars.join
+     end
+
+     def string_character(block_string: false)
+       return if eof?
+       return if lookahead == DOUBLE_QUOTE
+
+       c = consume
+
+       lex_error("Illegal character #{c.inspect}") if !block_string && NEW_LINE.include?(c)
+
+       if c == BACK_QUOTE
+         escaped_character
+       else
+         c
+       end
+     end
+
+     def escaped_character
+       c = consume
+
+       case c
+       when DOUBLE_QUOTE then DOUBLE_QUOTE
+       when BACK_QUOTE then BACK_QUOTE
+       when '/' then '/'
+       when 'b' then "\b"
+       when 'f' then "\f"
+       when 'n' then LINEFEED
+       when 'r' then "\r"
+       when 't' then "\t"
+       when 'u' then hex_char
+       else
+         lex_error("Unexpected escaped character in string: #{c}")
+       end
+     end
+
+     def hex_char
+       char_code = [1, 2, 3, 4].map do
+         d = consume
+         hex_digit = (digit?(d) || ('a'...'f').cover?(d.downcase))
+         lex_error("Expected a hex digit in unicode escape sequence. Got #{d.inspect}") unless hex_digit
+
+         d
+       end
+
+       char_code.join.hex.chr
+     end
+
+     def block_string_content
+       chars = block_chars_raw
+
+       lines = chomp_lines(chars.join.lines)
+       # Consistent indentation
+       left_margin = lines.map do |line|
+         line.chars.take_while { _1 == ' ' }.length
+       end.min
+
+       lines.map { _1[left_margin..] }.join(LINEFEED)
+     end
+
+     # Strip leading and trailing blank lines, and whitespace on the right margins
+     def chomp_lines(lines)
+       strip_trailing_blank_lines(strip_leading_blank_lines(lines.map(&:chomp)))
+     end
+
+     def strip_leading_blank_lines(lines)
+       lines.drop_while { _1 =~ /^\s*$/ }
+     end
+
+     def strip_trailing_blank_lines(lines)
+       strip_leading_blank_lines(lines.reverse).reverse
+     end
+
+     def block_chars_raw
+       chars = []
+       terminated = false
+
+       until eof? || (terminated = consume(BLOCK_QUOTE))
+         chars << BLOCK_QUOTE if consume("\\#{BLOCK_QUOTE}")
+         chars << '"' while consume(DOUBLE_QUOTE)
+         while (char = string_character(block_string: true))
+           chars << char
+         end
+       end
+
+       lex_error('Unterminated string') unless terminated
+
+       chars
+     end
+
+     def take_while
+       chars = []
+       chars << consume while yield(lookahead)
+
+       chars
+     end
+
+     def seek(offset)
+       self.index += offset
+     end
+
+     def consume(str = nil)
+       return if str && !match(str)
+
+       c = str || lookahead
+
+       self.index += c.length
+       self.column += c.length
+       c
+     end
+
+     def current_location
+       Lexing::Location.new(lexeme_start_p, current_position)
+     end
+
+     def current_position
+       Lexing::Position.new(line, column)
+     end
+
+     def next_token
+       (punctuator || skip_line || lexical_token).token
+     end
+
+     def punctuator
+       p = one_of(PUNCTUATOR)
+
+       Production.new(Lexing::Token.new(:PUNCTUATOR, p, current_location)) if p
+     end
+
+     def skip_line
+       lf = one_of([LINEFEED, "#{CARRIAGE_RETURN}#{LINEFEED}"])
+       return unless lf
+
+       next_line!
+       Production.new(nil)
+     end
+
+     def lexical_token
+       c = consume
+       t = if IGNORED.include?(c)
+             nil
+           elsif c == COMMENT_CHAR
+             ignore_comment_line
+           elsif name_start?(c)
+             to_token(:NAME) { name(c) }
+           elsif string_start?(c)
+             to_token(:STRING) { string }
+           elsif numeric_start?(c)
+             to_token(:NUMBER) { number(c) }
+           else
+             lex_error("Unexpected character: #{c.inspect}")
+           end
+
+       Production.new(t)
+     end
+
+     def next_line!
+       self.line += 1
+       self.column = 1
+     end
+
+     def string_start?(char)
+       char == '"'
+     end
+
+     def numeric_start?(char)
+       case char
+       when '-'
+         DIGITS.include?(lookahead)
+       when '0'
+         !DIGITS.include?(lookahead)
+       else
+         char != '0' && DIGITS.include?(char)
+       end
+     end
+
+     def to_token(type)
+       i = index - 1
+       value = yield
+       j = index
+
+       Lexing::Token.new(type, source[i..j], current_location, value: value)
+     end
+
+     def number(char)
+       is_negated = char == '-'
+
+       int_part = is_negated ? [] : [char]
+       int_part += take_while { digit?(_1) }
+
+       frac_part = fractional_part
+       exp_part = exponent_part
+
+       Syntax::NumericLiteral.new(integer_part: int_part&.join(''),
+                                  fractional_part: frac_part&.join(''),
+                                  exponent_part: exp_part,
+                                  negated: is_negated)
+     end
+
+     def fractional_part
+       return unless consume('.')
+
+       lex_error("Expected a digit, got #{lookahead}") unless digit?(lookahead)
+
+       take_while { digit?(_1) }
+     end
+
+     def exponent_part
+       return unless one_of(%w[e E])
+
+       sign = one_of(%w[- +])
+       lex_error("Expected a digit, got #{lookahead}") unless digit?(lookahead)
+
+       digits = take_while { digit?(_1) }
+
+       [sign, digits.join]
+     end
+
+     def name(char)
+       value = [char] + take_while { name_continue?(_1) }
+
+       value.join
+     end
+
+     def name_start?(char)
+       letter?(char) || underscore?(char)
+     end
+
+     def name_continue?(char)
+       letter?(char) || digit?(char) || underscore?(char)
+     end
+
+     def letter?(char)
+       LETTERS.include?(char)
+     end
+
+     def underscore?(char)
+       char == '_'
+     end
+
+     def digit?(char)
+       DIGITS.include?(char)
+     end
+
+     def ignore_comment_line
+       take_while { !NEW_LINE.include?(_1) }
+
+       nil
+     end
+   end
+ end
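As the module comment above notes, the entry point is Graphlyte::Lexer.lex(source), which returns the token stream with a trailing :EOF token. A minimal usage sketch (the require path assumes the gem's lib/ layout; the query text is illustrative):

    require 'graphlyte/lexer'

    tokens = Graphlyte::Lexer.lex('{ currentUser { name } }')
    tokens.map(&:type)
    # => [:PUNCTUATOR, :NAME, :PUNCTUATOR, :NAME, :PUNCTUATOR, :PUNCTUATOR, :EOF]
    tokens.first.lexeme # => "{"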
data/lib/graphlyte/lexing/location.rb
@@ -0,0 +1,43 @@
+ # frozen_string_literal: true
+
+ module Graphlyte
+   module Lexing
+     Position = Struct.new(:line, :col) do
+       def to_s
+         "#{line}:#{col}"
+       end
+     end
+
+     # A source file location
+     class Location
+       attr_reader :start_pos, :end_pos
+
+       def initialize(start_pos, end_pos)
+         @start_pos = start_pos
+         @end_pos = end_pos
+       end
+
+       def to(location)
+         self.class.new(start_pos, location.end_pos)
+       end
+
+       def self.eof
+         new(nil, nil)
+       end
+
+       def eof?
+         start_pos.nil?
+       end
+
+       def ==(other)
+         other.is_a?(self.class) && to_s == other.to_s
+       end
+
+       def to_s
+         return 'EOF' if eof?
+
+         "#{start_pos}-#{end_pos}"
+       end
+     end
+   end
+ end
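Position and Location are small value objects: Location#to widens a span to the end of another location, and equality is defined via the string form. A quick illustration using only the constructors shown above:

    start  = Graphlyte::Lexing::Position.new(1, 1)
    middle = Graphlyte::Lexing::Position.new(1, 9)
    stop   = Graphlyte::Lexing::Position.new(2, 4)

    first  = Graphlyte::Lexing::Location.new(start, middle)
    second = Graphlyte::Lexing::Location.new(middle, stop)

    first.to(second).to_s                 # => "1:1-2:4"
    Graphlyte::Lexing::Location.eof.to_s  # => "EOF"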
data/lib/graphlyte/lexing/token.rb
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ require 'forwardable'
+
+ module Graphlyte
+   module Lexing
+     # A lexical token
+     class Token
+       extend Forwardable
+
+       attr_reader :type, :lexeme, :location
+
+       def_delegators :@location, :line, :col, :length
+
+       def initialize(type, lexeme, location, value: nil)
+         @type = type
+         @lexeme = lexeme
+         @value = value
+         @location = location
+       end
+
+       def value
+         @value || @lexeme
+       end
+
+       def punctuator?(value)
+         @type == :PUNCTUATOR && @lexeme == value
+       end
+     end
+   end
+ end
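Token#value falls back to the raw lexeme when the lexer attached no parsed value, and punctuator? matches against the lexeme. For example, built from the classes above:

    loc = Graphlyte::Lexing::Location.new(
      Graphlyte::Lexing::Position.new(1, 1),
      Graphlyte::Lexing::Position.new(1, 2)
    )
    bang = Graphlyte::Lexing::Token.new(:PUNCTUATOR, '!', loc)

    bang.punctuator?('!') # => true
    bang.value            # => "!" (no explicit value was given, so the lexeme is returned)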
data/lib/graphlyte/parser.rb
@@ -0,0 +1,269 @@
+ # frozen_string_literal: true
+
+ require_relative './errors'
+ require_relative './syntax'
+ require_relative './document'
+ require_relative './parsing/backtracking_parser'
+
+ module Graphlyte
+   # A parser of GraphQL documents from a stream of lexical tokens.
+   class Parser < Parsing::BacktrackingParser
+     def document
+       doc = Graphlyte::Document.new
+       doc.definitions = some { definition }
+
+       expect(:EOF)
+
+       doc
+     end
+
+     # Restricted parser: only parses executable definitions
+     def query
+       doc = Graphlyte::Document.new
+       doc.definitions = some { executable_definition }
+
+       expect(:EOF)
+
+       doc
+     end
+
+     def definition
+       one_of(:executable_definition, :type_definition)
+     end
+
+     def executable_definition
+       one_of(:fragment, :operation)
+     end
+
+     def operation
+       t = next_token
+
+       case t.type
+       when :PUNCTUATOR
+         @index -= 1
+         implicit_query
+       when :NAME
+         operation_from_kind(t.value.to_sym)
+       else
+         raise Unexpected, t
+       end
+     end
+
+     def implicit_query
+       Graphlyte::Syntax::Operation.new(type: :query, selection: selection_set)
+     end
+
+     def operation_from_kind(kind)
+       op = Graphlyte::Syntax::Operation.new
+
+       try_parse do
+         op.type = kind
+         op.name = optional { name }
+         op.variables = optional { variable_definitions }
+         op.directives = directives
+         op.selection = selection_set
+       end
+
+       op
+     end
+
+     def selection_set
+       bracket('{', '}') do
+         some { one_of(:inline_fragment, :fragment_spread, :field_selection) }
+       end
+     end
+
+     def fragment_spread
+       frag = Graphlyte::Syntax::FragmentSpread.new
+
+       punctuator('...')
+       frag.name = name
+       frag.directives = directives
+
+       frag
+     end
+
+     def inline_fragment
+       punctuator('...')
+       name('on')
+
+       frag = Graphlyte::Syntax::InlineFragment.new
+
+       frag.type_name = name
+       frag.directives = directives
+       frag.selection = selection_set
+
+       frag
+     end
+
+     def field_selection
+       field = Graphlyte::Syntax::Field.new
+
+       field.as = optional do
+         n = name
+         punctuator(':')
+
+         n
+       end
+
+       field.name = name
+       field.arguments = optional_list { arguments }
+       field.directives = directives
+       field.selection = optional_list { selection_set }
+
+       field
+     end
+
+     def arguments
+       bracket('(', ')') { some { parse_argument } }
+     end
+
+     def parse_argument
+       arg = Graphlyte::Syntax::Argument.new
+
+       arg.name = name
+       expect(:PUNCTUATOR, ':')
+       arg.value = parse_value
+
+       arg
+     end
+
+     def parse_value
+       t = next_token
+
+       case t.type
+       when :STRING, :NUMBER
+         Graphlyte::Syntax::Value.new(t.value, t.type)
+       when :NAME
+         Graphlyte::Syntax::Value.from_name(t.value)
+       when :PUNCTUATOR
+         case t.value
+         when '$'
+           Graphlyte::Syntax::VariableReference.new(name)
+         when '{'
+           @index -= 1
+           parse_object_value
+         when '['
+           @index -= 1
+           parse_array_value
+         else
+           raise Unexpected, t
+         end
+       else
+         raise Unexpected, t
+       end
+     end
+
+     def parse_array_value
+       bracket('[', ']') { many { parse_value } }
+     end
+
+     def parse_object_value
+       bracket('{', '}') do
+         many do
+           n = name
+           expect(:PUNCTUATOR, ':')
+           value = parse_value
+
+           [n, value]
+         end.to_h
+       end
+     end
+
+     def directives
+       ret = []
+       while peek(offset: 1).punctuator?('@')
+         d = Graphlyte::Syntax::Directive.new
+
+         expect(:PUNCTUATOR, '@')
+         d.name = name
+         d.arguments = optional { arguments }
+
+         ret << d
+       end
+
+       ret
+     end
+
+     def operation_type
+       raise Unexpected, current unless current.type == :NAME
+
+       current.value.to_sym
+     end
+
+     def variable_definitions
+       bracket('(', ')') do
+         some do
+           var = Graphlyte::Syntax::VariableDefinition.new
+
+           var.variable = variable_name
+           expect(:PUNCTUATOR, ':')
+           var.type = one_of(:list_type_name, :type_name)
+
+           var.default_value = optional { default_value }
+           var.directives = directives
+
+           var
+         end
+       end
+     end
+
+     def default_value
+       expect(:PUNCTUATOR, '=')
+
+       parse_value
+     end
+
+     def variable_name
+       expect(:PUNCTUATOR, '$')
+
+       name
+     end
+
+     def type_name(list: false)
+       ty = Graphlyte::Syntax::Type.new(name)
+
+       t = peek(offset: 1)
+       ty.non_null = t.punctuator?('!')
+       ty.is_list = list
+       advance if ty.non_null
+
+       ty
+     end
+
+     def type_name!
+       ty = type_name
+       expect(:EOF)
+
+       ty
+     end
+
+     def list_type_name
+       type = bracket('[', ']') { type_name(list: true) }
+       t = peek(offset: 1)
+       type.non_null_list = t.punctuator?('!')
+       advance if type.non_null_list
+
+       type
+     end
+
+     def fragment
+       frag = Graphlyte::Syntax::Fragment.new
+
+       expect(:NAME, 'fragment')
+       frag.name = name
+
+       expect(:NAME, 'on')
+
+       frag.type_name = name
+       frag.directives = directives
+       frag.selection = selection_set
+
+       frag
+     end
+
+     def type_definition
+       raise ParseError, "TODO: #{current.location}"
+     end
+   end
+ end
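Taken together, Lexer.lex feeds the Parser, whose document and query entry points build a Graphlyte::Document of Syntax nodes. The sketch below is hypothetical glue: the constructor comes from Parsing::BacktrackingParser (parsing/backtracking_parser.rb, not shown in this excerpt), so the Parser.new(tokens) call is an assumption, not the gem's documented API.

    require 'graphlyte/lexer'
    require 'graphlyte/parser'

    source = <<~GQL
      query Hero($id: ID!) {
        hero(id: $id) {
          name
          ...friendNames @include(if: true)
        }
      }

      fragment friendNames on Character {
        friends { name }
      }
    GQL

    tokens = Graphlyte::Lexer.lex(source)
    doc = Graphlyte::Parser.new(tokens).query # assumed constructor signature
    doc.definitions.map(&:class)
    # => [Graphlyte::Syntax::Operation, Graphlyte::Syntax::Fragment]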