graphlyte 0.3.0 → 1.0.0
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/graphlyte/data.rb +68 -0
- data/lib/graphlyte/document.rb +131 -0
- data/lib/graphlyte/dsl.rb +86 -0
- data/lib/graphlyte/editor.rb +288 -0
- data/lib/graphlyte/editors/annotate_types.rb +75 -0
- data/lib/graphlyte/editors/canonicalize.rb +26 -0
- data/lib/graphlyte/editors/collect_variable_references.rb +36 -0
- data/lib/graphlyte/editors/infer_signature.rb +36 -0
- data/lib/graphlyte/editors/inline_fragments.rb +37 -0
- data/lib/graphlyte/editors/remove_unneeded_spreads.rb +64 -0
- data/lib/graphlyte/editors/select_operation.rb +116 -0
- data/lib/graphlyte/editors/with_variables.rb +106 -0
- data/lib/graphlyte/errors.rb +33 -0
- data/lib/graphlyte/lexer.rb +392 -0
- data/lib/graphlyte/lexing/location.rb +43 -0
- data/lib/graphlyte/lexing/token.rb +31 -0
- data/lib/graphlyte/parser.rb +269 -0
- data/lib/graphlyte/parsing/backtracking_parser.rb +160 -0
- data/lib/graphlyte/refinements/string_refinement.rb +14 -8
- data/lib/graphlyte/refinements/syntax_refinements.rb +62 -0
- data/lib/graphlyte/schema.rb +165 -0
- data/lib/graphlyte/schema_query.rb +82 -65
- data/lib/graphlyte/selection_builder.rb +189 -0
- data/lib/graphlyte/selector.rb +75 -0
- data/lib/graphlyte/serializer.rb +223 -0
- data/lib/graphlyte/syntax.rb +369 -0
- data/lib/graphlyte.rb +24 -42
- metadata +88 -18
- data/lib/graphlyte/arguments/set.rb +0 -88
- data/lib/graphlyte/arguments/value.rb +0 -32
- data/lib/graphlyte/builder.rb +0 -53
- data/lib/graphlyte/directive.rb +0 -21
- data/lib/graphlyte/field.rb +0 -65
- data/lib/graphlyte/fieldset.rb +0 -36
- data/lib/graphlyte/fragment.rb +0 -17
- data/lib/graphlyte/inline_fragment.rb +0 -29
- data/lib/graphlyte/query.rb +0 -148
- data/lib/graphlyte/schema/parser.rb +0 -674
- data/lib/graphlyte/schema/types/base.rb +0 -54
- data/lib/graphlyte/types.rb +0 -9
--- a/data/lib/graphlyte/schema/parser.rb
+++ /dev/null
@@ -1,674 +0,0 @@
-require "strscan"
-require_relative "../fieldset"
-require_relative "../query"
-require_relative "../fragment"
-require_relative "../schema_query"
-require_relative "../types"
-
-module Graphlyte
-  module Schema
-    module ParserHelpers
-      def parse_fields
-        fields = repeat(:parse_field)
-        fields
-      end
-
-      def skip_fieldset
-        expect(:FIELDSET)
-        parse_fields
-        need(:END_FIELDSET)
-      end
-
-      def parse_field
-        alias_field = expect(:ALIAS)
-        if token = expect(:FRAGMENT_REF)
-          raise "Can't find fragment #{token[0][1]}" unless fragments_dictionary[token[0][1]]
-          fragments_dictionary[token[0][1]]
-        elsif expect(:INLINE_FRAGMENT)
-          field = parse_inline_fragment
-        elsif expect(:FIELDSET)
-
-        elsif (field = expect(:FIELD_NAME))
-          args = parse_args
-          directive = parse_directive
-
-          if builder = parse_fieldset_into_builder
-            need(:END_FIELDSET)
-            fieldset = Fieldset.new(builder: builder)
-            field = Field.new(field[0][1], fieldset, args, directive: directive)
-          else
-            field = Field.new(field[0][1], Fieldset.empty, args, directive: directive)
-          end
-
-          if alias_field
-            field.alias(alias_field[0][1])
-          end
-
-          field
-        end
-      end
-
-      def parse_inline_fragment
-        model_name = expect(:MODEL_NAME)&.dig(0, 1)
-        directive = parse_directive
-        inputs = directive ? (parse_args || {}) : {}
-        fields = expect(:FIELDSET) ? parse_fields : []
-        need(:END_FIELDSET)
-
-        InlineFragment.new(model_name, directive: directive, builder: Builder.new(fields), **inputs)
-      end
-
-      def parse_directive
-        if token = expect(:DIRECTIVE)
-          inputs = parse_args || {}
-
-          Directive.new(token[0][1], **inputs)
-        end
-      end
-
-      def parse_fieldset_into_builder
-        fields = []
-        if expect(:FIELDSET)
-          fields = parse_fields
-          Builder.new(fields)
-        end
-      end
-
-      def parse_args
-        if expect(:START_ARGS)
-          args = repeat(:parse_arg).inject(&:merge)
-          need(:END_ARGS)
-          args
-        end
-      end
-
-      def parse_default
-        if expect(:DEFAULT_VALUE)
-          value = parse_value
-          need(:END_DEFAULT_VALUE)
-          value
-        end
-      end
-
-      def parse_arg
-        if (token = expect(:ARG_KEY)) && (value = parse_value)
-          defaults = parse_default
-          key = token[0][1]
-          hash = {}
-          hash[key] = value
-          hash
-        elsif (token = expect(:SPECIAL_ARG_KEY)) && (value = parse_value)
-          defaults = parse_default
-          @special_args ||= {}
-          arg = {}
-          if [Array, Hash].include?(value.class)
-            arg[token[0][1]] = value
-          else
-            new_val = Schema::Types::Base.new(value, token[0][1], defaults)
-            arg[token[0][1]] = new_val
-          end
-          @special_args.merge!(arg)
-          arg
-        end
-      end
-
-      def parse_value
-        if token = expect(:ARG_NUM_VALUE) || expect(:ARG_STRING_VALUE) || expect(:ARG_BOOL_VALUE) || expect(:ARG_FLOAT_VALUE)
-          token[0][1]
-        elsif token = expect(:SPECIAL_ARG_REF)
-          ref = token[0][1]
-          raise "Can't find ref $#{ref}" unless @special_args[ref]
-          @special_args[ref]
-        elsif token = expect(:SPECIAL_ARG_VAL)
-          token[0][1]
-        elsif token = expect(:ARG_HASH)
-          parse_arg_hash
-        elsif expect(:ARG_ARRAY)
-          parse_arg_array
-        end
-      end
-
-      def parse_arg_array
-        args = repeat(:parse_value)
-        need(:ARG_ARRAY_END)
-        args
-      end
-
-      def parse_arg_hash
-        if (key = expect(:ARG_KEY)) && (value = parse_value)
-          hash = {}
-          hash[key[0][1]] = value
-          hash
-          if new_hash = parse_arg_hash
-            hash.merge!(new_hash)
-          else
-            need(:ARG_HASH_END)
-            hash
-          end
-        end
-      end
-
-      def repeat(method)
-        results = []
-
-        while result = send(method)
-          results << result
-        end
-
-        results
-      end
-
-      def expect(*expected_tokens)
-        upcoming = tokens[position, expected_tokens.size]
-        if upcoming.map(&:first) == expected_tokens
-          advance(expected_tokens.size)
-          upcoming
-        end
-      end
-
-      def tokens?
-        !tokens[position].nil?
-      end
-
-      def need(*required_tokens)
-        upcoming = tokens[position, required_tokens.size]
-        expect(*required_tokens) or raise "Unexpected tokens. Expected #{required_tokens.inspect} but got #{upcoming.inspect}"
-      end
-
-      def advance(offset = 1)
-        @position += offset
-      end
-
-      def sort_fragments(sorted = [], fragments)
-        return sorted if !fragments || fragments.empty?
-        fragment_tokens = fragments.shift
-
-        current_ref = fragment_tokens.find do |token|
-          token[0] == :FRAGMENT_REF
-        end
-
-        if current_ref
-          exists = sorted.any? do |frags|
-            frags.find do |el|
-              el[0] == :FRAGMENT && el[1] == current_ref[1]
-            end
-          end
-          if exists
-            sorted << fragment_tokens
-            sort_fragments(sorted, fragments)
-          else
-            fragments.push fragment_tokens
-            sort_fragments(sorted, fragments)
-          end
-        else
-          sorted << fragment_tokens
-          sort_fragments(sorted, fragments)
-        end
-      end
-
-      def take_fragments
-        aggregate = @tokens.inject({taking: false, idx: 0, fragments: []}) do |memo, token_arr|
-          if token_arr[0] == :END_FIELDSET
-            memo[:fragments][memo[:idx]] << token_arr
-            memo[:taking] = false
-            memo[:idx] += 1
-          elsif token_arr[0] === :FRAGMENT
-            memo[:fragments][memo[:idx]] = [token_arr]
-            memo[:taking] = true
-          elsif memo[:taking]
-            memo[:fragments][memo[:idx]] << token_arr
-          end
-          memo
-        end
-        aggregate[:fragments]
-      end
-
-      def fetch_fragments(tokens = @tokens.dup, fragment_tokens = [], memo = { active: false, starts: 0, ends: 0, idx: 0 })
-        token_arr = tokens.shift
-        return fragment_tokens if token_arr.nil?
-
-        if memo[:active] == true
-          fragment_tokens[memo[:idx]] << token_arr
-        end
-
-        if token_arr[0] == :END_FIELDSET && memo[:active] == true
-          memo[:ends] += 1
-          fragment_tokens[memo[:idx]] << token_arr if memo[:starts] == memo[:ends]
-
-          memo[:active] = false
-          memo[:ends] = 0
-          memo[:starts] = 0
-          memo[:idx] += 1
-        elsif token_arr[0] == :FRAGMENT
-          memo[:active] = true
-          memo[:starts] += 1
-          fragment_tokens[memo[:idx]] = [token_arr]
-        elsif token_arr[0] == :FIELDSET && memo[:active] == true
-          memo[:starts] += 1
-        end
-
-        fetch_fragments(tokens, fragment_tokens, memo)
-      end
-    end
-
-    class FragmentParser
-      attr_reader :tokens, :position, :fragments_dictionary
-
-      include ParserHelpers
-
-      def initialize(tokens)
-        @tokens = tokens.flatten(1)
-        @position = 0
-        @fragments_dictionary = {}
-      end
-
-      def parse_fragments
-        repeat(:parse_fragment)
-        fragments_dictionary
-      end
-
-      def parse_fragment
-        if token = expect(:FRAGMENT)
-          parse_args
-          if builder = parse_fieldset_into_builder
-            fragment = Fragment.new(token[0][1], token[0][2], builder: builder)
-            need(:END_FIELDSET) if tokens?
-          elsif fields = parse_fields
-            builder = Builder.new(fields)
-            fragment = Fragment.new(token[0][1], token[0][2], builder: builder)
-          end
-          @fragments_dictionary[token[0][1]] = fragment
-        end
-      end
-    end
-
-    class Parser
-      attr_reader :tokens, :position, :fragments_dictionary
-
-      include ParserHelpers
-
-      def self.parse(gql)
-        obj = new Lexer.new(gql).tokenize
-        obj.parse
-      end
-
-      def initialize(tokens)
-        @tokens = tokens
-
-        @fragment_tokens = sort_fragments([], fetch_fragments)
-        @fragments_dictionary = {}
-        @fragments_dictionary = @fragment_tokens.any? ? FragmentParser.new(@fragment_tokens).parse_fragments : {}
-        @position = 0
-      end
-
-      def parse
-        if token = expect(:EXPRESSION)
-          parse_expression(token[0][1], token[0][2])
-        elsif expect(:FRAGMENT)
-          skip_fragments
-          parse
-        else
-          raise "INVALID"
-        end
-      end
-
-      def skip_fragments
-        skip_fieldset
-      end
-
-      def parse_expression(type, name)
-        parse_args
-        fields = []
-        builder = parse_fieldset_into_builder
-        need(:END_FIELDSET)
-        query = Query.new(name, type.to_sym, builder: builder)
-        query
-      end
-    end
-
-    class LexerError < StandardError; end
-
-    class Lexer
-      attr_reader :stack, :scanner
-      def initialize(gql, scanner: StringScanner.new(gql))
-        @original_string = gql
-        @scanner = scanner
-        @tokens = []
-      end
-
-      SPECIAL_ARG_REGEX = /^\s*(?:(?<![\"\{]))([\w\!\[\]]+)(?:(?![\"\}]))/
-
-      def tokenize
-        until scanner.eos?
-          tokenize_objects
-        end
-
-        @tokens
-      end
-
-      def tokenize_objects
-        case state
-        when :default # the stack is empty, can only process top level fragments or expressions
-          if scanner.scan %r{\s*fragment\s*(\w+)\s*on\s*(\w+)}
-            @tokens << [:FRAGMENT, scanner[1], scanner[2]]
-            push_context :fragments
-            # check for a fieldset
-            if scanner.check %r[\s*{]
-              tokenize_fieldset
-            else
-              scanner.scan /\(/
-              @tokens << [:START_ARGS]
-              push_state :arguments
-            end
-          elsif scanner.check /\{/
-            push_context :fieldset
-
-            tokenize_fieldset
-          elsif scanner.scan %r{^(\w+) (\w+)}
-            @tokens << [:EXPRESSION, scanner[1], scanner[2]]
-            push_context :expression
-            # check for a fieldset
-            if scanner.check %r[\s*{]
-              tokenize_fieldset
-            else
-              scanner.scan /\(/
-              @tokens << [:START_ARGS]
-              push_state :arguments
-            end
-          elsif scanner.check /\s*\}/
-            if get_context == :fragments
-              end_fragment
-            elsif get_context == :expression
-              end_expression
-            end
-          else
-            advance
-          end
-        when :fieldset
-          tokenize_fields
-        when :arguments
-          tokenize_arguments
-        when :argument_defaults
-          tokenize_shared_arguments
-        when :hash_arguments
-          tokenize_hash_arguments
-        when :array_arguments
-          tokenize_array_arguments
-        when :special_args
-          tokenize_special_arguments
-        when :inline_fragment
-          tokenize_inline_fragment
-        end
-      end
-
-      def check_for_last(regex = /\s*\}/)
-        scanner.check regex
-      end
-
-      def check_for_final
-        scanner.check /\s*\}(?!\s*\})/
-      end
-
-      def check_for_not_last
-        scanner.check /\s*\}(?=\s*\})/
-      end
-
-      def tokenize_inline_fragment
-        if scanner.scan /on (\w+)/
-          @tokens << [:MODEL_NAME, scanner[1]]
-
-          pop_state
-        elsif scanner.scan /@(\w+)/
-          @tokens << [:DIRECTIVE, scanner[1]]
-
-          pop_state
-        else
-          # throw an error here?
-          advance
-        end
-      end
-
-      def end_fieldset
-        scanner.scan /\s*\}/
-        @tokens << [:END_FIELDSET]
-        pop_state
-      end
-
-      def end_arguments
-        scanner.scan /\s*\)/
-        @tokens << [:END_ARGS]
-        pop_state
-      end
-
-      def end_fragment
-        scanner.scan /\s*\}/
-        @tokens << [:END_FRAGMENT]
-        pop_state
-        pop_context
-      end
-
-      def end_expression
-        scanner.scan /\s*\}/
-        @tokens << [:END_EXPRESSION]
-        pop_state
-        pop_context
-      end
-
-      # to tired to figure out why this is right now
-      def tokenize_argument_defaults
-        if scanner.scan /\)/
-          @tokens << [:END_DEFAULT_VALUE]
-          pop_state
-        else
-          tokenize_shared_arguments
-        end
-      end
-
-      def tokenize_special_arguments
-        if scanner.check SPECIAL_ARG_REGEX
-          scanner.scan SPECIAL_ARG_REGEX
-
-          @tokens << [:SPECIAL_ARG_VAL, scanner[1]]
-
-          pop_state
-
-          end_arguments if check_for_last(/\s*\)/)
-        else
-          # revisit this.. should we throw an error here?
-          pop_state
-          raise LexerError, "why can't we parse #{scanner.peek(5)}"
-        end
-      end
-
-      def tokenize_array_arguments
-        if scanner.scan /\]/
-          @tokens << [:ARG_ARRAY_END]
-
-          pop_state
-          # if check_for_last(')')
-          #   pop_state
-          # end
-        else
-          tokenize_shared_arguments
-        end
-      end
-
-      def tokenize_hash_arguments
-        if scanner.scan /\}/
-          @tokens << [:ARG_HASH_END]
-
-          pop_state
-          # if this is the last argument in the list, maybe get back to the field scope?
-          # if check_for_last(')')
-          #   pop_state
-          # end
-        else
-          tokenize_shared_arguments
-        end
-      end
-
-      def tokenize_arguments
-        # pop argument state if arguments are finished
-        if scanner.scan %r{\)}
-          @tokens << [:END_ARGS]
-
-          pop_state
-        # something(argument: $argument = true)
-        #                               ^
-        elsif scanner.scan %r{=}
-          @tokens << [:DEFAULT_VALUE]
-
-          push_state :argument_defaults
-        # noop, should expect this, but not important
-        elsif scanner.scan %r{,}
-          nil
-        else
-          tokenize_shared_arguments
-        end
-      end
-
-      def tokenize_shared_arguments
-        if scanner.scan /^(\w+):/
-          @tokens << [:ARG_KEY, scanner[1]]
-        elsif scanner.scan %r[{]
-          @tokens << [:ARG_HASH]
-
-          push_state :hash_arguments
-        elsif scanner.scan /\[/
-          @tokens << [:ARG_ARRAY]
-
-          push_state :array_arguments
-        elsif scanner.scan %r{"(.*?)"}
-          @tokens << [:ARG_STRING_VALUE, scanner[1]]
-
-          end_arguments if check_for_last(/\s*\)/)
-        elsif scanner.scan /(\d+\.\d+)/
-          @tokens << [:ARG_FLOAT_VALUE, scanner[1].to_f]
-
-          end_arguments if check_for_last(/\s*\)/)
-        elsif scanner.scan /(\d+)/
-          @tokens << [:ARG_NUM_VALUE, scanner[1].to_i]
-
-          end_arguments if check_for_last(/\s*\)/)
-        elsif scanner.scan /(true|false)/
-          @tokens << [:ARG_BOOL_VALUE, (scanner[1] == 'true')]
-
-          end_arguments if check_for_last(/\s*\)/)
-        elsif scanner.scan /\$(\w+):/
-          @tokens << [:SPECIAL_ARG_KEY, scanner[1]]
-
-          push_state :special_args
-        elsif scanner.scan /\$(\w+)/
-          @tokens << [:SPECIAL_ARG_REF, scanner[1]]
-
-          end_arguments if check_for_last(/\s*\)/)
-        elsif scanner.scan /,/
-          # no-op
-        elsif check_for_last(/\s*\)/)
-          @tokens << [:END_DEFAULT_VALUE] if state == :argument_defaults
-          end_arguments
-          pop_state
-        else
-          advance
-        end
-      end
-
-      def tokenize_fields
-        if scanner.check %r[{]
-          tokenize_fieldset
-        # ... on Model - or - ... @directive
-        elsif scanner.scan %r{\.{3}\s}
-          @tokens << [:INLINE_FRAGMENT]
-          push_state :inline_fragment
-        # @directive
-        elsif scanner.scan %r{@(\w+)}
-          @tokens << [:DIRECTIVE, scanner[1]]
-        # ...fragmentReference (check for last since it is a field literal)
-        elsif scanner.scan /\.{3}(\w+)/
-          @tokens << [:FRAGMENT_REF, scanner[1]]
-
-          end_fieldset while check_for_last && state == :fieldset
-        # alias:
-        elsif scanner.scan %r{(\w+):}
-          @tokens << [:ALIAS, scanner[1]]
-        # fieldLiteral
-        elsif scanner.scan %r{(\w+)}
-          @tokens << [:FIELD_NAME, scanner[1]]
-
-          end_fieldset while check_for_last && state == :fieldset
-        # (arguments: true)
-        elsif scanner.scan /^\s*\(/
-          @tokens << [:START_ARGS]
-
-          push_state :arguments
-        elsif check_for_final
-          if get_context == :fragments
-            end_fragment
-          elsif get_context == :expression
-            end_expression
-          else
-            advance
-          end
-        else
-          advance
-        end
-      end
-
-      def tokenize_fieldset
-        if scanner.scan %r[\s*{]
-          @tokens << [:FIELDSET]
-
-          push_state :fieldset
-        else
-          raise LexerError, "Expecting `{` got `#{scanner.peek(3)}`"
-        end
-      end
-
-      def env
-        @ctx ||= []
-      end
-
-      def get_context
-        env.last || :default
-      end
-
-      def push_context(context)
-        env << context
-      end
-
-      def pop_context
-        env.pop
-      end
-
-      def rewind
-        scanner.pos = scanner.pos - 1
-      end
-
-      def advance
-        unless scanner.check /\s/
-          raise LexerError, "Unexpected Char: '#{scanner.peek(3)}'"
-        end
-
-        scanner.pos = scanner.pos + 1
-      end
-
-      def stack
-        @stack ||= []
-      end
-
-      def state
-        stack.last || :default
-      end
-
-      def push_state(state)
-        stack << state
-      end
-
-      def pop_state
-        stack.pop
-      end
-    end
-  end
-end
--- a/data/lib/graphlyte/schema/types/base.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-module Graphlyte
-  module Schema
-    module Types
-      class Defaults
-        attr_reader :value
-        def initialize(value)
-          @value = value
-        end
-
-        def merge!(str)
-          parse_value(@value, str)
-        end
-
-        def parse_value(value, str)
-          if value.is_a?(Hash)
-            str << "{ "
-            value.each_with_index do |(k, v), idx|
-              str << "#{k}: "
-              parse_value(v, str)
-              str << ", " if idx < (value.size - 1)
-            end
-            str << " }"
-          elsif value.is_a?(Array)
-            str << "["
-            value.each_with_index do |item, idx|
-              parse_value(item, str)
-              str << ", " if idx < (value.size - 1)
-            end
-            str << "]"
-          elsif value.is_a?(Symbol)
-            str << value.to_s
-          else
-            str << "#{Arguments::Value.new(value).to_s}"
-          end
-        end
-      end
-
-      class Base
-        attr_reader :name, :placeholder
-
-        def initialize(name, placeholder, defaults=nil)
-          @name = name
-          @placeholder = placeholder
-          @defaults = defaults
-        end
-
-        def default
-          return nil if @defaults.class == NilClass
-          Defaults.new(@defaults)
-        end
-      end
-    end
-  end
-end