graphlyte 0.3.2 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. checksums.yaml +4 -4
  2. data/lib/graphlyte/data.rb +68 -0
  3. data/lib/graphlyte/document.rb +131 -0
  4. data/lib/graphlyte/dsl.rb +86 -0
  5. data/lib/graphlyte/editor.rb +288 -0
  6. data/lib/graphlyte/editors/annotate_types.rb +75 -0
  7. data/lib/graphlyte/editors/canonicalize.rb +26 -0
  8. data/lib/graphlyte/editors/collect_variable_references.rb +36 -0
  9. data/lib/graphlyte/editors/infer_signature.rb +36 -0
  10. data/lib/graphlyte/editors/inline_fragments.rb +37 -0
  11. data/lib/graphlyte/editors/remove_unneeded_spreads.rb +64 -0
  12. data/lib/graphlyte/editors/select_operation.rb +116 -0
  13. data/lib/graphlyte/editors/with_variables.rb +106 -0
  14. data/lib/graphlyte/errors.rb +33 -0
  15. data/lib/graphlyte/lexer.rb +392 -0
  16. data/lib/graphlyte/lexing/location.rb +43 -0
  17. data/lib/graphlyte/lexing/token.rb +31 -0
  18. data/lib/graphlyte/parser.rb +269 -0
  19. data/lib/graphlyte/parsing/backtracking_parser.rb +160 -0
  20. data/lib/graphlyte/refinements/string_refinement.rb +14 -8
  21. data/lib/graphlyte/refinements/syntax_refinements.rb +62 -0
  22. data/lib/graphlyte/schema.rb +165 -0
  23. data/lib/graphlyte/schema_query.rb +82 -65
  24. data/lib/graphlyte/selection_builder.rb +189 -0
  25. data/lib/graphlyte/selector.rb +75 -0
  26. data/lib/graphlyte/serializer.rb +223 -0
  27. data/lib/graphlyte/syntax.rb +369 -0
  28. data/lib/graphlyte.rb +24 -42
  29. metadata +88 -19
  30. data/lib/graphlyte/arguments/set.rb +0 -94
  31. data/lib/graphlyte/arguments/value.rb +0 -42
  32. data/lib/graphlyte/arguments/value_literal.rb +0 -17
  33. data/lib/graphlyte/builder.rb +0 -59
  34. data/lib/graphlyte/directive.rb +0 -25
  35. data/lib/graphlyte/field.rb +0 -65
  36. data/lib/graphlyte/fieldset.rb +0 -36
  37. data/lib/graphlyte/fragment.rb +0 -17
  38. data/lib/graphlyte/inline_fragment.rb +0 -29
  39. data/lib/graphlyte/query.rb +0 -150
  40. data/lib/graphlyte/schema/parser.rb +0 -687
  41. data/lib/graphlyte/schema/types/base.rb +0 -54
  42. data/lib/graphlyte/types.rb +0 -9
@@ -1,687 +0,0 @@
1
- require "strscan"
2
- require_relative "../fieldset"
3
- require_relative "../query"
4
- require_relative "../fragment"
5
- require_relative "../schema_query"
6
- require_relative "../types"
7
- require_relative "../arguments/value_literal"
8
-
9
- module Graphlyte
10
- module Schema
11
- module ParserHelpers
12
- def parse_fields
13
- fields = repeat(:parse_field)
14
- fields
15
- end
16
-
17
- def skip_fieldset
18
- expect(:FIELDSET)
19
- parse_fields
20
- need(:END_FIELDSET)
21
- end
22
-
23
- def parse_field
24
- alias_field = expect(:ALIAS)
25
- if token = expect(:FRAGMENT_REF)
26
- raise "Can't find fragment #{token[0][1]}" unless fragments_dictionary[token[0][1]]
27
- fragments_dictionary[token[0][1]]
28
- elsif expect(:INLINE_FRAGMENT)
29
- field = parse_inline_fragment
30
- elsif expect(:FIELDSET)
31
-
32
- elsif (field = expect(:FIELD_NAME))
33
- args = parse_args
34
- directive = parse_directive
35
-
36
- if builder = parse_fieldset_into_builder
37
- need(:END_FIELDSET)
38
- fieldset = Fieldset.new(builder: builder)
39
- field = Field.new(field[0][1], fieldset, args, directive: directive)
40
- else
41
- field = Field.new(field[0][1], Fieldset.empty, args, directive: directive)
42
- end
43
-
44
- if alias_field
45
- field.alias(alias_field[0][1])
46
- end
47
-
48
- field
49
- end
50
- end
51
-
52
- def parse_inline_fragment
53
- model_name = expect(:MODEL_NAME)&.dig(0, 1)
54
- directive = parse_directive
55
- inputs = directive ? (parse_args || {}) : {}
56
- fields = expect(:FIELDSET) ? parse_fields : []
57
- need(:END_FIELDSET)
58
-
59
- InlineFragment.new(model_name, directive: directive, builder: Builder.new(fields), **inputs)
60
- end
61
-
62
- def parse_directive
63
- if token = expect(:DIRECTIVE)
64
- inputs = parse_args || {}
65
-
66
- Directive.new(token[0][1], **inputs)
67
- end
68
- end
69
-
70
- def parse_fieldset_into_builder
71
- fields = []
72
- if expect(:FIELDSET)
73
- fields = parse_fields
74
- Builder.new(fields)
75
- end
76
- end
77
-
78
- def parse_args
79
- if expect(:START_ARGS)
80
- args = repeat(:parse_arg).inject(&:merge)
81
- need(:END_ARGS)
82
- args
83
- end
84
- end
85
-
86
- def parse_default
87
- if expect(:DEFAULT_VALUE)
88
- value = parse_value
89
- need(:END_DEFAULT_VALUE)
90
- value
91
- end
92
- end
93
-
94
- def parse_arg
95
- if (token = expect(:ARG_KEY)) && (value = parse_value)
96
- parse_default
97
- key = token[0][1]
98
- arg = {}
99
- arg[key] = value
100
- elsif (token = expect(:SPECIAL_ARG_KEY)) && (value = parse_value)
101
- arg = expect_and_inflate_special_args(token, value)
102
- end
103
-
104
- arg
105
- end
106
-
107
- def expect_and_inflate_special_args(token, value)
108
- return { token[0][1] => value } if value.class == Schema::Types::Base
109
-
110
- defaults = parse_default
111
- @special_args ||= {}
112
- arg = {}
113
- if [Array, Hash].include?(value.class)
114
- arg[token[0][1]] = value
115
- else
116
- new_val = Schema::Types::Base.new(value, token[0][1], defaults)
117
- arg[token[0][1]] = new_val
118
- end
119
- @special_args.merge!(arg)
120
- arg
121
- end
122
-
123
- def parse_value
124
- if token = expect(:ARG_NUM_VALUE) || expect(:ARG_STRING_VALUE) || expect(:ARG_BOOL_VALUE) || expect(:ARG_FLOAT_VALUE)
125
- token[0][1]
126
- elsif token = expect(:ARG_LITERAL_VALUE)
127
- Graphlyte::Arguments::ValueLiteral.new(token[0][1])
128
- elsif token = expect(:SPECIAL_ARG_REF)
129
- ref = token[0][1]
130
- # can't prove if this exists yet, so let's add it to the list
131
- unless @special_args&.dig(ref)
132
- @refs_to_validate ||= []
133
- @refs_to_validate << ref
134
- -> (args) do
135
- args[ref]
136
- end
137
- else
138
- @special_args[ref]
139
- end
140
- elsif token = expect(:SPECIAL_ARG_VAL)
141
- token[0][1]
142
- elsif token = expect(:ARG_HASH)
143
- parse_arg_hash
144
- elsif expect(:ARG_ARRAY)
145
- parse_arg_array
146
- end
147
- end
148
-
149
- def parse_arg_array
150
- args = repeat(:parse_value)
151
- need(:ARG_ARRAY_END)
152
- args
153
- end
154
-
155
- def parse_arg_hash
156
- if (key = expect(:ARG_KEY)) && (value = parse_value)
157
- hash = {}
158
- hash[key[0][1]] = value
159
- hash
160
- if new_hash = parse_arg_hash
161
- hash.merge!(new_hash)
162
- else
163
- need(:ARG_HASH_END)
164
- hash
165
- end
166
- end
167
- end
168
-
169
- def repeat(method)
170
- results = []
171
-
172
- while result = send(method)
173
- results << result
174
- end
175
-
176
- results
177
- end
178
-
179
- def expect(*expected_tokens)
180
- upcoming = tokens[position, expected_tokens.size]
181
- if upcoming.map(&:first) == expected_tokens
182
- advance(expected_tokens.size)
183
- upcoming
184
- end
185
- end
186
-
187
- def tokens?
188
- !tokens[position].nil?
189
- end
190
-
191
- def need(*required_tokens)
192
- upcoming = tokens[position, required_tokens.size]
193
- expect(*required_tokens) or raise "Unexpected tokens. Expected #{required_tokens.inspect} but got #{upcoming.inspect}"
194
- end
195
-
196
- def advance(offset = 1)
197
- @position += offset
198
- end
199
-
200
- def sort_fragments(sorted = [], fragments)
201
- return sorted if !fragments || fragments.empty?
202
- fragment_tokens = fragments.shift
203
-
204
- current_ref = fragment_tokens.find do |token|
205
- token[0] == :FRAGMENT_REF
206
- end
207
-
208
- if current_ref
209
- exists = sorted.any? do |frags|
210
- frags.find do |el|
211
- el[0] == :FRAGMENT && el[1] == current_ref[1]
212
- end
213
- end
214
- if exists
215
- sorted << fragment_tokens
216
- sort_fragments(sorted, fragments)
217
- else
218
- fragments.push fragment_tokens
219
- sort_fragments(sorted, fragments)
220
- end
221
- else
222
- sorted << fragment_tokens
223
- sort_fragments(sorted, fragments)
224
- end
225
- end
226
-
227
- # Select the fragment tokens as an array of arrays
228
- # @return Array [[[:FRAGMENT, 'foo', bar], [:FIELDSET], [:END_FIELDSET]], [[:FRAGMENT 'buzz', 'bazz']...
229
- def fetch_fragments(tokens = @tokens.dup, fragment_tokens = [], memo = { active: false, starts: 0, ends: 0, idx: 0 })
230
- token_arr = tokens.shift
231
- return fragment_tokens if token_arr.nil?
232
-
233
- if memo[:active] == true
234
- fragment_tokens[memo[:idx]] << token_arr
235
- end
236
-
237
- if token_arr[0] == :END_FIELDSET && memo[:active] == true
238
- memo[:ends] += 1
239
- if memo[:starts] == memo[:ends] + 1
240
- memo[:active] = false
241
- memo[:ends] = 0
242
- memo[:starts] = 0
243
- memo[:idx] += 1
244
- end
245
- elsif token_arr[0] == :FRAGMENT
246
- memo[:active] = true
247
- memo[:starts] += 1
248
- fragment_tokens[memo[:idx]] = [token_arr]
249
- elsif token_arr[0] == :FIELDSET && memo[:active] == true
250
- memo[:starts] += 1
251
- end
252
-
253
- fetch_fragments(tokens, fragment_tokens, memo)
254
- end
255
- end
256
-
257
- class FragmentParser
258
- attr_reader :tokens, :position, :fragments_dictionary, :special_args
259
-
260
- include ParserHelpers
261
-
262
- def initialize(tokens)
263
- @tokens = tokens.flatten(1)
264
- @position = 0
265
- @fragments_dictionary = {}
266
- end
267
-
268
- def parse_fragments
269
- repeat(:parse_fragment)
270
- fragments_dictionary
271
- end
272
-
273
- def parse_fragment
274
- if token = expect(:FRAGMENT)
275
- parse_args
276
- if builder = parse_fieldset_into_builder
277
- fragment = Fragment.new(token[0][1], token[0][2], builder: builder)
278
- need(:END_FIELDSET) if tokens?
279
- elsif fields = parse_fields
280
- builder = Builder.new(fields)
281
- fragment = Fragment.new(token[0][1], token[0][2], builder: builder)
282
- end
283
- @fragments_dictionary[token[0][1]] = fragment
284
- end
285
- end
286
- end
287
-
288
- class Parser
289
- attr_reader :tokens, :position, :fragments_dictionary
290
-
291
- include ParserHelpers
292
-
293
- def self.parse(gql)
294
- obj = new Lexer.new(gql).tokenize
295
- obj.parse
296
- end
297
-
298
- def initialize(tokens)
299
- @tokens = tokens
300
-
301
- @fragment_tokens = sort_fragments([], fetch_fragments)
302
- @fragments_dictionary = {}
303
- @fragments_dictionary = @fragment_tokens.any? ? FragmentParser.new(@fragment_tokens).parse_fragments : {}
304
- @position = 0
305
- end
306
-
307
- def refresh_lazy_refs(fields)
308
- fields.each do |field|
309
- if field.is_a? Fieldset
310
- refresh_lazy_refs(field.fields)
311
- else
312
- field.inputs.resolve_lazy_special_args(@special_args)
313
- refresh_lazy_refs(field.fieldset.fields)
314
- end
315
- end
316
- end
317
-
318
- def parse
319
- if token = expect(:EXPRESSION)
320
- value = parse_expression(token[0][1], token[0][2])
321
-
322
- # validate fragment refs
323
- @refs_to_validate&.each do |ref|
324
- raise "Argument reference #{ref} doesn't exist" unless @special_args[ref]
325
- end
326
-
327
- refresh_lazy_refs(value.builder.>>)
328
-
329
- value
330
- elsif expect(:FRAGMENT)
331
- skip_fragments
332
- parse
333
- else
334
- raise "Expression or Fragment not found"
335
- end
336
- end
337
-
338
- def skip_fragments
339
- skip_fieldset
340
- end
341
-
342
- def parse_expression(type, name)
343
- parse_args
344
- builder = parse_fieldset_into_builder
345
- need(:END_FIELDSET)
346
- query = Query.new(name, type.to_sym, builder: builder)
347
- query
348
- end
349
- end
350
-
351
- class LexerError < StandardError; end
352
-
353
- class Lexer
354
- attr_reader :stack, :scanner
355
- def initialize(gql, scanner: StringScanner.new(gql))
356
- @original_string = gql
357
- @scanner = scanner
358
- @tokens = []
359
- end
360
-
361
- SPECIAL_ARG_REGEX = /^\s*(?:(?<![\"\{]))([\w\!\[\]]+)(?:(?![\"\}]))/
362
-
363
- def tokenize
364
- until scanner.eos?
365
- tokenize_objects
366
- end
367
-
368
- @tokens
369
- end
370
-
371
- def tokenize_objects
372
- case state
373
- when :default # the stack is empty, can only process top level fragments or expressions
374
- if scanner.scan %r{\s*fragment\s*(\w+)\s*on\s*(\w+)}
375
- @tokens << [:FRAGMENT, scanner[1], scanner[2]]
376
- push_context :fragments
377
- # check for a fieldset
378
- if scanner.check %r[\s*{]
379
- tokenize_fieldset
380
- else
381
- scanner.scan /\(/
382
- @tokens << [:START_ARGS]
383
- push_state :arguments
384
- end
385
- elsif scanner.check /\{/
386
- @tokens << [:EXPRESSION, 'query', nil] if get_context == :default
387
- push_context :fieldset
388
-
389
- tokenize_fieldset
390
- elsif scanner.scan %r{^(\w+) (\w+)?}
391
- @tokens << [:EXPRESSION, scanner[1], scanner[2]]
392
- push_context :expression
393
- # check for a fieldset
394
- if scanner.check %r[\s*{]
395
- tokenize_fieldset
396
- else
397
- scanner.scan /\(/
398
- @tokens << [:START_ARGS]
399
- push_state :arguments
400
- end
401
- elsif scanner.check /\s*\}/
402
- end_fieldset
403
- else
404
- advance
405
- end
406
- when :fieldset
407
- tokenize_fields
408
- when :arguments
409
- tokenize_arguments
410
- when :argument_defaults
411
- tokenize_argument_defaults
412
- when :hash_arguments
413
- tokenize_hash_arguments
414
- when :array_arguments
415
- tokenize_array_arguments
416
- when :special_args
417
- tokenize_special_arguments
418
- when :inline_fragment
419
- tokenize_inline_fragment
420
- end
421
- end
422
-
423
- def check_for_last(regex = /\s*\}/)
424
- scanner.check regex
425
- end
426
-
427
- def check_for_final
428
- scanner.check /\s*\}(?!\s*\})/
429
- end
430
-
431
- def check_for_not_last
432
- scanner.check /\s*\}(?=\s*\})/
433
- end
434
-
435
- def tokenize_inline_fragment
436
- if scanner.scan /on (\w+)/
437
- @tokens << [:MODEL_NAME, scanner[1]]
438
-
439
- pop_state
440
- elsif scanner.scan /@(\w+)/
441
- @tokens << [:DIRECTIVE, scanner[1]]
442
-
443
- pop_state
444
- else
445
- # throw an error here?
446
- advance
447
- end
448
- end
449
-
450
- def end_fieldset
451
- scanner.scan /\s*\}/
452
- @tokens << [:END_FIELDSET]
453
- pop_state
454
- pop_context if state == :default
455
- end
456
-
457
- def end_arguments
458
- scanner.scan /\s*\)/
459
- @tokens << [:END_ARGS]
460
- pop_state if state == :argument_defaults
461
- pop_state
462
- end_fieldset while check_for_last && state == :fieldset
463
- end
464
-
465
- # too tired to figure out why this is right now
466
- def tokenize_argument_defaults
467
- if scanner.check /\)/
468
- @tokens << [:END_DEFAULT_VALUE]
469
- pop_state
470
- elsif scanner.scan /[\n|,]/
471
- @tokens << [:END_DEFAULT_VALUE]
472
- pop_state
473
- else
474
- tokenize_shared_arguments
475
- end
476
- end
477
-
478
- def tokenize_special_arguments
479
- if scanner.check SPECIAL_ARG_REGEX
480
- scanner.scan SPECIAL_ARG_REGEX
481
-
482
- @tokens << [:SPECIAL_ARG_VAL, scanner[1]]
483
-
484
- pop_state
485
-
486
- end_arguments if check_for_last(/\s*\)/)
487
- else
488
- # revisit this... should we throw an error here?
489
- pop_state
490
- raise LexerError, "why can't we parse #{scanner.peek(5)}"
491
- end
492
- end
493
-
494
- def tokenize_array_arguments
495
- if scanner.scan /\]/
496
- @tokens << [:ARG_ARRAY_END]
497
-
498
- pop_state
499
- # if check_for_last(')')
500
- # pop_state
501
- # end
502
- else
503
- tokenize_shared_arguments
504
- end
505
- end
506
-
507
- def tokenize_hash_arguments
508
- if scanner.scan /\}/
509
- @tokens << [:ARG_HASH_END]
510
-
511
- pop_state
512
- # if this is the last argument in the list, maybe get back to the field scope?
513
- # if check_for_last(')')
514
- # pop_state
515
- # end
516
- else
517
- tokenize_shared_arguments
518
- end
519
- end
520
-
521
- def pop_argument_state
522
- if check_for_last(/\s*\)/)
523
- @tokens << [:END_DEFAULT_VALUE] if state == :argument_defaults
524
- end_arguments
525
- else
526
- pop_state unless %i[arguments argument_defaults hash_arguments array_arguments special_args].include?(state)
527
- end
528
- end
529
-
530
- def tokenize_arguments
531
- # pop argument state if arguments are finished
532
- if scanner.scan %r{\)}
533
- @tokens << [:END_ARGS]
534
-
535
- pop_state
536
- # something(argument: $argument = true)
537
- elsif scanner.scan %r{=}
538
- @tokens << [:DEFAULT_VALUE]
539
-
540
- push_state :argument_defaults
541
- # noop, should expect this, but not important
542
- elsif scanner.scan %r{,}
543
- nil
544
- else
545
- tokenize_shared_arguments
546
- end
547
- end
548
-
549
- def tokenize_shared_arguments
550
- if scanner.scan /^(\w+):/
551
- @tokens << [:ARG_KEY, scanner[1]]
552
- elsif scanner.scan %r[{]
553
- @tokens << [:ARG_HASH]
554
-
555
- push_state :hash_arguments
556
- elsif scanner.scan /\[/
557
- @tokens << [:ARG_ARRAY]
558
-
559
- push_state :array_arguments
560
- elsif scanner.scan %r{"(.*?)"}
561
- @tokens << [:ARG_STRING_VALUE, scanner[1]]
562
-
563
- pop_argument_state
564
- elsif scanner.scan /(\d+\.\d+)/
565
- @tokens << [:ARG_FLOAT_VALUE, scanner[1].to_f]
566
-
567
- pop_argument_state
568
- elsif scanner.scan /(\d+)/
569
- @tokens << [:ARG_NUM_VALUE, scanner[1].to_i]
570
-
571
- pop_argument_state
572
- elsif scanner.scan /(true|false)/
573
- @tokens << [:ARG_BOOL_VALUE, (scanner[1] == 'true')]
574
-
575
- pop_argument_state
576
- elsif scanner.scan /\$(\w+):/
577
- @tokens << [:SPECIAL_ARG_KEY, scanner[1]]
578
-
579
- push_state :special_args
580
- elsif scanner.scan /\$(\w+)/
581
- @tokens << [:SPECIAL_ARG_REF, scanner[1]]
582
-
583
- pop_argument_state
584
- elsif scanner.scan /,/
585
- # no-op
586
- elsif scanner.scan /([A-Za-z_"]+)/
587
- @tokens << [:ARG_LITERAL_VALUE, scanner[1]]
588
-
589
- elsif check_for_last(/\s*\)/)
590
- @tokens << [:END_DEFAULT_VALUE] if state == :argument_defaults
591
- end_arguments
592
- else
593
- advance
594
- end
595
- end
596
-
597
- def tokenize_fields
598
- if scanner.check %r[\s*{]
599
- tokenize_fieldset
600
- # ... on Model - or - ... @directive
601
- elsif scanner.scan %r{\.{3}\s}
602
- @tokens << [:INLINE_FRAGMENT]
603
- push_state :inline_fragment
604
- # @directive
605
- elsif scanner.scan %r{@(\w+)}
606
- @tokens << [:DIRECTIVE, scanner[1]]
607
-
608
- end_fieldset while check_for_last && state == :fieldset
609
- # ...fragmentReference (check for last since it is a field literal)
610
- elsif scanner.scan /\.{3}(\w+)/
611
- @tokens << [:FRAGMENT_REF, scanner[1]]
612
-
613
- end_fieldset while check_for_last && state == :fieldset
614
- # alias:
615
- elsif scanner.scan %r{(\w+):}
616
- @tokens << [:ALIAS, scanner[1]]
617
- # fieldLiteral
618
- elsif scanner.scan %r{(\w+)}
619
- @tokens << [:FIELD_NAME, scanner[1]]
620
-
621
- end_fieldset while check_for_last && state == :fieldset
622
- # (arguments: true)
623
- elsif scanner.scan /^\s*\(/
624
- @tokens << [:START_ARGS]
625
-
626
- push_state :arguments
627
- else
628
- advance
629
- end
630
- end
631
-
632
- def tokenize_fieldset
633
- if scanner.scan %r[\s*{]
634
- @tokens << [:FIELDSET]
635
-
636
- push_state :fieldset
637
- else
638
- raise LexerError, "Expecting `{` got `#{scanner.peek(3)}`"
639
- end
640
- end
641
-
642
- def env
643
- @ctx ||= []
644
- end
645
-
646
- def get_context
647
- env.last || :default
648
- end
649
-
650
- def push_context(context)
651
- env << context
652
- end
653
-
654
- def pop_context
655
- env.pop
656
- end
657
-
658
- def rewind
659
- scanner.pos = scanner.pos - 1
660
- end
661
-
662
- def advance
663
- unless scanner.check /\s/
664
- raise LexerError, "Unexpected Char: '#{scanner.peek(20)}'"
665
- end
666
-
667
- scanner.pos = scanner.pos + 1
668
- end
669
-
670
- def stack
671
- @stack ||= []
672
- end
673
-
674
- def state
675
- stack.last || :default
676
- end
677
-
678
- def push_state(state)
679
- stack << state
680
- end
681
-
682
- def pop_state
683
- stack.pop
684
- end
685
- end
686
- end
687
- end