tsjson 1.0.0

Files changed (46)
  1. checksums.yaml +7 -0
  2. data/lib/errors/cant_distinguish_type_error.rb +17 -0
  3. data/lib/errors/index.rb +12 -0
  4. data/lib/errors/list_validation_error.rb +34 -0
  5. data/lib/errors/literal_union_validation_error.rb +18 -0
  6. data/lib/errors/literal_validation_error.rb +16 -0
  7. data/lib/errors/not_enough_discriminators.rb +7 -0
  8. data/lib/errors/object_validation_error.rb +56 -0
  9. data/lib/errors/required_field_error.rb +7 -0
  10. data/lib/errors/scalar_union_validation_error.rb +18 -0
  11. data/lib/errors/scalar_validation_error.rb +16 -0
  12. data/lib/errors/unexpected_field_error.rb +7 -0
  13. data/lib/errors/unexpected_value_error.rb +16 -0
  14. data/lib/errors/validation_error.rb +16 -0
  15. data/lib/language/ast/kind.rb +25 -0
  16. data/lib/language/lexer/lexer.rb +452 -0
  17. data/lib/language/lexer/location.rb +20 -0
  18. data/lib/language/lexer/syntax_error.rb +89 -0
  19. data/lib/language/lexer/token.rb +34 -0
  20. data/lib/language/lexer/token_kind.rb +37 -0
  21. data/lib/language/lexer/utils.rb +32 -0
  22. data/lib/language/parser/parser.rb +437 -0
  23. data/lib/language/source.rb +109 -0
  24. data/lib/schema/schema.rb +48 -0
  25. data/lib/schema/schema_builder.rb +148 -0
  26. data/lib/tsjson.rb +1 -0
  27. data/lib/types/any.rb +15 -0
  28. data/lib/types/base.rb +19 -0
  29. data/lib/types/boolean.rb +17 -0
  30. data/lib/types/discriminator_map.rb +116 -0
  31. data/lib/types/enum.rb +47 -0
  32. data/lib/types/float.rb +17 -0
  33. data/lib/types/index.rb +27 -0
  34. data/lib/types/int.rb +17 -0
  35. data/lib/types/intersection.rb +72 -0
  36. data/lib/types/list.rb +33 -0
  37. data/lib/types/literal.rb +25 -0
  38. data/lib/types/literal_union.rb +48 -0
  39. data/lib/types/merge.rb +21 -0
  40. data/lib/types/null.rb +17 -0
  41. data/lib/types/object.rb +87 -0
  42. data/lib/types/scalar.rb +24 -0
  43. data/lib/types/scalar_union.rb +25 -0
  44. data/lib/types/string.rb +17 -0
  45. data/lib/types/union.rb +61 -0
  46. metadata +85 -0

data/lib/language/lexer/location.rb +20 -0

module TSJSON
  class Location
    # Maps an absolute character offset in the source body to a 1-based
    # { line:, column: } pair.
    def self.get_location(source, position)
      line = 1
      column = position + 1
      body = source.body
      new_lines_indexes = (0...body.length).find_all { |i| body[i, 1] == "\n" }

      new_lines_indexes.each do |index|
        break if index >= position

        line += 1
        column = position + 1 - (index + 1)
      end

      return { line: line, column: column }
    end
  end
end
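
For orientation, a minimal usage sketch of Location.get_location (hypothetical, not part of the gem; FakeSource stands in for TSJSON::Source, of which only the body accessor is used here):

  # Hypothetical example: resolve an absolute character offset to line/column.
  FakeSource = Struct.new(:body)

  source = FakeSource.new("type User = {\n  id: Int;\n};\n")
  TSJSON::Location.get_location(source, 16)
  # => { line: 2, column: 3 }  (offset 16 is the "i" of "id")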

data/lib/language/lexer/syntax_error.rb +89 -0

require_relative './location.rb'

module TSJSON
  class TSJSONSyntaxError < StandardError
    attr_accessor :message,
                  :nodes,
                  :source,
                  :positions,
                  :path,
                  :locations,
                  :original_error

    def initialize(
      message, nodes, source, positions, path = nil, original_error = nil
    )
      self.message = message
      self.path = path
      self.original_error = original_error

      _nodes =
        if nodes.is_a?(Array)
          nodes.length != 0 ? nodes : nil
        else
          nodes ? [nodes] : nil
        end

      # Compute locations in the source for the given nodes/positions.
      _source = source
      _source = _nodes[0].loc&.source if (!_source && _nodes)

      _positions = positions

      if (!_positions && _nodes)
        _positions =
          _nodes.reduce([]) do |list, node|
            list.push(node.loc.start) if (node.loc)
            list
          end
      end
      _positions = nil if (_positions && _positions.empty?)

      _locations = nil
      if (positions && source)
        _locations = positions.map { |pos| Location.get_location(source, pos) }
      elsif (_nodes)
        _locations =
          _nodes.reduce([]) do |list, node|
            if (node.loc)
              list.push(
                Location.get_location(node.loc.source, node.loc.start_pos)
              )
            end
            list
          end
      end

      self.source = _source
      self.positions = _positions
      self.locations = _locations
      self.nodes = _nodes
    end

    def self.print_error(error)
      output = error.message

      if (error.nodes)
        error.nodes.each do |node|
          if (node.loc)
            output +=
              "\n\n" +
              Source.print_source_location(
                node.loc.source,
                Location.get_location(node.loc.source, node.loc.start_pos)
              )
          end
        end
      elsif (error.source && error.locations)
        error.locations.each do |location|
          output +=
            "\n\n" + Source.print_source_location(error.source, location)
        end
      end

      return output
    end

    def self.syntax_error(source, position, description)
      return new("Syntax Error: #{description}", nil, source, [position])
    end

    def toJSON
      return { message: self.message, locations: self.locations }
    end

    def to_s
      return TSJSONSyntaxError.print_error(self)
    end
  end
end

data/lib/language/lexer/token.rb +34 -0

module TSJSON
  class Token
    attr_accessor :kind,
                  :start_pos,
                  :end_pos,
                  :line,
                  :column,
                  :value,
                  :prev,
                  :next

    def initialize(kind, start_pos, end_pos, line, column, prev, value = nil)
      self.kind = kind
      self.start_pos = start_pos
      self.end_pos = end_pos
      self.line = line
      self.column = column
      self.value = value
      self.prev = prev
      self.next = nil
    end

    def toJSON
      {
        kind: self.kind,
        start: self.start_pos,
        end: self.end_pos,
        line: self.line,
        column: self.column,
        value: self.value
      }.compact
    end
  end
end
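
A small, hypothetical round-trip through Token#toJSON (values chosen for illustration; .compact drops any nil fields, e.g. value on punctuator tokens):

  token = TSJSON::Token.new(TSJSON::TokenKind::NAME, 0, 4, 1, 1, nil, 'type')
  token.toJSON
  # => { kind: 'Name', start: 0, end: 4, line: 1, column: 1, value: 'type' }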

data/lib/language/lexer/token_kind.rb +37 -0

module TSJSON
  module TokenKind
    SOF = '<SOF>'
    EOF = '<EOF>'
    AMP = '&'
    PIPE = '|'
    PAREN_L = '('
    PAREN_R = ')'
    BRACKET_L = '['
    BRACKET_R = ']'
    BRACE_L = '{'
    BRACE_R = '}'
    CHEVRON_L = '<'
    CHEVRON_R = '>'
    COLON = ':'
    SEMICOLON = ';'
    EQUALS = '='
    COMMA = ','
    DOT = '.'
    QUESTION_MARK = '?'
    NAME = 'Name'
    INT = 'Int'
    FLOAT = 'Float'
    STRING = 'String'
    BLOCK_STRING = 'BlockString'
    COMMENT = 'Comment'

    def self.operation_precedence(kind)
      case (kind)
      when TokenKind::AMP
        return 2
      when TokenKind::PIPE
        return 1
      end
    end
  end
end
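
These precedences drive the operator handling in the parser below: '&' (intersection) binds tighter than '|' (union), as in TypeScript. A quick illustrative check:

  TSJSON::TokenKind.operation_precedence(TSJSON::TokenKind::AMP)  # => 2
  TSJSON::TokenKind.operation_precedence(TSJSON::TokenKind::PIPE) # => 1
  # So "A | B & C" parses as "A | (B & C)".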

data/lib/language/lexer/utils.rb +32 -0

module TSJSON
  class LexerUtils
    class << self
      def is_punctuator_token_kind?(kind)
        return(
          kind === TokenKind::PIPE || kind === TokenKind::AMP ||
            kind === TokenKind::COLON || kind === TokenKind::EQUALS ||
            kind === TokenKind::CHEVRON_L || kind === TokenKind::CHEVRON_R ||
            kind === TokenKind::PAREN_L || kind === TokenKind::PAREN_R ||
            kind === TokenKind::BRACKET_L || kind === TokenKind::BRACKET_R ||
            kind === TokenKind::BRACE_L || kind === TokenKind::BRACE_R
        )
      end

      def is_operation_token(kind)
        return kind == TokenKind::PIPE || kind == TokenKind::AMP
      end

      def get_token_desc(token)
        value = token.value
        return(
          get_token_kind_desc(token.kind) +
            (value != nil ? " \"#{value}\"" : '')
        )
      end

      def get_token_kind_desc(kind)
        return is_punctuator_token_kind?(kind) ? "\"#{kind}\"" : kind
      end
    end
  end
end

data/lib/language/parser/parser.rb +437 -0

require_relative '../lexer/lexer.rb'
require_relative '../source.rb'
require_relative '../ast/kind.rb'
require_relative '../lexer/token_kind.rb'

module TSJSON
  class Parser
    def initialize(source)
      source = Source.new(source) unless source.is_a?(Source)
      @lexer = Lexer.new(source)
    end

    def parse_document
      return(
        {
          kind: AST::Kind::Document,
          definitions:
            parse_optional_many(
              TokenKind::SOF,
              :parse_definition,
              TokenKind::EOF
            )
        }
      )
    end

    def parse_definition
      if (peek(TokenKind::NAME))
        case current_token.value
        when 'type'
          return parse_type_alias_definition
        when 'enum'
          return parse_enum_definition
        end
      end

      raise unexpected
    end

    def parse_type_alias_definition
      start = expect_token(TokenKind::NAME)

      name = parse_name
      parameters =
        parse_optional_many(
          TokenKind::CHEVRON_L,
          :parse_type_parameter,
          TokenKind::CHEVRON_R,
          TokenKind::COMMA
        )

      expect_token(TokenKind::EQUALS)
      definition = parse_operation

      expect_optional_token(TokenKind::SEMICOLON)

      return(
        {
          kind: AST::Kind::TypeAlias,
          name: name,
          parameters: parameters,
          definition: definition,
          loc: loc(start)
        }
      )
    end

    def parse_type_parameter
      start = current_token
      name = parse_name

      return { kind: AST::Kind::TypeParameter, name: name, loc: loc(start) }
    end

    def parse_name
      token = expect_token(TokenKind::NAME)
      return { kind: AST::Kind::Name, value: token.value, loc: loc(token) }
    end

    # Parses a single (non-union, non-intersection) type, then any postfix
    # forms: array "T[]", indexed access "T['key']" and property access "T.key".
    def parse_type_definition
      start = current_token
      type = nil
      if (peek(TokenKind::NAME))
        type = parse_type_reference
      elsif (peek(TokenKind::BRACE_L))
        type = parse_type_literal
      elsif (peek(TokenKind::PAREN_L))
        type = parse_parenthesized_type
      elsif (peek(TokenKind::BRACKET_L))
        type = parse_tuple
      elsif (peek(TokenKind::STRING))
        type = parse_string_literal
      elsif (peek(TokenKind::INT))
        type = parse_int
      elsif (peek(TokenKind::FLOAT))
        type = parse_float
      end

      raise unexpected if (!type)

      loop do
        break unless [TokenKind::BRACKET_L, TokenKind::DOT].include?(
          current_token.kind
        )

        if (expect_optional_token(TokenKind::BRACKET_L))
          if (expect_optional_token(TokenKind::BRACKET_R))
            type = { kind: AST::Kind::ArrayType, type: type, loc: loc(start) }
          else
            index = parse_string_literal
            expect_token(TokenKind::BRACKET_R)
            type = {
              kind: AST::Kind::IndexAccess,
              target: type,
              index: index,
              loc: loc(start)
            }
          end
        elsif (expect_optional_token(TokenKind::DOT))
          property = parse_name
          type = {
            kind: AST::Kind::PropertyAccess,
            target: type,
            property: property,
            loc: loc(start)
          }
        end
      end

      if (expect_optional_token(TokenKind::BRACKET_L))
        expect_token(TokenKind::BRACKET_R)
        type = { kind: AST::Kind::ArrayType, type: type, loc: loc(start) }
      end

      return type
    end

    # Parses a sequence of type definitions joined by '&' and '|',
    # shunting-yard style, so that '&' binds tighter than '|'.
    def parse_operation
      expect_optional_token(TokenKind::PIPE)
      nodes = [parse_type_definition]
      operations = []

      # Reduces the topmost pending operator over the last two parsed nodes,
      # folding consecutive unions/intersections into a single node.
      execute_operation =
        lambda do
          operation = operations.shift
          right = nodes.pop
          left = nodes.pop

          new_operation = nil
          if (operation == TokenKind::AMP)
            if (left[:kind] == AST::Kind::IntersectionType)
              new_operation = {
                kind: AST::Kind::IntersectionType,
                types: left[:types].concat([right])
              }
            else
              new_operation = {
                kind: AST::Kind::IntersectionType,
                types: [left, right]
              }
            end
          elsif (operation == TokenKind::PIPE)
            if (left[:kind] == AST::Kind::UnionType)
              new_operation = {
                kind: AST::Kind::UnionType,
                types: left[:types].concat([right])
              }
            else
              new_operation = {
                kind: AST::Kind::UnionType,
                types: [left, right]
              }
            end
          end
          nodes.push(new_operation)
        end

      loop do
        operation_kind = current_token.kind
        break unless LexerUtils.is_operation_token(operation_kind)

        # Reduce pending operators of equal or higher precedence first.
        while operations.length > 0 &&
              TokenKind.operation_precedence(operations[0]) >=
              TokenKind.operation_precedence(operation_kind)
          execute_operation.call
        end

        operations.unshift(operation_kind)
        @lexer.advance
        nodes.push(parse_type_definition)
      end

      execute_operation.call until operations.empty?

      return nodes[0]
    end

    def parse_string_literal
      token = expect_token(TokenKind::STRING)
      return(
        { kind: AST::Kind::StringLiteral, value: token.value, loc: loc(token) }
      )
    end

    def parse_int
      token = expect_token(TokenKind::INT)
      return { kind: AST::Kind::Int, value: token.value.to_i, loc: loc(token) }
    end

    def parse_float
      token = expect_token(TokenKind::FLOAT)
      return(
        { kind: AST::Kind::Float, value: token.value.to_f, loc: loc(token) }
      )
    end

    def parse_type_reference
      start = current_token
      name = parse_name
      args =
        parse_optional_many(
          TokenKind::CHEVRON_L,
          :parse_operation,
          TokenKind::CHEVRON_R,
          TokenKind::COMMA
        )

      return(
        {
          kind: AST::Kind::TypeReference,
          name: name,
          args: args,
          loc: loc(start)
        }
      )
    end

    def parse_tuple
      start = current_token
      types =
        parse_many(
          TokenKind::BRACKET_L,
          :parse_type_definition,
          TokenKind::BRACKET_R,
          TokenKind::COMMA
        )
      return { kind: AST::Kind::Tuple, types: types, loc: loc(start) }
    end

    def parse_type_literal
      start = current_token
      properties =
        parse_optional_many(
          TokenKind::BRACE_L,
          :parse_property_signature,
          TokenKind::BRACE_R
        )
      return(
        {
          kind: AST::Kind::TypeLiteral,
          properties: properties,
          loc: loc(start)
        }
      )
    end

    def parse_property_signature
      start = current_token
      name = parse_name
      optional = !expect_optional_token(TokenKind::QUESTION_MARK).nil?
      expect_token(TokenKind::COLON)
      type = parse_operation
      expect_token(TokenKind::SEMICOLON)

      return(
        {
          kind: AST::Kind::PropertySignature,
          name: name,
          type: type,
          optional: optional,
          loc: loc(start)
        }
      )
    end

    def parse_parenthesized_type
      start = expect_token(TokenKind::PAREN_L)
      type = parse_operation
      expect_token(TokenKind::PAREN_R)
      return { kind: AST::Kind::ParenthesizedType, type: type, loc: loc(start) }
    end

    def parse_enum_definition
      start = expect_token(TokenKind::NAME)
      name = parse_name
      members =
        parse_many(
          TokenKind::BRACE_L,
          :parse_enum_member,
          TokenKind::BRACE_R,
          TokenKind::COMMA
        )
      expect_optional_token(TokenKind::SEMICOLON)

      return(
        { kind: AST::Kind::Enum, name: name, members: members, loc: loc(start) }
      )
    end

    def parse_enum_member
      start = current_token
      name = parse_name

      value = nil
      value = parse_string_literal if (expect_optional_token(TokenKind::EQUALS))

      return(
        {
          kind: AST::Kind::EnumMember,
          name: name,
          value: value,
          loc: loc(start)
        }
      )
    end

    #######

    # Returns the current token from the lexer.
    def current_token
      @lexer.token
    end

    # Helper for creating an error when an unexpected lexed token is encountered.
    def unexpected(at_token = nil)
      token = at_token || current_token
      return(
        TSJSONSyntaxError.syntax_error(
          @lexer.source,
          token.start_pos,
          "Unexpected #{LexerUtils.get_token_desc(token)}."
        )
      )
    end

    # Returns a location object, used to identify the place in the source that
    # created a given parsed object.
    def loc(start_token)
      end_token = @lexer.last_token
      return {} # Location.new(start_token, end_token, @lexer.source)
    end

    # Determines if the next token is of a given kind.
    def peek(kind)
      current_token.kind == kind
    end

    # Returns a non-empty list of parse nodes, determined by parse_fn_symbol.
    # This list begins with a lex token of open_kind and ends with a lex token
    # of close_kind. Advances the parser to the next lex token after the
    # closing token.
    def parse_many(open_kind, parse_fn_symbol, close_kind, delimiter_kind = nil)
      expect_token(open_kind)
      nodes = []
      loop do
        nodes.push(self.send(parse_fn_symbol))
        expect_token(delimiter_kind) if (delimiter_kind && !peek(close_kind))
        break if expect_optional_token(close_kind)
      end
      return nodes
    end

    # Returns a list of parse nodes, determined by parse_fn_symbol. If the open
    # token is missing, returns an empty list without consuming input; otherwise
    # parses nodes until a lex token of close_kind is found. Advances the parser
    # to the next lex token after the closing token.
    def parse_optional_many(
      open_kind,
      parse_fn_symbol,
      close_kind,
      delimiter_kind = nil
    )
      if (expect_optional_token(open_kind))
        nodes = []
        loop do
          break if expect_optional_token(close_kind)
          nodes.push(self.send(parse_fn_symbol))
          expect_token(delimiter_kind) if (delimiter_kind && !peek(close_kind))
        end
        return nodes
      end
      return []
    end

    # If the next token is of the given kind, return that token after advancing
    # the lexer. Otherwise, do not change the parser state and raise an error.
    def expect_token(kind)
      token = current_token
      if (token.kind == kind)
        @lexer.advance
        return token
      end

      raise TSJSONSyntaxError.syntax_error(
        @lexer.source,
        token.start_pos,
        "Expected #{LexerUtils.get_token_kind_desc(kind)}, found #{
          LexerUtils.get_token_desc(token)
        }."
      )
    end

    # If the next token is of the given kind, return that token after advancing
    # the lexer. Otherwise, do not change the parser state and return nil.
    def expect_optional_token(kind)
      token = current_token
      if token.kind === kind
        @lexer.advance
        return token
      end
      return nil
    end
  end
end
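
Finally, a hypothetical end-to-end sketch of the parser (it assumes TSJSON::Source.new accepts a raw string, per the coercion in Parser#initialize, and that the lexer tokenizes the names as shown; the hash keys follow the AST nodes built above, while the concrete AST::Kind values come from data/lib/language/ast/kind.rb, not shown here):

  # Parse a small schema string into the plain-Hash AST produced above.
  ast = TSJSON::Parser.new('type Id = string | number;').parse_document

  ast[:kind]                              # => AST::Kind::Document
  alias_node = ast[:definitions].first
  alias_node[:kind]                       # => AST::Kind::TypeAlias
  alias_node[:definition][:kind]          # => AST::Kind::UnionType
  alias_node[:definition][:types].length  # => 2 (two TypeReference nodes)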