t-ruby 0.0.42 → 0.0.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/t_ruby/ast_type_inferrer.rb +2 -0
- data/lib/t_ruby/cache.rb +40 -10
- data/lib/t_ruby/cli.rb +13 -8
- data/lib/t_ruby/compiler.rb +168 -0
- data/lib/t_ruby/diagnostic.rb +115 -0
- data/lib/t_ruby/diagnostic_formatter.rb +162 -0
- data/lib/t_ruby/error_handler.rb +201 -35
- data/lib/t_ruby/error_reporter.rb +57 -0
- data/lib/t_ruby/ir.rb +39 -1
- data/lib/t_ruby/lsp_server.rb +40 -97
- data/lib/t_ruby/parser.rb +18 -4
- data/lib/t_ruby/parser_combinator/combinators/alternative.rb +20 -0
- data/lib/t_ruby/parser_combinator/combinators/chain_left.rb +34 -0
- data/lib/t_ruby/parser_combinator/combinators/choice.rb +29 -0
- data/lib/t_ruby/parser_combinator/combinators/flat_map.rb +21 -0
- data/lib/t_ruby/parser_combinator/combinators/label.rb +22 -0
- data/lib/t_ruby/parser_combinator/combinators/lookahead.rb +21 -0
- data/lib/t_ruby/parser_combinator/combinators/many.rb +29 -0
- data/lib/t_ruby/parser_combinator/combinators/many1.rb +32 -0
- data/lib/t_ruby/parser_combinator/combinators/map.rb +17 -0
- data/lib/t_ruby/parser_combinator/combinators/not_followed_by.rb +21 -0
- data/lib/t_ruby/parser_combinator/combinators/optional.rb +21 -0
- data/lib/t_ruby/parser_combinator/combinators/sep_by.rb +34 -0
- data/lib/t_ruby/parser_combinator/combinators/sep_by1.rb +34 -0
- data/lib/t_ruby/parser_combinator/combinators/sequence.rb +23 -0
- data/lib/t_ruby/parser_combinator/combinators/skip_right.rb +23 -0
- data/lib/t_ruby/parser_combinator/declaration_parser.rb +147 -0
- data/lib/t_ruby/parser_combinator/dsl.rb +115 -0
- data/lib/t_ruby/parser_combinator/parse_error.rb +48 -0
- data/lib/t_ruby/parser_combinator/parse_result.rb +46 -0
- data/lib/t_ruby/parser_combinator/parser.rb +84 -0
- data/lib/t_ruby/parser_combinator/primitives/end_of_input.rb +16 -0
- data/lib/t_ruby/parser_combinator/primitives/fail.rb +16 -0
- data/lib/t_ruby/parser_combinator/primitives/lazy.rb +18 -0
- data/lib/t_ruby/parser_combinator/primitives/literal.rb +21 -0
- data/lib/t_ruby/parser_combinator/primitives/pure.rb +16 -0
- data/lib/t_ruby/parser_combinator/primitives/regex.rb +25 -0
- data/lib/t_ruby/parser_combinator/primitives/satisfy.rb +21 -0
- data/lib/t_ruby/parser_combinator/token/expression_parser.rb +541 -0
- data/lib/t_ruby/parser_combinator/token/statement_parser.rb +644 -0
- data/lib/t_ruby/parser_combinator/token/token_alternative.rb +20 -0
- data/lib/t_ruby/parser_combinator/token/token_body_parser.rb +54 -0
- data/lib/t_ruby/parser_combinator/token/token_declaration_parser.rb +920 -0
- data/lib/t_ruby/parser_combinator/token/token_dsl.rb +16 -0
- data/lib/t_ruby/parser_combinator/token/token_label.rb +22 -0
- data/lib/t_ruby/parser_combinator/token/token_many.rb +29 -0
- data/lib/t_ruby/parser_combinator/token/token_many1.rb +32 -0
- data/lib/t_ruby/parser_combinator/token/token_map.rb +17 -0
- data/lib/t_ruby/parser_combinator/token/token_matcher.rb +29 -0
- data/lib/t_ruby/parser_combinator/token/token_optional.rb +21 -0
- data/lib/t_ruby/parser_combinator/token/token_parse_result.rb +40 -0
- data/lib/t_ruby/parser_combinator/token/token_parser.rb +62 -0
- data/lib/t_ruby/parser_combinator/token/token_sep_by.rb +34 -0
- data/lib/t_ruby/parser_combinator/token/token_sep_by1.rb +34 -0
- data/lib/t_ruby/parser_combinator/token/token_sequence.rb +23 -0
- data/lib/t_ruby/parser_combinator/token/token_skip_right.rb +23 -0
- data/lib/t_ruby/parser_combinator/type_parser.rb +103 -0
- data/lib/t_ruby/parser_combinator.rb +64 -936
- data/lib/t_ruby/scanner.rb +883 -0
- data/lib/t_ruby/version.rb +1 -1
- data/lib/t_ruby/watcher.rb +67 -75
- data/lib/t_ruby.rb +15 -1
- metadata +51 -2
- data/lib/t_ruby/body_parser.rb +0 -561
data/lib/t_ruby/parser_combinator/token/token_dsl.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Token DSL Module - Convenience methods for token parsing
+    module TokenDSL
+      def token(type)
+        TokenMatcher.new(type)
+      end
+
+      def keyword(kw)
+        TokenMatcher.new(kw)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_label.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Label for error messages
+    class TokenLabel < TokenParser
+      def initialize(parser, name)
+        @parser = parser
+        @name = name
+      end
+
+      def parse(tokens, position = 0)
+        result = @parser.parse(tokens, position)
+        if result.failure?
+          TokenParseResult.failure("Expected #{@name}", tokens, position)
+        else
+          result
+        end
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_many.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Many: zero or more
+    class TokenMany < TokenParser
+      def initialize(parser)
+        @parser = parser
+      end
+
+      def parse(tokens, position = 0)
+        results = []
+        current_pos = position
+
+        loop do
+          result = @parser.parse(tokens, current_pos)
+          break if result.failure?
+
+          results << result.value
+          break if result.position == current_pos # Prevent infinite loop
+
+          current_pos = result.position
+        end
+
+        TokenParseResult.success(results, tokens, current_pos)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_many1.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Many1: one or more
+    class TokenMany1 < TokenParser
+      def initialize(parser)
+        @parser = parser
+      end
+
+      def parse(tokens, position = 0)
+        first = @parser.parse(tokens, position)
+        return first if first.failure?
+
+        results = [first.value]
+        current_pos = first.position
+
+        loop do
+          result = @parser.parse(tokens, current_pos)
+          break if result.failure?
+
+          results << result.value
+          break if result.position == current_pos
+
+          current_pos = result.position
+        end
+
+        TokenParseResult.success(results, tokens, current_pos)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_map.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Map result
+    class TokenMap < TokenParser
+      def initialize(parser, func)
+        @parser = parser
+        @func = func
+      end
+
+      def parse(tokens, position = 0)
+        @parser.parse(tokens, position).map(&@func)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_matcher.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Match a specific token type
+    class TokenMatcher < TokenParser
+      def initialize(token_type)
+        @token_type = token_type
+      end
+
+      def parse(tokens, position = 0)
+        return TokenParseResult.failure("End of input", tokens, position) if position >= tokens.length
+
+        token = tokens[position]
+        return TokenParseResult.failure("End of input", tokens, position) if token.type == :eof
+
+        if token.type == @token_type
+          TokenParseResult.success(token, tokens, position + 1)
+        else
+          TokenParseResult.failure(
+            "Expected :#{@token_type}, got :#{token.type} (#{token.value.inspect})",
+            tokens,
+            position
+          )
+        end
+      end
+    end
+  end
+end
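As a quick illustration of how TokenMatcher behaves, a minimal sketch: the Token struct below is a hypothetical stand-in for the scanner's token objects, which only need to respond to #type and #value here.

  Token = Struct.new(:type, :value) # hypothetical stand-in for scanner tokens

  tokens = [Token.new(:identifier, "user"), Token.new(:colon, ":"), Token.new(:eof, nil)]

  matcher = TRuby::ParserCombinator::TokenMatcher.new(:identifier)

  hit = matcher.parse(tokens, 0)
  hit.success?    # => true
  hit.value.value # => "user"
  hit.position    # => 1 (one token consumed)

  miss = matcher.parse(tokens, 1)
  miss.failure?   # => true
  miss.error      # => 'Expected :identifier, got :colon (":")'

A failed match leaves the position untouched, which is what lets TokenAlternative and TokenOptional retry from the same spot.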
data/lib/t_ruby/parser_combinator/token/token_optional.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Optional: zero or one
+    class TokenOptional < TokenParser
+      def initialize(parser)
+        @parser = parser
+      end
+
+      def parse(tokens, position = 0)
+        result = @parser.parse(tokens, position)
+        if result.success?
+          result
+        else
+          TokenParseResult.success(nil, tokens, position)
+        end
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_parse_result.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Token-based parse result
+    class TokenParseResult
+      attr_reader :value, :tokens, :position, :error
+
+      def initialize(success:, value: nil, tokens: [], position: 0, error: nil)
+        @success = success
+        @value = value
+        @tokens = tokens
+        @position = position
+        @error = error
+      end
+
+      def success?
+        @success
+      end
+
+      def failure?
+        !@success
+      end
+
+      def self.success(value, tokens, position)
+        new(success: true, value: value, tokens: tokens, position: position)
+      end
+
+      def self.failure(error, tokens, position)
+        new(success: false, error: error, tokens: tokens, position: position)
+      end
+
+      def map
+        return self if failure?
+
+        TokenParseResult.success(yield(value), tokens, position)
+      end
+    end
+  end
+end
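TokenParseResult#map only transforms successful results and carries tokens and position through unchanged, which is what lets TokenMap stay a one-liner. A small sketch, built entirely from the class above:

  ok = TRuby::ParserCombinator::TokenParseResult.success(3, [], 1)
  ok.map { |v| v * 2 }.value     # => 6 (tokens and position are carried through)

  err = TRuby::ParserCombinator::TokenParseResult.failure("Expected :comma", [], 1)
  err.map { |v| v * 2 }.failure? # => true (map is a no-op on failures)
  err.error                      # => "Expected :comma"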
data/lib/t_ruby/parser_combinator/token/token_parser.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Base class for token parsers
+    class TokenParser
+      def parse(tokens, position = 0)
+        raise NotImplementedError
+      end
+
+      # Sequence: run this parser, then the other
+      def >>(other)
+        TokenSequence.new(self, other)
+      end
+
+      # Alternative: try this parser, if it fails try the other
+      def |(other)
+        TokenAlternative.new(self, other)
+      end
+
+      # Map: transform the result
+      def map(&block)
+        TokenMap.new(self, block)
+      end
+
+      # Many: zero or more repetitions
+      def many
+        TokenMany.new(self)
+      end
+
+      # Many1: one or more repetitions
+      def many1
+        TokenMany1.new(self)
+      end
+
+      # Optional: zero or one
+      def optional
+        TokenOptional.new(self)
+      end
+
+      # Separated by: parse items separated by delimiter
+      def sep_by(delimiter)
+        TokenSepBy.new(self, delimiter)
+      end
+
+      # Separated by 1: at least one item
+      def sep_by1(delimiter)
+        TokenSepBy1.new(self, delimiter)
+      end
+
+      # Skip right: parse both, keep left result
+      def <<(other)
+        TokenSkipRight.new(self, other)
+      end
+
+      # Label: add a descriptive label for error messages
+      def label(name)
+        TokenLabel.new(self, name)
+      end
+    end
+  end
+end
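A sketch of how these operator methods compose into a small grammar. The token types (:identifier, :colon, :comma), the Token struct, and the require line are assumptions for illustration only; the combinator classes referenced are the ones added elsewhere in this release.

  require "t_ruby" # assumption: the gem entry point loads ParserCombinator

  PC    = TRuby::ParserCombinator
  Token = Struct.new(:type, :value) # hypothetical stand-in for scanner tokens

  ident = PC::TokenMatcher.new(:identifier).map(&:value) # keep just the lexeme
  colon = PC::TokenMatcher.new(:colon)
  comma = PC::TokenMatcher.new(:comma)

  # "name : Type" pairs, comma-separated: (ident << colon) keeps the name,
  # >> ident pairs it with the type, sep_by allows zero or more pairs.
  pair  = (ident << colon) >> ident
  pairs = pair.sep_by(comma)

  tokens = %w[a : Int , b : Str].map do |s|
    case s
    when ":" then Token.new(:colon, s)
    when "," then Token.new(:comma, s)
    else Token.new(:identifier, s)
    end
  end

  pairs.parse(tokens).value # => [["a", "Int"], ["b", "Str"]]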
data/lib/t_ruby/parser_combinator/token/token_sep_by.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Separated by delimiter
+    class TokenSepBy < TokenParser
+      def initialize(parser, delimiter)
+        @parser = parser
+        @delimiter = delimiter
+      end
+
+      def parse(tokens, position = 0)
+        first = @parser.parse(tokens, position)
+        return TokenParseResult.success([], tokens, position) if first.failure?
+
+        results = [first.value]
+        current_pos = first.position
+
+        loop do
+          delim_result = @delimiter.parse(tokens, current_pos)
+          break if delim_result.failure?
+
+          item_result = @parser.parse(tokens, delim_result.position)
+          break if item_result.failure?
+
+          results << item_result.value
+          current_pos = item_result.position
+        end
+
+        TokenParseResult.success(results, tokens, current_pos)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_sep_by1.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Separated by 1 (at least one)
+    class TokenSepBy1 < TokenParser
+      def initialize(parser, delimiter)
+        @parser = parser
+        @delimiter = delimiter
+      end
+
+      def parse(tokens, position = 0)
+        first = @parser.parse(tokens, position)
+        return first if first.failure?
+
+        results = [first.value]
+        current_pos = first.position
+
+        loop do
+          delim_result = @delimiter.parse(tokens, current_pos)
+          break if delim_result.failure?
+
+          item_result = @parser.parse(tokens, delim_result.position)
+          break if item_result.failure?
+
+          results << item_result.value
+          current_pos = item_result.position
+        end
+
+        TokenParseResult.success(results, tokens, current_pos)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_sequence.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Sequence two token parsers
+    class TokenSequence < TokenParser
+      def initialize(left, right)
+        @left = left
+        @right = right
+      end
+
+      def parse(tokens, position = 0)
+        result1 = @left.parse(tokens, position)
+        return result1 if result1.failure?
+
+        result2 = @right.parse(tokens, result1.position)
+        return result2 if result2.failure?
+
+        TokenParseResult.success([result1.value, result2.value], tokens, result2.position)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/token/token_skip_right.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Skip right: parse both, return left
+    class TokenSkipRight < TokenParser
+      def initialize(left, right)
+        @left = left
+        @right = right
+      end
+
+      def parse(tokens, position = 0)
+        result1 = @left.parse(tokens, position)
+        return result1 if result1.failure?
+
+        result2 = @right.parse(tokens, result1.position)
+        return result2 if result2.failure?
+
+        TokenParseResult.success(result1.value, tokens, result2.position)
+      end
+    end
+  end
+end
data/lib/t_ruby/parser_combinator/type_parser.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+module TRuby
+  module ParserCombinator
+    # Type Parser - Parse T-Ruby type expressions
+    class TypeParser
+      include DSL
+
+      def initialize
+        build_parsers
+      end
+
+      def parse(input)
+        result = @type_expr.parse(input.strip)
+        if result.success?
+          { success: true, type: result.value, remaining: input[result.position..] }
+        else
+          { success: false, error: result.error, position: result.position }
+        end
+      end
+
+      private
+
+      def build_parsers
+        # Identifier (type name)
+        type_name = identifier.label("type name")
+
+        # Simple type
+        type_name.map { |name| IR::SimpleType.new(name: name) }
+
+        # Lazy reference for recursive types
+        type_expr = lazy { @type_expr }
+
+        # Generic type arguments: <Type, Type, ...>
+        generic_args = (
+          lexeme(char("<")) >>
+          type_expr.sep_by1(lexeme(char(","))) <<
+          lexeme(char(">"))
+        ).map { |(_, types)| types }
+
+        # Generic type: Base<Args>
+        generic_type = (type_name >> generic_args.optional).map do |(name, args)|
+          if args && !args.empty?
+            IR::GenericType.new(base: name, type_args: args)
+          else
+            IR::SimpleType.new(name: name)
+          end
+        end
+
+        # Nullable type: Type?
+        nullable_suffix = char("?")
+
+        # Parenthesized type
+        paren_type = (lexeme(char("(")) >> type_expr << lexeme(char(")"))).map { |(_, t)| t }
+
+        # Function type: (Params) -> ReturnType
+        param_list = (
+          lexeme(char("(")) >>
+          type_expr.sep_by(lexeme(char(","))) <<
+          lexeme(char(")"))
+        ).map { |(_, params)| params }
+
+        arrow = lexeme(string("->"))
+
+        function_type = (param_list >> arrow >> type_expr).map do |((params, _arrow), ret)|
+          IR::FunctionType.new(param_types: params, return_type: ret)
+        end
+
+        # Tuple type: [Type, Type, ...]
+        tuple_type = (
+          lexeme(char("[")) >>
+          type_expr.sep_by1(lexeme(char(","))) <<
+          lexeme(char("]"))
+        ).map { |(_, types)| IR::TupleType.new(element_types: types) }
+
+        # Primary type (before operators)
+        primary_type = choice(
+          function_type,
+          tuple_type,
+          paren_type,
+          generic_type
+        )
+
+        # With optional nullable suffix
+        base_type = (primary_type >> nullable_suffix.optional).map do |(type, nullable)|
+          nullable ? IR::NullableType.new(inner_type: type) : type
+        end
+
+        # Union type: Type | Type | ...
+        union_op = lexeme(char("|"))
+        union_type = base_type.sep_by1(union_op).map do |types|
+          types.length == 1 ? types.first : IR::UnionType.new(types: types)
+        end
+
+        # Intersection type: Type & Type & ...
+        intersection_op = lexeme(char("&"))
+        @type_expr = union_type.sep_by1(intersection_op).map do |types|
+          types.length == 1 ? types.first : IR::IntersectionType.new(types: types)
+        end
+      end
+    end
+  end
+end
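A hedged sketch of calling TypeParser. It assumes the DSL primitives it relies on (identifier, char, string, lexeme, choice, lazy from parser_combinator/dsl.rb) behave as their names suggest; only the shape of the returned hash is taken directly from the #parse method above, and the specific inputs are illustrative.

  require "t_ruby" # assumption: the gem entry point loads ParserCombinator

  parser = TRuby::ParserCombinator::TypeParser.new

  ok = parser.parse("Hash<String, Integer>")
  ok[:success]   # => true on a successful parse
  ok[:type]      # IR node built by the grammar (an IR::GenericType per the generic_type rule)
  ok[:remaining] # any unconsumed input

  bad = parser.parse("->")
  bad[:success]  # => false
  bad.keys       # => [:success, :error, :position]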