docsterra 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +7 -0
- data/LICENSE.txt +21 -0
- data/README.md +133 -0
- data/Rakefile +8 -0
- data/exe/docsterra +6 -0
- data/lib/docsterra/analyzer/cost_analyzer.rb +104 -0
- data/lib/docsterra/analyzer/dependency_analyzer.rb +192 -0
- data/lib/docsterra/analyzer/network_analyzer.rb +78 -0
- data/lib/docsterra/analyzer/resource_analyzer.rb +95 -0
- data/lib/docsterra/analyzer/security_analyzer.rb +172 -0
- data/lib/docsterra/cli.rb +91 -0
- data/lib/docsterra/config.rb +175 -0
- data/lib/docsterra/document.rb +38 -0
- data/lib/docsterra/model/network.rb +22 -0
- data/lib/docsterra/model/project.rb +98 -0
- data/lib/docsterra/model/relationship.rb +42 -0
- data/lib/docsterra/model/resource.rb +133 -0
- data/lib/docsterra/parser/expression_inspector.rb +122 -0
- data/lib/docsterra/parser/hcl_ast.rb +31 -0
- data/lib/docsterra/parser/hcl_lexer.rb +378 -0
- data/lib/docsterra/parser/hcl_parser.rb +464 -0
- data/lib/docsterra/parser/module_resolver.rb +89 -0
- data/lib/docsterra/parser/resource_registry.rb +196 -0
- data/lib/docsterra/renderer/cost_section.rb +24 -0
- data/lib/docsterra/renderer/markdown_renderer.rb +184 -0
- data/lib/docsterra/renderer/mermaid_diagram.rb +141 -0
- data/lib/docsterra/renderer/resource_table.rb +63 -0
- data/lib/docsterra/renderer/security_section.rb +82 -0
- data/lib/docsterra/version.rb +5 -0
- data/lib/docsterra.rb +227 -0
- metadata +88 -0
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Docsterra
  module Model
    # A single Terraform resource (or data source) discovered by the parser.
    #
    # Wraps the raw attribute tree and offers dotted-path lookup plus
    # text/Ruby renderings of attribute values for the documentation renderers.
    class Resource
      attr_reader :type, :name, :attributes, :project, :references, :meta, :source_file,
                  :comment, :kind, :display_name, :category, :key_attributes

      # type/name identify the block (e.g. "aws_s3_bucket" / "logs");
      # kind is :resource or :data; references are de-duplicated;
      # attributes/meta fall back to empty hashes when nil.
      def initialize(
        type:,
        name:,
        attributes:,
        project:,
        references: [],
        meta: {},
        source_file: nil,
        comment: nil,
        kind: :resource,
        display_name: nil,
        category: :other,
        key_attributes: []
      )
        @type = type
        @name = name
        @attributes = attributes || {}
        @project = project
        @references = Array(references).uniq
        @meta = meta || {}
        @source_file = source_file
        @comment = comment
        @kind = kind
        @display_name = display_name || type
        @category = category || :other
        @key_attributes = Array(key_attributes)
      end

      # Canonical Terraform address, e.g. "aws_s3_bucket.logs".
      def identifier
        "#{type}.#{name}"
      end

      def data_source?
        @kind == :data
      end

      def resource?
        @kind == :resource
      end

      # Prefers an explicit "description" attribute, falling back to the
      # source comment attached to the block.
      def description
        attribute_text("description") || comment
      end

      # Looks up a value by dotted path (e.g. "tags.env", "rules.0.id").
      # Returns nil when the path does not resolve.
      def attribute(path)
        traverse(@attributes, path.to_s.split("."))
      end

      # Renders the attribute at +path+ as a single display string,
      # joining arrays with ", " and inspecting hashes. Returns nil when absent.
      def attribute_text(path)
        value = attribute(path)
        return nil if value.nil?

        if value.is_a?(Array)
          rendered = value.map do |entry|
            entry.is_a?(Hash) ? compact_hash(entry) : Docsterra::Parser::ExpressionInspector.to_text(entry)
          end
          rendered.map(&:to_s).join(", ")
        elsif value.is_a?(Hash)
          compact_hash(value).inspect
        else
          Docsterra::Parser::ExpressionInspector.to_text(value)
        end
      end

      # Converts the attribute at +path+ into plain Ruby data
      # (nested arrays/hashes preserved). Returns nil when absent.
      def attribute_ruby(path)
        value = attribute(path)
        case value
        when nil
          nil
        when Array
          value.map { |entry| entry.is_a?(Hash) ? hash_to_ruby(entry) : Docsterra::Parser::ExpressionInspector.to_ruby(entry) }
        when Hash
          hash_to_ruby(value)
        else
          Docsterra::Parser::ExpressionInspector.to_ruby(value)
        end
      end

      private

      # Walks +value+ along +segments+. Hashes consume one segment per level;
      # arrays consume a segment only when it is numeric, otherwise the walk
      # descends into the first element without consuming the segment.
      # Any other value type with segments remaining yields nil.
      def traverse(value, segments)
        return value if segments.empty?

        key = segments.first
        rest = segments.drop(1)

        if value.is_a?(Hash)
          traverse(value[key], rest)
        elsif value.is_a?(Array)
          if integer_string?(key)
            traverse(value[key.to_i], rest)
          else
            traverse(value.first, segments)
          end
        end
      end

      def integer_string?(value)
        value.match?(/\A\d+\z/)
      end

      # Recursively renders every leaf of +hash+ to display text.
      def compact_hash(hash)
        hash.transform_values do |entry|
          case entry
          when Hash
            compact_hash(entry)
          when Array
            entry.map { |item| item.is_a?(Hash) ? compact_hash(item) : Docsterra::Parser::ExpressionInspector.to_text(item) }
          else
            Docsterra::Parser::ExpressionInspector.to_text(entry)
          end
        end
      end

      # Recursively converts every leaf of +hash+ to a plain Ruby value.
      def hash_to_ruby(hash)
        hash.transform_values do |entry|
          case entry
          when Hash
            hash_to_ruby(entry)
          when Array
            entry.map { |item| item.is_a?(Hash) ? hash_to_ruby(item) : Docsterra::Parser::ExpressionInspector.to_ruby(item) }
          else
            Docsterra::Parser::ExpressionInspector.to_ruby(entry)
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Docsterra
  module Parser
    # Renders parsed HCL expression AST nodes back into human-readable text
    # (+to_text+), plain Ruby values (+to_ruby+), and collects the dotted
    # references (e.g. "var.x", "aws_instance.web.id") an expression mentions.
    module ExpressionInspector
      module_function

      # Converts an AST node into display text approximating HCL source.
      # Returns nil for a nil node; unknown node types fall back to #to_s.
      def to_text(node)
        case node
        when nil
          nil
        when AST::Literal
          format_literal(node.value)
        when AST::Reference
          node.parts.map { |part| part.is_a?(String) ? part : to_text(part) }.join(".")
        when AST::TemplateExpr
          node.parts.join
        when AST::RawExpr
          node.text
        when AST::ListExpr
          "[" + node.elements.map { |element| to_text(element) }.join(", ") + "]"
        when AST::MapExpr
          "{" + node.pairs.map { |k, v| "#{k} = #{to_text(v)}" }.join(", ") + "}"
        when AST::FunctionCall
          "#{node.name}(#{node.args.map { |arg| to_text(arg) }.join(', ')})"
        when AST::IndexExpr
          "#{to_text(node.expr)}[#{to_text(node.index)}]"
        when AST::ConditionalExpr
          "#{to_text(node.cond)} ? #{to_text(node.true_val)} : #{to_text(node.false_val)}"
        when AST::UnaryExpr
          "#{node.op}#{to_text(node.expr)}"
        when AST::BinaryExpr
          "#{to_text(node.left)} #{node.op} #{to_text(node.right)}"
        when AST::SplatExpr
          # Renders e.g. aws_instance.web.*.id; attr may be nil for a bare splat.
          # (A trailing `.gsub(".*.", ".*.")` was removed here: it replaced the
          # string with itself and had no effect.)
          [to_text(node.expr), "*", node.attr].compact.join(".")
        when AST::ForExpr
          render_for_expr(node)
        else
          node.to_s
        end
      end

      # Converts an AST node into a plain Ruby value where possible
      # (literals, lists, maps); everything else degrades to its text form.
      def to_ruby(node)
        case node
        when nil
          nil
        when AST::Literal
          node.value
        when AST::ListExpr
          node.elements.map { |element| to_ruby(element) }
        when AST::MapExpr
          node.pairs.transform_values { |value| to_ruby(value) }
        else
          to_text(node)
        end
      end

      # Returns the unique dotted reference strings found anywhere in +node+.
      def collect_references(node)
        refs = []
        walk(node) do |value|
          refs << to_text(value) if value.is_a?(AST::Reference)
        end
        refs.uniq.compact
      end

      # Depth-first traversal over an expression tree, yielding every node.
      # Nil children are skipped; leaf node types simply yield themselves.
      def walk(node, &block)
        return if node.nil?

        yield node

        case node
        when AST::ListExpr
          node.elements.each { |element| walk(element, &block) }
        when AST::MapExpr
          node.pairs.each_value { |value| walk(value, &block) }
        when AST::FunctionCall
          node.args.each { |arg| walk(arg, &block) }
        when AST::IndexExpr
          walk(node.expr, &block)
          walk(node.index, &block)
        when AST::ConditionalExpr
          walk(node.cond, &block)
          walk(node.true_val, &block)
          walk(node.false_val, &block)
        when AST::ForExpr
          walk(node.collection, &block)
          walk(node.body, &block)
          walk(node.cond, &block)
        when AST::UnaryExpr
          walk(node.expr, &block)
        when AST::BinaryExpr
          walk(node.left, &block)
          walk(node.right, &block)
        when AST::SplatExpr
          walk(node.expr, &block)
        end
      end

      # Renders a for-expression, wrapped in {} for map form and [] for list form.
      def render_for_expr(node)
        vars = [node.key_var, node.val_var].compact.join(", ")
        text = "for #{vars} in #{to_text(node.collection)} : #{to_text(node.body)}"
        text += " if #{to_text(node.cond)}" if node.cond
        wrapper_left = node.is_map ? "{" : "["
        wrapper_right = node.is_map ? "}" : "]"
        "#{wrapper_left}#{text}#{wrapper_right}"
      end
      private_class_method :render_for_expr

      # Literal display rules: strings pass through, nil becomes "null",
      # everything else uses #to_s.
      def format_literal(value)
        case value
        when String
          value
        when NilClass
          "null"
        else
          value.to_s
        end
      end
      private_class_method :format_literal
    end
  end
end
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Docsterra
  module Parser
    # Struct-backed node types for the parsed HCL tree. Every node is a
    # keyword-initialized Struct, so fields are always named at construction.
    module AST
      class << self
        # Builds a keyword-initialized Struct subclass with the given fields.
        def node(*fields)
          Struct.new(*fields, keyword_init: true)
        end
      end

      # Nodes describing a whole file and its top-level structure.
      File = node(:blocks, :comments)
      Block = node(:type, :labels, :body, :comment)
      Attribute = node(:key, :value, :comment)

      # Nodes describing values and expressions.
      Literal = node(:value)
      ListExpr = node(:elements)
      MapExpr = node(:pairs)
      Reference = node(:parts)
      FunctionCall = node(:name, :args)
      TemplateExpr = node(:parts)
      IndexExpr = node(:expr, :index)
      ConditionalExpr = node(:cond, :true_val, :false_val)
      ForExpr = node(:key_var, :val_var, :collection, :body, :cond, :is_map)
      UnaryExpr = node(:op, :expr)
      BinaryExpr = node(:left, :op, :right)
      SplatExpr = node(:expr, :attr)
      RawExpr = node(:text)
    end
  end
end
|
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Docsterra
  module Parser
    # Hand-written single-pass lexer for HCL-style source text.
    #
    # Produces a flat array of Token structs (type, value, lexeme, position),
    # ending with an :EOF token. Comments and newlines are emitted as tokens
    # so the parser can attach doc comments to blocks.
    #
    # Refactor note: the original hand-rolled a full Token.new(...) in seven
    # places even though every token's lexeme is exactly the consumed source
    # slice; all construction now flows through +build_token+ via +mark+.
    class HclLexer
      Token = Struct.new(
        :type,
        :value,
        :lexeme,
        :line,
        :column,
        :start_pos,
        :end_pos,
        keyword_init: true
      )

      SINGLE_CHAR_TOKENS = {
        "{" => :LBRACE,
        "}" => :RBRACE,
        "[" => :LBRACK,
        "]" => :RBRACK,
        "(" => :LPAREN,
        ")" => :RPAREN,
        "=" => :EQUALS,
        "," => :COMMA,
        "." => :DOT
      }.freeze

      EXTRA_TOKENS = {
        "?" => :QUESTION,
        ":" => :COLON,
        "+" => :PLUS,
        "-" => :MINUS,
        "*" => :STAR,
        "/" => :SLASH,
        "%" => :PERCENT,
        "!" => :BANG,
        ">" => :GT,
        "<" => :LT
      }.freeze

      def initialize(text = nil)
        @text = text
      end

      # Tokenizes +text+ (defaults to the text given at construction).
      # Raises ArgumentError when no text is available.
      def tokenize(text = @text)
        raise ArgumentError, "text is required" if text.nil?

        setup(text)
        tokens = []

        until eof?
          case current_char
          when " ", "\t", "\r"
            advance # insignificant whitespace
          when "\n"
            tokens << build_token(:NEWLINE, "\n", consume_newline)
          when "#"
            tokens << consume_line_comment("#")
          when "/"
            tokens << if peek_char == "/"
                        consume_line_comment("//")
                      elsif peek_char == "*"
                        consume_block_comment
                      else
                        consume_single_char_token(EXTRA_TOKENS.fetch("/"))
                      end
          when "\""
            tokens << consume_string
          when "<"
            tokens << if peek_char == "<"
                        consume_heredoc
                      else
                        consume_single_char_token(EXTRA_TOKENS.fetch("<"))
                      end
          else
            tokens << consume_token
          end
        end

        tokens << build_token(:EOF, nil, mark)
        tokens
      end

      private

      def setup(text)
        @text = text
        @index = 0
        @line = 1
        @column = 1
      end

      # Snapshot of the current position as [start_pos, start_line, start_column];
      # passed to build_token after the token's characters have been consumed.
      def mark
        [@index, @line, @column]
      end

      def consume_token
        return consume_number if digit?(current_char)
        return consume_identifier if identifier_start?(current_char)
        return consume_single_char_token(SINGLE_CHAR_TOKENS.fetch(current_char)) if SINGLE_CHAR_TOKENS.key?(current_char)
        return consume_single_char_token(EXTRA_TOKENS.fetch(current_char)) if EXTRA_TOKENS.key?(current_char)

        consume_unknown
      end

      def consume_single_char_token(type)
        origin = mark
        advance
        build_token(type, @text[origin.first...@index], origin)
      end

      # Consumes the newline character and returns the position triple for it.
      def consume_newline
        origin = mark
        advance
        origin
      end

      # Consumes "# ..." or "// ..." up to (not including) the newline.
      # The token value is the comment text with the prefix stripped.
      def consume_line_comment(prefix)
        origin = mark
        advance(prefix.length)
        advance until eof? || current_char == "\n"
        lexeme = @text[origin.first...@index]
        build_token(:COMMENT, lexeme.delete_prefix(prefix).strip, origin)
      end

      # Consumes "/* ... */" (unterminated comments run to EOF).
      def consume_block_comment
        origin = mark
        advance(2) # leading "/*"

        until eof?
          if current_char == "*" && peek_char == "/"
            advance(2)
            break
          end

          advance
        end

        lexeme = @text[origin.first...@index]
        build_token(:COMMENT, lexeme.sub(%r{\A/\*}, "").sub(%r{\*/\z}, "").strip, origin)
      end

      # Consumes a double-quoted string. Backslash escapes are kept verbatim
      # (escape + following char); an unterminated string runs to EOF.
      def consume_string
        origin = mark
        advance # opening quote

        content = +""
        until eof?
          char = current_char
          if char == "\\"
            content << char
            advance
            break if eof?

            content << current_char
            advance
            next
          end

          if char == "\""
            advance
            break
          end

          content << char
          advance
        end

        build_token(:STRING, content, origin)
      end

      # Consumes <<DELIM / <<-DELIM heredocs. With the indented (<<-) form the
      # terminator line may be indented; the body is captured verbatim.
      def consume_heredoc
        origin = mark
        advance(2) # "<<"
        advance if current_char == "-"

        header_start = @index
        advance while !eof? && current_char != "\n"
        delimiter = @text[header_start...@index].strip
        indented = @text[origin.first...header_start].end_with?("<<-")

        advance if current_char == "\n"

        content = +""
        loop do
          break if eof?

          line_start = @index
          advance while !eof? && current_char != "\n"
          line_text = @text[line_start...@index]

          terminator = indented ? line_text.strip == delimiter : line_text == delimiter
          if !delimiter.empty? && terminator
            advance if current_char == "\n"
            break
          end

          content << line_text
          if current_char == "\n"
            content << "\n"
            advance
          end
        end

        build_token(:HEREDOC, content, origin)
      end

      # Consumes an integer or decimal number; "1.x" without a digit after
      # the dot lexes as NUMBER(1) followed by DOT.
      def consume_number
        origin = mark

        advance while digit?(current_char)
        if current_char == "." && digit?(peek_char)
          advance
          advance while digit?(current_char)
        end

        lexeme = @text[origin.first...@index]
        build_token(:NUMBER, lexeme.include?(".") ? lexeme.to_f : lexeme.to_i, origin)
      end

      # Consumes an identifier; "true"/"false" become BOOL tokens.
      def consume_identifier
        origin = mark
        advance while identifier_part?(current_char)
        lexeme = @text[origin.first...@index]

        if %w[true false].include?(lexeme)
          build_token(:BOOL, lexeme == "true", origin)
        else
          build_token(:IDENT, lexeme, origin)
        end
      end

      # Any character no other rule claims becomes a single UNKNOWN token.
      def consume_unknown
        origin = mark
        char = current_char
        advance
        build_token(:UNKNOWN, char, origin)
      end

      # Builds a Token from the start-position triple captured before the
      # token's characters were consumed; the lexeme is always the raw slice
      # from start_pos to the current index.
      def build_token(type, value, triple)
        start_pos, start_line, start_column = triple
        Token.new(
          type: type,
          value: value,
          lexeme: @text[start_pos...@index],
          line: start_line,
          column: start_column,
          start_pos: start_pos,
          end_pos: @index
        )
      end

      def current_char
        @text[@index]
      end

      def peek_char(offset = 1)
        @text[@index + offset]
      end

      # Moves forward +count+ characters, tracking line/column; stops at EOF.
      def advance(count = 1)
        count.times do
          break if eof?

          if current_char == "\n"
            @line += 1
            @column = 1
          else
            @column += 1
          end
          @index += 1
        end
      end

      def eof?
        @index >= @text.length
      end

      def digit?(char)
        !char.nil? && char >= "0" && char <= "9"
      end

      def identifier_start?(char)
        !char.nil? && ((char >= "a" && char <= "z") || (char >= "A" && char <= "Z") || char == "_")
      end

      def identifier_part?(char)
        identifier_start?(char) || digit?(char) || char == "-"
      end
    end
  end
end
|