lrama 0.5.6 → 0.5.7

data/lib/lrama/lexer.rb CHANGED
@@ -1,346 +1,174 @@
 require "strscan"
-require "lrama/report/duration"
 require "lrama/lexer/token"
 
 module Lrama
-  # Lexer for parse.y
   class Lexer
-    include Lrama::Report::Duration
-
-    # States
-    #
-    # See: https://www.gnu.org/software/bison/manual/html_node/Grammar-Outline.html
-    Initial = 0
-    Prologue = 1
-    BisonDeclarations = 2
-    GrammarRules = 3
-    Epilogue = 4
-
-    # Token types
-
-    attr_reader :prologue, :bison_declarations, :grammar_rules, :epilogue,
-                :bison_declarations_tokens, :grammar_rules_tokens
+    attr_accessor :status
+    attr_accessor :end_symbol
+
+    SYMBOLS = %w(%{ %} %% { } \[ \] : \| ;)
+    PERCENT_TOKENS = %w(
+      %union
+      %token
+      %type
+      %left
+      %right
+      %nonassoc
+      %expect
+      %define
+      %require
+      %printer
+      %lex-param
+      %parse-param
+      %initial-action
+      %precedence
+      %prec
+      %error-token
+    )
 
     def initialize(text)
-      @text = text
-      @state = Initial
-      # Array of texts
-      @prologue = []
-      @bison_declarations = []
-      @grammar_rules = []
-      @epilogue = []
-
-      @bison_declarations_tokens = []
-      @grammar_rules_tokens = []
-
-      @debug = false
+      @scanner = StringScanner.new(text)
+      @head = @scanner.pos
+      @line = 1
+      @status = :initial
+      @end_symbol = nil
+    end
 
-      report_duration(:lex) do
-        lex_text
-        lex_bison_declarations_tokens
-        lex_grammar_rules_tokens
+    def next_token
+      case @status
+      when :initial
+        lex_token
+      when :c_declaration
+        lex_c_code
       end
     end
 
-    private
-
-    def create_token(type, s_value, line, column)
-      t = Token.new(type: type, s_value: s_value)
-      t.line = line
-      t.column = column
-
-      return t
+    def line
+      @line
     end
 
-    # TODO: Remove this
-    def lex_text
-      @text.each_line.with_index(1) do |string, lineno|
-        case @state
-        when Initial
-          # Skip until "%{"
-          if string == "%{\n"
-            @state = Prologue
-            @prologue << ["", lineno]
-            next
-          end
-        when Prologue
-          # Between "%{" and "%}"
-          if string == "%}\n"
-            @state = BisonDeclarations
-            @prologue << ["", lineno]
-            next
-          end
-
-          @prologue << [string, lineno]
-        when BisonDeclarations
-          if string == "%%\n"
-            @state = GrammarRules
-            next
-          end
-
-          @bison_declarations << [string, lineno]
-        when GrammarRules
-          # Between "%%" and "%%"
-          if string == "%%\n"
-            @state = Epilogue
-            next
-          end
-
-          @grammar_rules << [string, lineno]
-        when Epilogue
-          @epilogue << [string, lineno]
-        else
-          raise "Unknown state: #{@state}"
-        end
-      end
+    def column
+      @scanner.pos - @head
     end
 
-    # See:
-    # * https://www.gnu.org/software/bison/manual/html_node/Decl-Summary.html
-    # * https://www.gnu.org/software/bison/manual/html_node/Symbol-Decls.html
-    # * https://www.gnu.org/software/bison/manual/html_node/Empty-Rules.html
-    def lex_common(lines, tokens)
-      line = lines.first[1]
-      column = 0
-      ss = StringScanner.new(lines.map(&:first).join)
-
-      while !ss.eos? do
+    def lex_token
+      while !@scanner.eos? do
         case
-        when ss.scan(/\n/)
-          line += 1
-          column = ss.pos
-        when ss.scan(/\s+/)
-          # skip
-        when ss.scan(/;/)
-          tokens << create_token(Token::Semicolon, ss[0], line, ss.pos - column)
-        when ss.scan(/\|/)
-          tokens << create_token(Token::Bar, ss[0], line, ss.pos - column)
-        when ss.scan(/(\d+)/)
-          tokens << create_token(Token::Number, Integer(ss[0]), line, ss.pos - column)
-        when ss.scan(/(<[a-zA-Z0-9_]+>)/)
-          tokens << create_token(Token::Tag, ss[0], line, ss.pos - column)
-        when ss.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]\s*:/)
-          tokens << create_token(Token::Ident_Colon, ss[1], line, ss.pos - column)
-          tokens << create_token(Token::Named_Ref, ss[2], line, ss.pos - column)
-        when ss.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)\s*:/)
-          tokens << create_token(Token::Ident_Colon, ss[1], line, ss.pos - column)
-        when ss.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)/)
-          tokens << create_token(Token::Ident, ss[0], line, ss.pos - column)
-        when ss.scan(/\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/)
-          tokens << create_token(Token::Named_Ref, ss[1], line, ss.pos - column)
-        when ss.scan(/%expect/)
-          tokens << create_token(Token::P_expect, ss[0], line, ss.pos - column)
-        when ss.scan(/%define/)
-          tokens << create_token(Token::P_define, ss[0], line, ss.pos - column)
-        when ss.scan(/%printer/)
-          tokens << create_token(Token::P_printer, ss[0], line, ss.pos - column)
-        when ss.scan(/%error-token/)
-          tokens << create_token(Token::P_error_token, ss[0], line, ss.pos - column)
-        when ss.scan(/%lex-param/)
-          tokens << create_token(Token::P_lex_param, ss[0], line, ss.pos - column)
-        when ss.scan(/%parse-param/)
-          tokens << create_token(Token::P_parse_param, ss[0], line, ss.pos - column)
-        when ss.scan(/%initial-action/)
-          tokens << create_token(Token::P_initial_action, ss[0], line, ss.pos - column)
-        when ss.scan(/%union/)
-          tokens << create_token(Token::P_union, ss[0], line, ss.pos - column)
-        when ss.scan(/%token/)
-          tokens << create_token(Token::P_token, ss[0], line, ss.pos - column)
-        when ss.scan(/%type/)
-          tokens << create_token(Token::P_type, ss[0], line, ss.pos - column)
-        when ss.scan(/%nonassoc/)
-          tokens << create_token(Token::P_nonassoc, ss[0], line, ss.pos - column)
-        when ss.scan(/%left/)
-          tokens << create_token(Token::P_left, ss[0], line, ss.pos - column)
-        when ss.scan(/%right/)
-          tokens << create_token(Token::P_right, ss[0], line, ss.pos - column)
-        when ss.scan(/%precedence/)
-          tokens << create_token(Token::P_precedence, ss[0], line, ss.pos - column)
-        when ss.scan(/%prec/)
-          tokens << create_token(Token::P_prec, ss[0], line, ss.pos - column)
-        when ss.scan(/{/)
-          token, line = lex_user_code(ss, line, ss.pos - column, lines)
-          tokens << token
-        when ss.scan(/"/)
-          string, line = lex_string(ss, "\"", line, lines)
-          token = create_token(Token::String, string, line, ss.pos - column)
-          tokens << token
-        when ss.scan(/\/\*/)
-          # TODO: Need to keep comment?
-          line = lex_comment(ss, line, lines, "")
-        when ss.scan(/\/\//)
-          line = lex_line_comment(ss, line, "")
-        when ss.scan(/'(.)'/)
-          tokens << create_token(Token::Char, ss[0], line, ss.pos - column)
-        when ss.scan(/'\\(.)'/) # '\\', '\t'
-          tokens << create_token(Token::Char, ss[0], line, ss.pos - column)
-        when ss.scan(/'\\(\d+)'/) # '\13'
-          tokens << create_token(Token::Char, ss[0], line, ss.pos - column)
-        when ss.scan(/%empty/)
-          # skip
+        when @scanner.scan(/\n/)
+          newline
+        when @scanner.scan(/\s+/)
+          # noop
+        when @scanner.scan(/\/\*/)
+          lex_comment
+        when @scanner.scan(/\/\//)
+          @scanner.scan_until(/\n/)
+          newline
+        when @scanner.scan(/%empty/)
+          # noop
         else
-          l = line - lines.first[1]
-          split = ss.string.split("\n")
-          col = ss.pos - split[0...l].join("\n").length
-          raise "Parse error (unknown token): #{split[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{col})"
+          break
         end
       end
-    end
 
-    def lex_bison_declarations_tokens
-      lex_common(@bison_declarations, @bison_declarations_tokens)
+      @head_line = line
+      @head_column = column
+
+      case
+      when @scanner.eos?
+        return
+      when @scanner.scan(/#{SYMBOLS.join('|')}/)
+        return [@scanner.matched, @scanner.matched]
+      when @scanner.scan(/#{PERCENT_TOKENS.join('|')}/)
+        return [@scanner.matched, @scanner.matched]
+      when @scanner.scan(/<\w+>/)
+        return [:TAG, build_token(type: Token::Tag, s_value: @scanner.matched)]
+      when @scanner.scan(/'.'/)
+        return [:CHARACTER, build_token(type: Token::Char, s_value: @scanner.matched)]
+      when @scanner.scan(/'\\\\'|'\\b'|'\\t'|'\\f'|'\\r'|'\\n'|'\\v'|'\\13'/)
+        return [:CHARACTER, build_token(type: Token::Char, s_value: @scanner.matched)]
+      when @scanner.scan(/"/)
+        return [:STRING, %Q("#{@scanner.scan_until(/"/)})]
+      when @scanner.scan(/\d+/)
+        return [:INTEGER, Integer(@scanner.matched)]
+      when @scanner.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)/)
+        token = build_token(type: Token::Ident, s_value: @scanner.matched)
+        type =
+          if @scanner.check(/\s*(\[\s*[a-zA-Z_.][-a-zA-Z0-9_.]*\s*\])?\s*:/)
+            :IDENT_COLON
+          else
+            :IDENTIFIER
+          end
+        return [type, token]
+      else
+        raise
+      end
     end
 
-    def lex_user_code(ss, line, column, lines)
-      first_line = line
-      first_column = column
-      debug("Enter lex_user_code: #{line}")
-      brace_count = 1
-      str = "{"
-      # Array of [type, $n, tag, first column, last column]
-      # TODO: Is it better to keep string, like "$$", and use gsub?
-      references = []
-
-      while !ss.eos? do
+    def lex_c_code
+      nested = 0
+      code = ''
+      while !@scanner.eos? do
         case
-        when ss.scan(/\n/)
-          line += 1
-        when ss.scan(/"/)
-          string, line = lex_string(ss, "\"", line, lines)
-          str << string
-          next
-        when ss.scan(/'/)
-          string, line = lex_string(ss, "'", line, lines)
-          str << string
-          next
-
-        # $ references
-        # It need to wrap an identifier with brackets to use ".-" for identifiers
-        when ss.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
-          tag = ss[1] ? create_token(Token::Tag, ss[1], line, str.length) : nil
-          references << [:dollar, "$", tag, str.length, str.length + ss[0].length - 1]
-        when ss.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
-          tag = ss[1] ? create_token(Token::Tag, ss[1], line, str.length) : nil
-          references << [:dollar, Integer(ss[2]), tag, str.length, str.length + ss[0].length - 1]
-        when ss.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
-          tag = ss[1] ? create_token(Token::Tag, ss[1], line, str.length) : nil
-          references << [:dollar, ss[2], tag, str.length, str.length + ss[0].length - 1]
-        when ss.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
-          tag = ss[1] ? create_token(Token::Tag, ss[1], line, str.length) : nil
-          references << [:dollar, ss[2], tag, str.length, str.length + ss[0].length - 1]
-
-        # @ references
-        # It need to wrap an identifier with brackets to use ".-" for identifiers
-        when ss.scan(/@\$/) # @$
-          references << [:at, "$", nil, str.length, str.length + ss[0].length - 1]
-        when ss.scan(/@(\d+)/) # @1
-          references << [:at, Integer(ss[1]), nil, str.length, str.length + ss[0].length - 1]
-        when ss.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
-          references << [:at, ss[1], nil, str.length, str.length + ss[0].length - 1]
-        when ss.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
-          references << [:at, ss[1], nil, str.length, str.length + ss[0].length - 1]
-
-        when ss.scan(/{/)
-          brace_count += 1
-        when ss.scan(/}/)
-          brace_count -= 1
-
-          debug("Return lex_user_code: #{line}")
-          if brace_count == 0
-            str << ss[0]
-            user_code = Token.new(type: Token::User_code, s_value: str.freeze)
-            user_code.line = first_line
-            user_code.column = first_column
-            user_code.references = references
-            return [user_code, line]
+        when @scanner.scan(/{/)
+          code += @scanner.matched
+          nested += 1
+        when @scanner.scan(/}/)
+          if nested == 0 && @end_symbol == '}'
+            @scanner.unscan
+            return [:C_DECLARATION, build_token(type: Token::User_code, s_value: code, references: [])]
+          else
+            code += @scanner.matched
+            nested -= 1
           end
-        when ss.scan(/\/\*/)
-          str << ss[0]
-          line = lex_comment(ss, line, lines, str)
-        when ss.scan(/\/\//)
-          str << ss[0]
-          line = lex_line_comment(ss, line, str)
+        when @scanner.check(/#{@end_symbol}/)
+          return [:C_DECLARATION, build_token(type: Token::User_code, s_value: code, references: [])]
+        when @scanner.scan(/\n/)
+          code += @scanner.matched
+          newline
+        when @scanner.scan(/"/)
+          matched = @scanner.scan_until(/"/)
+          code += %Q("#{matched})
+          @line += matched.count("\n")
+        when @scanner.scan(/'/)
+          matched = @scanner.scan_until(/'/)
+          code += %Q('#{matched})
         else
-          # noop, just consume char
-          str << ss.getch
-          next
+          code += @scanner.getch
         end
-
-        str << ss[0]
       end
-
-      # Reach to end of input but brace does not match
-      l = line - lines.first[1]
-      raise "Parse error (brace mismatch): #{ss.string.split("\n")[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{ss.pos})"
+      raise
     end
 
-    def lex_string(ss, terminator, line, lines)
-      debug("Enter lex_string: #{line}")
-
-      str = terminator.dup
-
-      while (c = ss.getch) do
-        str << c
-
-        case c
-        when "\n"
-          line += 1
-        when terminator
-          debug("Return lex_string: #{line}")
-          return [str, line]
-        else
-          # noop
-        end
-      end
-
-      # Reach to end of input but quote does not match
-      l = line - lines.first[1]
-      raise "Parse error (quote mismatch): #{ss.string.split("\n")[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{ss.pos})"
-    end
+    private
 
-    # /* */ style comment
-    def lex_comment(ss, line, lines, str)
-      while !ss.eos? do
+    def lex_comment
+      while !@scanner.eos? do
         case
-        when ss.scan(/\n/)
-          line += 1
-        when ss.scan(/\*\//)
-          return line
+        when @scanner.scan(/\n/)
+          @line += 1
+          @head = @scanner.pos + 1
+        when @scanner.scan(/\*\//)
+          return
         else
-          str << ss.getch
-          next
+          @scanner.getch
         end
-
-        str << ss[0]
       end
-
-      # Reach to end of input but quote does not match
-      l = line - lines.first[1]
-      raise "Parse error (comment mismatch): #{ss.string.split("\n")[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{ss.pos})"
     end
 
-    # // style comment
-    def lex_line_comment(ss, line, str)
-      while !ss.eos? do
-        case
-        when ss.scan(/\n/)
-          return line + 1
-        else
-          str << ss.getch
-          next
-        end
+    def build_token(type:, s_value:, **options)
+      token = Token.new(type: type, s_value: s_value)
+      token.line = @head_line
+      token.column = @head_column
+      options.each do |attr, value|
+        token.public_send("#{attr}=", value)
       end
 
-      line # Reach to end of input
-    end
-
-    def lex_grammar_rules_tokens
-      lex_common(@grammar_rules, @grammar_rules_tokens)
+      token
     end
 
-    def debug(msg)
-      return unless @debug
-      puts "#{msg}\n"
+    def newline
+      @line += 1
+      @head = @scanner.pos + 1
     end
   end
 end
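
The hunk above is a wholesale rewrite of the lexer: the old pipeline that pre-split the grammar into prologue/declarations/rules/epilogue and tokenized each section (lex_text, lex_common, lex_user_code) becomes a single pull-based scanner. The caller now asks for one token at a time via next_token and, around embedded C blocks, flips status to :c_declaration and sets end_symbol so that lex_c_code collects raw code up to the delimiter. A minimal driving sketch, assuming only what the diff shows (in the gem itself the parser performs this switching; the '{' handling below is illustrative):

    require "lrama/lexer"

    lexer = Lrama::Lexer.new(File.read("parse.y"))
    while (token = lexer.next_token)
      type, value = token
      if type == '{'
        # User code follows: read as C until the matching '}' is next.
        lexer.status = :c_declaration
        lexer.end_symbol = '}'
      elsif type == :C_DECLARATION
        # lex_c_code unscans just before end_symbol; resume grammar tokens.
        lexer.status = :initial
        lexer.end_symbol = nil
      end
      p [type, value, lexer.line]
    end
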
@@ -52,12 +52,16 @@ module Lrama
             Usage: lrama [options] FILE
         BANNER
         o.separator ''
+        o.separator 'STDIN mode:'
+        o.separator 'lrama [options] - FILE               read grammar from STDIN'
+        o.separator ''
         o.separator 'Tuning the Parser:'
         o.on('-S', '--skeleton=FILE', 'specify the skeleton to use') {|v| @options.skeleton = v }
         o.on('-t', 'reserved, do nothing') { }
         o.separator ''
         o.separator 'Output:'
-        o.on('-h', '--header=[FILE]', 'also produce a header file named FILE') {|v| @options.header = true; @options.header_file = v }
+        o.on('-H', '--header=[FILE]', 'also produce a header file named FILE') {|v| @options.header = true; @options.header_file = v }
+        o.on('-h=[FILE]', 'also produce a header file named FILE (deprecated)') {|v| @options.header = true; @options.header_file = v }
         o.on('-d', 'also produce a header file') { @options.header = true }
         o.on('-r', '--report=THINGS', Array, 'also produce details on the automaton') {|v| @report = v }
         o.on('--report-file=FILE', 'also produce details on the automaton output to a file named FILE') {|v| @options.report_file = v }
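
This option-parser hunk makes -H the primary spelling of the header option (matching Bison) while keeping -h as a deprecated alias, and documents the new STDIN mode. Hypothetical invocations using only the flags visible above:

    lrama -H parse.y             # new spelling: also produce a header file
    lrama -h parse.y             # deprecated spelling, still accepted
    lrama - parse.y < parse.y    # STDIN mode: the grammar is read from STDIN
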
data/lib/lrama/output.rb CHANGED
@@ -7,7 +7,7 @@ module Lrama
     extend Forwardable
     include Report::Duration
 
-    attr_reader :grammar_file_path, :context, :grammar, :error_recovery
+    attr_reader :grammar_file_path, :context, :grammar, :error_recovery, :include_header
 
     def_delegators "@context", :yyfinal, :yylast, :yyntokens, :yynnts, :yynrules, :yynstates,
                                :yymaxutok, :yypact_ninf, :yytable_ninf
@@ -28,6 +28,7 @@ module Lrama
       @context = context
       @grammar = grammar
       @error_recovery = error_recovery
+      @include_header = header_file_path ? header_file_path.sub("./", "") : nil
     end
 
     if ERB.instance_method(:initialize).parameters.last.first == :key
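
@include_header keeps a normalized copy of the header path so templates can reach it through the new attr_reader. Presumably the C template guards on it when emitting the include; a hypothetical ERB fragment in that style (`output` is the name bound by result_with_hash further down):

    <%- if output.include_header -%>
    #include "<%= output.include_header %>"
    <%- end -%>
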
@@ -40,11 +41,8 @@ module Lrama
       end
     end
 
-    def eval_template(file, path)
-      erb = self.class.erb(File.read(file))
-      erb.filename = file
-      tmp = erb.result_with_hash(context: @context, output: self)
-      replace_special_variables(tmp, path)
+    def render_partial(file)
+      render_template(partial_file(file))
     end
 
     def render
@@ -143,7 +141,7 @@ module Lrama
         str << <<-STR
         case #{sym.enum_name}: /* #{sym.comment} */
 #line #{sym.printer.lineno} "#{@grammar_file_path}"
-          #{sym.printer.translated_code(sym.tag)}
+          {#{sym.printer.translated_code(sym.tag)}}
 #line [@oline@] [@ofile@]
         break;
 
@@ -160,7 +158,7 @@ module Lrama
       <<-STR
         #{comment}
 #line #{@grammar.initial_action.line} "#{@grammar_file_path}"
-        #{@grammar.initial_action.translated_code}
+        {#{@grammar.initial_action.translated_code}}
       STR
     end
 
@@ -173,7 +171,7 @@ module Lrama
         str << <<-STR
         case #{sym.enum_name}: /* #{sym.comment} */
 #line #{sym.error_token.lineno} "#{@grammar_file_path}"
-          #{sym.error_token.translated_code(sym.tag)}
+          {#{sym.error_token.translated_code(sym.tag)}}
 #line [@oline@] [@ofile@]
         break;
 
@@ -190,14 +188,13 @@ module Lrama
       @context.states.rules.each do |rule|
         next unless rule.code
 
-        rule = rule
         code = rule.code
         spaces = " " * (code.column - 1)
 
         str << <<-STR
         case #{rule.id + 1}: /* #{rule.as_comment} */
 #line #{code.line} "#{@grammar_file_path}"
-#{spaces}#{rule.translated_code}
+#{spaces}{#{rule.translated_code}}
 #line [@oline@] [@ofile@]
         break;
 
@@ -212,14 +209,14 @@ module Lrama
       str
     end
 
-    def omit_braces_and_blanks(param)
-      param[1..-2].strip
+    def omit_blanks(param)
+      param.strip
     end
 
     # b4_parse_param
     def parse_param
       if @grammar.parse_param
-        omit_braces_and_blanks(@grammar.parse_param)
+        omit_blanks(@grammar.parse_param)
       else
         ""
       end
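
The four {#{...}} wrappings above mirror the lexer rewrite: the old lex_user_code kept the braces in the User_code token (its buffer started as "{" and the closing "}" was appended before returning), while the new lex_c_code collects only the text between the delimiters, so Output now adds the braces back itself. For the same reason omit_braces_and_blanks shrinks to omit_blanks: parse_param and lex_param values no longer arrive brace-wrapped. Schematic shape of one emitted case arm after the change, user action abridged:

    case 42: /* rule: ... */
#line 10 "parse.y"
        { /* user action; braces now supplied by Output */ }
#line [@oline@] [@ofile@]
        break;
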
@@ -227,7 +224,7 @@ module Lrama
 
     def lex_param
       if @grammar.lex_param
-        omit_braces_and_blanks(@grammar.lex_param)
+        omit_blanks(@grammar.lex_param)
       else
         ""
       end
@@ -354,6 +351,17 @@ module Lrama
 
     private
 
+    def eval_template(file, path)
+      tmp = render_template(file)
+      replace_special_variables(tmp, path)
+    end
+
+    def render_template(file)
+      erb = self.class.erb(File.read(file))
+      erb.filename = file
+      erb.result_with_hash(context: @context, output: self)
+    end
+
     def template_file
       File.join(template_dir, @template_name)
     end
@@ -362,6 +370,10 @@ module Lrama
       File.join(template_dir, "bison/yacc.h")
     end
 
+    def partial_file(file)
+      File.join(template_dir, file)
+    end
+
     def template_dir
       File.expand_path("../../../template", __FILE__)
     end
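
Taken together, render_partial, render_template, and partial_file let a template pull in sibling fragments from the template directory, while eval_template still runs the [@oline@]/[@ofile@] replacement last, so text produced by a partial is substituted along with everything else. A minimal call-site sketch inside a template (the partial name is hypothetical; `output` is the binding passed by render_template):

    <%# e.g. inside template/bison/yacc.c %>
    <%= output.render_partial("bison/_shared_helpers.erb") %>
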