antelope 0.1.8 → 0.1.9
- checksums.yaml +4 -4
- data/.yardopts +2 -0
- data/CONTRIBUTING.md +4 -4
- data/GENERATORS.md +61 -19
- data/README.md +84 -9
- data/TODO.md +58 -0
- data/examples/deterministic.ace +21 -9
- data/examples/example.ace +16 -10
- data/examples/example.output +213 -146
- data/examples/simple.ace +1 -1
- data/lib/antelope/ace/compiler.rb +52 -15
- data/lib/antelope/ace/errors.rb +7 -0
- data/lib/antelope/ace/grammar/generation.rb +3 -3
- data/lib/antelope/ace/grammar/precedences.rb +5 -7
- data/lib/antelope/ace/grammar/productions.rb +36 -11
- data/lib/antelope/ace/grammar/{terminals.rb → symbols.rb} +25 -2
- data/lib/antelope/ace/grammar.rb +12 -3
- data/lib/antelope/ace/precedence.rb +4 -0
- data/lib/antelope/ace/scanner/argument.rb +57 -0
- data/lib/antelope/ace/scanner/first.rb +32 -6
- data/lib/antelope/ace/scanner/second.rb +23 -8
- data/lib/antelope/ace/scanner.rb +32 -26
- data/lib/antelope/ace/token.rb +21 -2
- data/lib/antelope/cli.rb +22 -2
- data/lib/antelope/generation/constructor/first.rb +1 -1
- data/lib/antelope/generation/constructor.rb +2 -0
- data/lib/antelope/generation/null.rb +13 -0
- data/lib/antelope/generation/recognizer/rule.rb +4 -3
- data/lib/antelope/generation/recognizer/state.rb +18 -3
- data/lib/antelope/generation/recognizer.rb +19 -24
- data/lib/antelope/generation/tableizer.rb +30 -2
- data/lib/antelope/generation.rb +1 -0
- data/lib/antelope/generator/base.rb +150 -13
- data/lib/antelope/generator/c.rb +11 -0
- data/lib/antelope/generator/c_header.rb +105 -0
- data/lib/antelope/generator/c_source.rb +39 -0
- data/lib/antelope/generator/null.rb +5 -0
- data/lib/antelope/generator/output.rb +3 -3
- data/lib/antelope/generator/ruby.rb +23 -5
- data/lib/antelope/generator/templates/c_header.ant +36 -0
- data/lib/antelope/generator/templates/c_source.ant +202 -0
- data/lib/antelope/generator/templates/output.ant +68 -0
- data/lib/antelope/generator/templates/ruby.ant +146 -0
- data/lib/antelope/generator.rb +15 -3
- data/lib/antelope/template/compiler.rb +78 -0
- data/lib/antelope/template/errors.rb +9 -0
- data/lib/antelope/template/scanner.rb +111 -0
- data/lib/antelope/template.rb +60 -0
- data/lib/antelope/version.rb +1 -1
- data/lib/antelope.rb +1 -0
- data/spec/antelope/template_spec.rb +39 -0
- data/subl/Ace (Ruby).JSON-tmLanguage +94 -0
- data/subl/Ace (Ruby).tmLanguage +153 -0
- metadata +21 -8
- data/examples/deterministic.output +0 -131
- data/examples/simple.output +0 -121
- data/lib/antelope/generator/templates/output.erb +0 -56
- data/lib/antelope/generator/templates/ruby.erb +0 -63
data/lib/antelope/generator/output.rb
CHANGED
@@ -1,7 +1,5 @@
  # encoding: utf-8

- require "pp"
-
  module Antelope
    module Generator

@@ -11,6 +9,8 @@ module Antelope

      register_as "output"

+     has_directive "output.show-lookahead", Boolean
+
      # Defines singleton method for every mod that the grammar passed
      # to the generator.
      #
@@ -42,7 +42,7 @@ module Antelope
      #
      # @return [void]
      def generate
-       template "output"
+       template "output", "#{file}.output"
      end
    end
  end
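For context, the class-level calls visible in this diff (`register_as`, `has_directive`, and the two-argument `template`) are the hooks a generator subclass uses. Below is a minimal sketch of a custom generator built only from those calls; the "pretty" name, the "pretty.verbose" directive, and the output filename are hypothetical, and reusing the gem's own "output" template is just for illustration.

```ruby
require "antelope"

module Antelope
  module Generator
    # Hypothetical generator: renders the existing "output" template
    # under a different file extension.
    class Pretty < Base
      register_as "pretty"                      # name used to select this generator
      has_directive "pretty.verbose", Boolean   # typed directive, like output.show-lookahead

      # Render the "output" template into <file>.pretty.
      def generate
        template "output", "#{file}.pretty"
      end
    end
  end
end
```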
data/lib/antelope/generator/ruby.rb
CHANGED
@@ -8,7 +8,10 @@ module Antelope
    # Generates a ruby parser.
    class Ruby < Base

-     register_as "ruby"
+     register_as "ruby", "rubby"
+
+     has_directive "panic-mode", Boolean
+     has_directive "ruby.error-class", String

      # Creates an action table for the parser.
      #
@@ -36,23 +39,38 @@ module Antelope
          block = if block.empty?
            "proc { |_| _ }"
          else
-           "proc #{
+           "proc #{block}"
          end

          out << block << "],\n"
        end

-       out.chomp!(",\n")
+       out.chomp!( ",\n")

        out << "]"
      end

+     def define_own_handler?
+       directives.ruby.error_class? or
+         panic_mode?
+     end
+
+     def panic_mode?
+       directives.panic_mode &&
+         directives.ruby.error_class? &&
+         grammar.contains_error_token?
+     end
+
+     def error_class
+       directives.ruby.error_class
+     end
+
      # Actually performs the generation.  Takes the template from
-     # ruby.
+     # ruby.ant and outputs it to `<file>.rb`.
      #
      # @return [void]
      def generate
-       template "ruby"
+       template "ruby", "#{file}.rb" do |body|
          sprintf(grammar.compiler.body, :write => body)
        end
      end
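The action-table builder above emits one entry per production, falling back to the identity proc when a rule has no block. The ruby.ant template further down documents the row shape as `Array<Array<(Symbol, Numeric, Proc)>>`: left-hand side, number of right-hand tokens, reduction block. A purely illustrative entry (the `expr`/`term` rules are hypothetical, not from any shipped grammar):

```ruby
# Shape of PRODUCTIONS rows emitted by the Ruby generator.
PRODUCTIONS = [
  # expr: expr '+' term   { |a, _, b| a + b }
  [:expr, 3, proc { |a, _, b| a + b }],
  # a rule without an action block gets the identity proc
  [:term, 1, proc { |_| _ }],
].freeze
```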
data/lib/antelope/generator/templates/c_header.ant
ADDED
@@ -0,0 +1,36 @@
+ #ifndef _%{guard_name}_H
+ #define _%{guard_name}_H
+
+ % if define_stype?
+ typedef union {
+   %{union_body}
+ } %{stype};
+ % end
+
+ enum %{prefix}tokentype {
+ % starting = 257
+   %{token_prefix}EOF = 0,
+ % grammar.terminals.each_with_index do |terminal, i|
+   %{token_prefix}%{terminal.name} = %{starting + i},
+ % end
+ };
+
+ typedef struct {
+   struct %{prefix}stack_element* elements;
+   unsigned int current;
+   unsigned int max;
+   unsigned char free;
+ } %{prefix}pstate;
+
+ #define %{upper_prefix}PUSH_MORE -1
+ #define %{upper_prefix}TERMINALS %{grammar.terminals.size}
+ #define %{upper_prefix}STATES %{table.size}
+
+ const char* %{prefix}token_string(%{terminal_type} token);
+ int %{prefix}lex(%{stype}* lval%{lex_params});
+ int %{prefix}parse_push(%{parse_params});
+ int %{prefix}parse_pull(%{parse_params});
+ %{prefix}pstate* %{prefix}pstate_new();
+ void %{prefix}pstate_delete(%{prefix}pstate* stack);
+
+ #endif
data/lib/antelope/generator/templates/c_source.ant
ADDED
@@ -0,0 +1,202 @@
+ #include "%{file}.h"
+ #include <stdlib.h>
+ #include <stdio.h>
+ #include <stdint.h>
+ #include <limits.h>
+ #include <alloca.h>
+ #include <string.h>
+
+ static const unsigned int %{prefix}states[][%{upper_prefix}TERMINALS] = {
+ % table.each do |state|
+   { %{action_for(state)} },
+ % end
+ };
+
+ #ifndef %{upper_prefix}INITSTACK
+ # define %{upper_prefix}INITSTACK 64
+ #endif
+
+ #ifndef %{upper_prefix}ALLOC
+ # define %{upper_prefix}ALLOC malloc
+ #endif
+
+ #ifndef %{upper_prefix}FREE
+ # define %{upper_prefix}FREE free
+ #endif
+
+ struct %{prefix}stack_element {
+   %{stype}* val;
+   unsigned int state;
+ };
+
+ #define %{upper_prefix}PUSH_STACK(stack, v, s) do { \
+   int err; \
+   if(stack.current + 1 > stack.max) \
+   { \
+     if((err = %{prefix}resize_stack(&stack)) != 0) \
+       return err; \
+   } \
+   stack.elements[stack.current].val = val; \
+   stack.elements[stack.current].state = s; \
+ } while(0)
+
+ #define %{upper_prefix}POP_STACK(stack, out) do { \
+   out = stack.elements + stack.current; \
+   stack.current -= 1; \
+ } while(0)
+
+ const char* %{prefix}token_string(%{terminal_type} token)
+ {
+   switch(token)
+   {
+ % grammar.terminals.each do |terminal|
+     case %{token_prefix}%{terminal.name}:
+       return %{terminal.to_s.inspect};
+ % end
+   }
+
+   return "(unknown)";
+ }
+
+ %{prefix}pstate* %{prefix}state_new()
+ {
+   %{prefix}pstate* stack = %{upper_prefix}ALLOC(sizeof(%{prefix}pstate));
+   if(stack != NULL)
+   {
+     stack->max = %{upper_prefix}INITSTACK;
+     stack->elements = %{upper_prefix}ALLOC(sizeof(struct %{prefix}stack_element) * stack->max);
+     stack->current = 0;
+     stack->free = 1;
+   }
+
+   return stack;
+ }
+
+ void %{prefix}state_delete(%{prefix}pstate* stack)
+ {
+   stack->current = 0;
+   stack->max = 0;
+   stack->free = 0;
+
+   if(stack->free)
+   {
+     %{upper_prefix}FREE(stack->elements);
+     %{upper_prefix}FREE(stack);
+   }
+
+   stack->elements = 0;
+ }
+
+ int %{prefix}resize_stack(%{prefix}pstate* stack)
+ {
+   void* body;
+   int new_max;
+
+   if(stack->current + 1 > stack->max)
+   {
+     new_max = stack->max * 2;
+   }
+   else if((stack->current / 2) < stack->max)
+   {
+     new_max = stack->max / 2;
+   }
+   else
+   {
+     return 0;
+   }
+
+
+   body = %{upper_prefix}ALLOC(sizeof(struct %{prefix}stack_element) * new_max);
+
+   if(body == NULL)
+   {
+     return ENOMEM;
+   }
+
+   memcpy(body, stack->elements, sizeof(struct %{prefix}stack_element) * stack->current);
+
+   if(stack->free)
+   {
+     %{upper_prefix}FREE(stack->elements);
+   }
+
+   stack->elements = body;
+   stack->max = new_max;
+
+   return 0;
+ }
+
+ int %{prefix}parse_push(%{prefix}pstate* stack int token,
+                         %{upper_prefix}STYPE* val%{parse_params})
+ {
+   unsigned int action;
+   %{prefix}stack_element* current_state;
+
+   current_state = stack.elements + stack.current - 1;
+
+   if(token > %{upper_prefix}TERMINALS || token < 0)
+     %{prefix}abort;
+
+ get_action:
+   action = states[current_state->state][token];
+
+   if(action < %{upper_prefix}STATES)
+   {
+     %{upper_prefix}PUSH_STACK(stack, val, action - 1);
+     return %{upper_prefix}PUSH_MORE;
+   }
+
+   switch(action)
+   {
+     case 0:
+       %{prefix}error;
+       break;
+     case %{table.size + 1}:
+       stack.current = 0;
+       return 0;
+ % productions.each_with_index do |(label, size, block), i|
+     case %{i + table.size + 2}: { // %{i}
+       %{upper_prefix}STYPE* %{prefix}vals[%{size}];
+       %{upper_prefix}STYPE* %{prefix}out;
+ % size.times do |e|
+       %{upper_prefix}POP_STACK(stack, %{prefix}vals[%{e}]);
+ % end
+       %{prefix}out = %{prefix}vals[0];
+       current_state = stack.elements + stack.current - 1;
+
+       do {
+         {{= cify_block(block) }}
+       } while(0);
+
+       token = %{symbols[label.name]};
+       action = states[current_state->state][token];
+       %{upper_prefix}PUSH_STACK(stack, %{prefix}out, action);
+       break;
+     }
+ % end
+   }
+
+   return %{upper_prefix}PUSH_MORE;
+ }
+
+ int %{prefix}parse_pull(%{parse_params})
+ {
+   %{prefix}pstate stack;
+   int token;
+   unsigned int action;
+   %{upper_prefix}STYPE val;
+   %{prefix}stack_element* current_state;
+
+   stack.max = %{upper_prefix}INITSTACK;
+   stack.elements = alloca(sizeof(struct %{prefix}stack_element) * stack.max);
+   stack.current = 0;
+   stack.free = 0;
+
+   %{upper_prefix}PUSH_STACK(stack, NULL, 1);
+
+   while(stack.current > 0)
+   {
+     token = %{prefix}lex(&val%{params});
+     %{prefix}parse_push(&stack, token, &val%{parse_params});
+   }
+ }
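Reading the template above, the generated states[][] table packs every parser action into a single unsigned integer: values below YYSTATES (table.size) shift and enter state action - 1, YYSTATES + 1 accepts, and YYSTATES + 2 + i reduces by production i; 0 is listed as the error case in the switch, although the shift test comes first in the template as written. A small Ruby sketch of that decoding, purely for illustration (antelope does not ship this helper):

```ruby
# Decode one entry of the generated action table.
# yystates corresponds to %{upper_prefix}STATES, i.e. table.size.
def decode_action(action, yystates)
  if action.zero?
    [:error]                           # case 0 in the switch
  elsif action < yystates
    [:shift, action - 1]               # if(action < YYSTATES)
  elsif action == yystates + 1
    [:accept]                          # case table.size + 1
  else
    [:reduce, action - yystates - 2]   # case i + table.size + 2 reduces production i
  end
end

decode_action(4, 10)   # => [:shift, 3]
decode_action(11, 10)  # => [:accept]
decode_action(13, 10)  # => [:reduce, 1]
```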
data/lib/antelope/generator/templates/output.ant
ADDED
@@ -0,0 +1,68 @@
+ Productions:
+ {{
+   len = grammar.all_productions.size.to_s.size
+   productions = grammar.all_productions.
+     map { |x| ["#{x.label}: #{x.items.join(' ')}", x.block] }
+   body = productions.map { |_| _.first.size }.max
+ }}
+ {{ productions.each_with_index do |prod, i| }}
+   {{= "%#{len}s" % i }} {{= "%-#{body}s" % prod[0] }} {{= prod[1] }}
+ {{ end }}
+
+ {{ if unused_symbols.any? }}
+ Symbols unused in grammar:
+ {{ unused_symbols.each do |sym| }}
+   {{= sym }}
+ {{ end }}
+ {{ end }}
+
+ Precedence:
+   --- highest
+ {{ grammar.precedence.each do |pr| }}
+   {{= "%-8s" % pr.type }} {{= pr.level }}:
+     {{= "{" << pr.tokens.to_a.join(", ") << "}" }}
+ {{ end }}
+   --- lowest
+
+ {{
+   len = table.flatten.map(&:keys).flatten.map(&:size).max
+   states = grammar.states.to_a
+ }}
+
+ {{ table.each_with_index do |v, i| }}
+ {{ state = states[i] }}
+ State {{= i }}:
+ {{ state.rules.each do |rule| }}
+   {{= rule }}
+     {{= "{" << rule.lookahead.to_a.join(", ") << "}" }}
+ {{ end }}
+ {{
+   transitions = v.each.select { |_, a| a && a[0] == :state }
+   reductions = v.each.select { |_, a| a && a[0] == :reduce}
+   accepting = v.each.select { |_, a| a && a[0] == :accept}
+   conflicts = tableizer.conflicts[i].each
+   thing = [:transitions, :reductions, :accepting]
+   num_type = {
+     transitions: "State",
+     reductions: "Rule",
+     accepting: "Rule"
+   }
+   h = Hash[thing.zip([transitions, reductions, accepting])]
+ }}
+ {{ h.each do |key, value| }}
+ {{ next unless value.any? }}
+   {{= key }}:
+ {{ value.each do |token, (_, name)| }}
+ {{ token_value = grammar.terminals.
+      find { |_| _.name == token } || token }}
+     {{= token_value }}: {{= num_type[key] }} {{= name }}
+ {{ end }}
+ {{ end }}
+ {{ if conflicts.any? }}
+   conflicts:
+ {{ conflicts.each do |token, (first, second, rule, terminal)| }}
+     {{= token }}: {{= first.join(" ") }}/{{= second.join(" ") }} ({{= rule }} vs {{= terminal }})
+ {{ end }}
+ {{ end }}
+
+ {{ end }}
data/lib/antelope/generator/templates/ruby.ant
ADDED
@@ -0,0 +1,146 @@
+
+ # This file assumes that the output of the generator will be placed
+ # within a module or a class.  However, the module/class requires a
+ # `type` method, which takes a terminal and gives its type, as a
+ # symbol.  These types should line up with the terminals that were
+ # defined in the original grammar.
+
+ # The actions to take during parsing.  In every state, there are a
+ # set of acceptable peek tokens; this table tells the parser what
+ # to do on each acceptable peek token.  The possible actions include
+ # `:accept`, `:reduce`, and `:state`; `:accept` means to accept the
+ # input and return the value of the pasing.  `:reduce` means to
+ # reduce the top of the stack into a given nonterminal.  `:state`
+ # means to transition to another state.
+ #
+ # @return [Array<Hash<(Symbol, Array<(Symbol, Numeric)>)>>]
+ ACTION_TABLE = {{= generate_action_table }}.freeze
+
+ # A list of all of the productions.  Only includes the left-hand side,
+ # the number of tokens on the right-hand side, and the block to call
+ # on reduction.
+ #
+ # @return [Array<Array<(Symbol, Numeric, Proc)>>]
+ PRODUCTIONS = {{= generate_productions_list }}.freeze
+
+ # Runs the parser.
+ #
+ # @param input [Array<Object>] the input to run the parser over.
+ # @return [Object] the result of the accept.
+ def parse(input)
+   stack = []
+   stack.push([nil, 0])
+   input = input.dup
+   last = nil
+
+   until stack.empty? do
+     last = parse_action(stack, input)
+   end
+
+   last
+
+ end
+
+ # Actually performs the parsing action on the given stack on input.
+ # If you want to implement a push parser, than messing with this
+ # method is probably the way to go.
+ #
+ # @param stack [Array<Array<(Object, Numeric)>>] the stack of the
+ #   parser.  The actual order of the stack is important.
+ # @param input [Array<Object>] the input to run the parser over.
+ #   The elements of this may be passed to the `type` method.
+ # @return [Object] the result of the last accepting reduction.
+ def parse_action(stack, input)
+   last = nil
+   peek_token = if input.empty?
+     :$end
+   else
+     type(input.first)
+   end
+
+   action = ACTION_TABLE[stack.last.last].fetch(peek_token) do
+     ACTION_TABLE[stack.last.last].fetch(:$default)
+   end
+   case action.first
+   when :accept
+     production = PRODUCTIONS[action.last]
+     last = stack.pop(production[1]).first.first
+     stack.pop
+   when :reduce
+     production = PRODUCTIONS[action.last]
+     removing = stack.pop(production[1])
+     value = instance_exec(*removing.map(&:first), &production[2])
+     goto = ACTION_TABLE[stack.last.last][production[0]]
+     stack.push([value, goto.last])
+   when :state
+     stack.push([input.shift, action.last])
+   else
+     raise NotImplementedError, "Unknown action #{action.first}"
+   end
+
+   last
+
+ rescue KeyError => e
+   if handle_error(
+     { :stack => stack,
+       :peek => peek_token,
+       :remaining => input,
+       :error => e,
+       :expected => ACTION_TABLE[stack.last.last].keys
+     })
+     retry
+   end
+ end
+
+ {{ if define_own_handler? }}
+ def handle_error(data, _ = false)
+ {{ if panic_mode? }}
+   if _ || data[:peek] == :$end # we can't recover if
+                                # we're at the end
+ {{ end }}
+   raise {{= error_class }},
+     "Unexpected token #{data[:peek]}; " \
+     "expected one of #{data[:expected].join(', ')}",
+     data[:error].backtrace
+ {{ if panic_mode? }}
+   end
+
+   new_peek = :$error
+   acceptable_state = false
+   state = nil
+
+   until data[:stack].empty? or acceptable_state
+     state = data[:stack].last.last
+
+     if ACTION_TABLE[state].key?(new_peek)
+       acceptable_state = true
+     else
+       data[:stack].pop # discard
+     end
+   end
+
+   return handle_error(data, true) unless acceptable_state
+
+   action = ACTION_TABLE[state][new_peek]
+   lookaheads = nil
+
+   until lookaheads
+     if action[0] == :state
+       lookaheads = ACTION_TABLE[action.last].keys
+     elsif action[0] == :reduce
+       rule = PRODUCTIONS[action.last]
+       action = ACTION_TABLE[stack[-rule[1]].last][rule[0]]
+     end
+   end
+
+   until data[:remaining].empty? || lookaheads.
+       include?(data[:remaining][0].first)
+     data[:remaining].shift
+   end
+
+   data[:remaining].unshift([new_peek, data[:error]])
+   true
+
+ {{ end }}
+ end
+ {{ end }}
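The comments at the top of this template describe the contract for hosting the generated parser: the output is pasted into a module or class (the Ruby generator above feeds it through the grammar's %{write} slot via sprintf), and that class must provide a `type` method mapping each input element to its terminal symbol. A hedged sketch of such a host; the CalcParser name and the [type, value] token format are illustrative, not mandated by antelope:

```ruby
class CalcParser
  # The generated parser body (ACTION_TABLE, PRODUCTIONS, parse,
  # parse_action, and optionally handle_error) is assumed to be
  # included here, e.g. from the file the Ruby generator wrote.

  # Map an input element to the terminal name used in the grammar.
  def type(token)
    token.first
  end
end

tokens = [[:NUMBER, 1], [:PLUS, nil], [:NUMBER, 2]]  # hypothetical token stream
CalcParser.new.parse(tokens)  # => result of the accepting reduction
```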
data/lib/antelope/generator.rb
CHANGED
@@ -16,6 +16,15 @@ module Antelope
      def generators
        @_generators ||= Hash.new { |h, k| h[k] = Generator::Null }
      end
+     # Returns a hash of all of the directives that are available in
+     # the generators of this module.
+     #
+     # @see .generators
+     # @return [Hash]
+     def directives
+       generators.values.map(&:directives).
+         inject({}, :merge)
+     end

      # Registers a generator with the given names.  If multiple names
      # are given, they are assigned the generator as a value in the
@@ -27,15 +36,14 @@ module Antelope
      # @param name [String, Symbol] a name to associate the generator
      #   with.
      def register_generator(generator, *names)
-       names = [names].flatten
-       raise ArgumentError,
+       names = [names].flatten raise ArgumentError,
          "Requires at least one name" unless names.any?
        raise ArgumentError,
          "All name values must be a Symbol or string" unless names.
          all? {|_| [Symbol, String].include?(_.class) }

        names.each do |name|
-         generators[name] = generator
+         generators[name.to_s.downcase] = generator
        end
      end

@@ -48,3 +56,7 @@ require "antelope/generator/base"
  require "antelope/generator/group"
  require "antelope/generator/output"
  require "antelope/generator/ruby"
+ require "antelope/generator/null"
+ require "antelope/generator/c_header"
+ require "antelope/generator/c_source"
+ require "antelope/generator/c"
data/lib/antelope/template/compiler.rb
ADDED
@@ -0,0 +1,78 @@
+ module Antelope
+   class Template
+     class Compiler
+
+       attr_reader :buffer
+
+       attr_reader :tokens
+
+       def initialize(tokens, buffer_variable = "_out")
+         @tokens = tokens.dup
+         @buffer = ""
+         @buffer_variable = buffer_variable
+       end
+
+       def compile
+         merge_text_tokens
+
+         @buffer = "#{@buffer_variable} ||= \"\"\n"
+
+         until @tokens.empty?
+           token = @tokens.shift
+           parse_method = "parse_#{token[0]}".intern
+
+           send(parse_method, token[1])
+         end
+
+         @buffer << "#{@buffer_variable}\n"
+
+         @buffer
+
+       rescue NoMethodError => e
+
+         if e.name == parse_method
+           raise NoTokenError, "No token #{token[0]} exists"
+         else
+           raise
+         end
+       end
+
+       private
+
+       def parse_text(value)
+         buffer << "#{@buffer_variable} << #{value.to_s.inspect}\n"
+       end
+
+       def parse_tag(value)
+         value.gsub!(/\A\s*([\s\S]*?)\s*\Z/, "\\1")
+         buffer << "#{value}\n"
+       end
+
+       def parse_output_tag(value)
+         value.gsub!(/\A\s*([\s\S]*?)\s*\Z/, "\\1")
+         buffer << "#{@buffer_variable} << begin\n  " \
+           "#{value}\nend.to_s\n"
+       end
+
+       def parse_newline(_)
+         parse_text("\n")
+       end
+
+       def parse_comment_tag(_)
+       end
+
+       def merge_text_tokens
+         new_tokens = []
+         @tokens.chunk(&:first).each do |type, tokens|
+           if type == :text
+             new_tokens << [:text, tokens.map(&:last).join('')]
+           else
+             new_tokens.push(*tokens)
+           end
+         end
+
+         @tokens = new_tokens
+       end
+     end
+   end
+ end
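The compiler turns a token stream into Ruby source that builds up an output string in `_out`. A small sketch of that round trip, assuming the [type, value] pair format the parse_* methods expect (types :text, :tag, :output_tag, :newline, :comment_tag, normally produced by Antelope::Template::Scanner); the "Hello" tokens and the eval at the end are only for illustration:

```ruby
require "antelope"

tokens = [
  [:text,       "Hello, "],
  [:output_tag, " name.upcase "],  # i.e. {{= name.upcase }}
  [:newline,    "\n"]
]

code = Antelope::Template::Compiler.new(tokens).compile
# code is Ruby source along the lines of:
#   _out ||= ""
#   _out << "Hello, "
#   _out << begin
#     name.upcase
#   end.to_s
#   _out << "\n"
#   _out

name = "antelope"
eval(code)  # => "Hello, ANTELOPE\n"
```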