hbs 0.1.0
- data/.gitignore +4 -0
- data/.gitmodules +3 -0
- data/.rspec +1 -0
- data/Gemfile +4 -0
- data/README.mdown +20 -0
- data/Rakefile +5 -0
- data/hbs.gemspec +28 -0
- data/js/.gitignore +5 -0
- data/js/Gemfile +4 -0
- data/js/LICENSE +20 -0
- data/js/README.markdown +224 -0
- data/js/Rakefile +107 -0
- data/js/bench/benchwarmer.js +147 -0
- data/js/bench/handlebars.js +166 -0
- data/js/lib/handlebars.js +15 -0
- data/js/lib/handlebars/ast.js +103 -0
- data/js/lib/handlebars/base.js +114 -0
- data/js/lib/handlebars/compiler.js +739 -0
- data/js/lib/handlebars/debug.js +29 -0
- data/js/lib/handlebars/printer.js +138 -0
- data/js/lib/handlebars/utils.js +66 -0
- data/js/lib/handlebars/visitor.js +13 -0
- data/js/spec/acceptance_spec.rb +100 -0
- data/js/spec/parser_spec.rb +259 -0
- data/js/spec/qunit_spec.js +836 -0
- data/js/spec/spec_helper.rb +106 -0
- data/js/spec/tokenizer_spec.rb +227 -0
- data/js/src/handlebars.l +34 -0
- data/js/src/handlebars.yy +99 -0
- data/lib/handlebars.rb +23 -0
- data/lib/handlebars/loader.rb +29 -0
- data/lib/handlebars/version.rb +3 -0
- data/spec/handlebars_spec.rb +32 -0
- metadata +112 -0
data/js/spec/spec_helper.rb
ADDED
@@ -0,0 +1,106 @@
require "v8"

# Monkey patches due to bugs in RubyRacer
class V8::JSError
  def initialize(try, to)
    @to = to
    begin
      super(initialize_unsafe(try))
    rescue Exception => e
      # Original code does not make an Array here
      @boundaries = [Boundary.new(:rbframes => e.backtrace)]
      @value = e
      super("BUG! please report. JSError#initialize failed!: #{e.message}")
    end
  end

  def parse_js_frames(try)
    raw = @to.rb(try.StackTrace())
    if raw && !raw.empty?
      raw.split("\n")[1..-1].tap do |frames|
        # Original code uses strip!, and the frames are not guaranteed to be strippable
        frames.each {|frame| frame.strip.chomp!(",")}
      end
    else
      []
    end
  end
end

module Handlebars
  module Spec
    def self.js_backtrace(context)
      begin
        context.eval("throw")
      rescue V8::JSError => e
        return e.backtrace(:javascript)
      end
    end

    def self.remove_exports(string)
      match = string.match(%r{\A(.*?)^// BEGIN\(BROWSER\)\n(.*)\n^// END\(BROWSER\)(.*?)\Z}m)
      prelines = match ? match[1].count("\n") + 1 : 0
      ret = match ? match[2] : string
      ("\n" * prelines) + ret
    end

    def self.js_load(file)
      str = File.read(file)
      CONTEXT.eval(remove_exports(str), file)
    end

    CONTEXT = V8::Context.new
    CONTEXT.instance_eval do |context|
      context["exports"] = nil

      context["p"] = proc do |val|
        p val if ENV["DEBUG_JS"]
      end

      context["puts"] = proc do |val|
        puts val if ENV["DEBUG_JS"]
      end

      context["puts_node"] = proc do |val|
        puts context["Handlebars"]["PrintVisitor"].new.accept(val)
        puts
      end

      context["puts_caller"] = proc do
        puts "BACKTRACE:"
        puts Handlebars::Spec.js_backtrace(context)
        puts
      end

      Handlebars::Spec.js_load('lib/handlebars/parser.js')
      Handlebars::Spec.js_load('lib/handlebars/base.js');
      Handlebars::Spec.js_load('lib/handlebars/ast.js');
      Handlebars::Spec.js_load('lib/handlebars/visitor.js');
      Handlebars::Spec.js_load('lib/handlebars/printer.js')
      Handlebars::Spec.js_load('lib/handlebars/utils.js')
      Handlebars::Spec.js_load('lib/handlebars/compiler.js')
      Handlebars::Spec.js_load('lib/handlebars.js')

      context["Handlebars"]["logger"]["level"] = ENV["DEBUG_JS"] ? context["Handlebars"]["logger"][ENV["DEBUG_JS"]] : 4

      context["Handlebars"]["logger"]["log"] = proc do |level, str|
        logger_level = context["Handlebars"]["logger"]["level"].to_i

        if logger_level <= level
          puts str
        end
      end
    end
  end
end


require "test/unit/assertions"

RSpec.configure do |config|
  config.include Test::Unit::Assertions

  config.before(:all) do
    @context = Handlebars::Spec::CONTEXT
  end
end
data/js/spec/tokenizer_spec.rb
ADDED
@@ -0,0 +1,227 @@
require "spec_helper"
require "timeout"

describe "Tokenizer" do
  let(:parser) { @context["handlebars"] }
  let(:lexer) { @context["handlebars"]["lexer"] }

  Token = Struct.new(:name, :text)

  def tokenize(string)
    lexer.setInput(string)
    out = []

    while token = lexer.lex
      # p token
      result = parser.terminals_[token] || token
      # p result
      break if !result || result == "EOF" || result == "INVALID"
      out << Token.new(result, lexer.yytext)
    end

    out
  end

  RSpec::Matchers.define :match_tokens do |tokens|
    match do |result|
      result.map(&:name).should == tokens
    end
  end

  RSpec::Matchers.define :be_token do |name, string|
    match do |token|
      token.name.should == name
      token.text.should == string
    end
  end

  it "tokenizes a simple mustache as 'OPEN ID CLOSE'" do
    result = tokenize("{{foo}}")
    result.should match_tokens(%w(OPEN ID CLOSE))
    result[1].should be_token("ID", "foo")
  end

  it "tokenizes a simple path" do
    result = tokenize("{{foo/bar}}")
    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
  end

  it "allows dot notation" do
    result = tokenize("{{foo.bar}}")
    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))

    tokenize("{{foo.bar.baz}}").should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
  end

  it "tokenizes {{.}} as OPEN ID CLOSE" do
    result = tokenize("{{.}}")
    result.should match_tokens(%w(OPEN ID CLOSE))
  end

  it "tokenizes a path as 'OPEN (ID SEP)* ID CLOSE'" do
    result = tokenize("{{../foo/bar}}")
    result.should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
    result[1].should be_token("ID", "..")
  end

  it "tokenizes a path with .. as a parent path" do
    result = tokenize("{{../foo.bar}}")
    result.should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
    result[1].should be_token("ID", "..")
  end

  it "tokenizes a path with this/foo as OPEN ID SEP ID CLOSE" do
    result = tokenize("{{this/foo}}")
    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
    result[1].should be_token("ID", "this")
    result[3].should be_token("ID", "foo")
  end

  it "tokenizes a simple mustache with spaces as 'OPEN ID CLOSE'" do
    result = tokenize("{{ foo }}")
    result.should match_tokens(%w(OPEN ID CLOSE))
    result[1].should be_token("ID", "foo")
  end

  it "tokenizes a simple mustache with line breaks as 'OPEN ID ID CLOSE'" do
    result = tokenize("{{ foo \n bar }}")
    result.should match_tokens(%w(OPEN ID ID CLOSE))
    result[1].should be_token("ID", "foo")
  end

  it "tokenizes raw content as 'CONTENT'" do
    result = tokenize("foo {{ bar }} baz")
    result.should match_tokens(%w(CONTENT OPEN ID CLOSE CONTENT))
    result[0].should be_token("CONTENT", "foo ")
    result[4].should be_token("CONTENT", " baz")
  end

  it "tokenizes a partial as 'OPEN_PARTIAL ID CLOSE'" do
    result = tokenize("{{> foo}}")
    result.should match_tokens(%w(OPEN_PARTIAL ID CLOSE))
  end

  it "tokenizes a partial with context as 'OPEN_PARTIAL ID ID CLOSE'" do
    result = tokenize("{{> foo bar }}")
    result.should match_tokens(%w(OPEN_PARTIAL ID ID CLOSE))
  end

  it "tokenizes a partial without spaces as 'OPEN_PARTIAL ID CLOSE'" do
    result = tokenize("{{>foo}}")
    result.should match_tokens(%w(OPEN_PARTIAL ID CLOSE))
  end

  it "tokenizes a partial space at the end as 'OPEN_PARTIAL ID CLOSE'" do
    result = tokenize("{{>foo }}")
    result.should match_tokens(%w(OPEN_PARTIAL ID CLOSE))
  end

  it "tokenizes a comment as 'COMMENT'" do
    result = tokenize("foo {{! this is a comment }} bar {{ baz }}")
    result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
    result[1].should be_token("COMMENT", " this is a comment ")
  end

  it "tokenizes open and closing blocks as 'OPEN_BLOCK ID CLOSE ... OPEN_ENDBLOCK ID CLOSE'" do
    result = tokenize("{{#foo}}content{{/foo}}")
    result.should match_tokens(%w(OPEN_BLOCK ID CLOSE CONTENT OPEN_ENDBLOCK ID CLOSE))
  end

  it "tokenizes inverse sections as 'OPEN_INVERSE CLOSE'" do
    tokenize("{{^}}").should match_tokens(%w(OPEN_INVERSE CLOSE))
    tokenize("{{else}}").should match_tokens(%w(OPEN_INVERSE CLOSE))
    tokenize("{{ else }}").should match_tokens(%w(OPEN_INVERSE CLOSE))
  end

  it "tokenizes inverse sections with ID as 'OPEN_INVERSE ID CLOSE'" do
    result = tokenize("{{^foo}}")
    result.should match_tokens(%w(OPEN_INVERSE ID CLOSE))
    result[1].should be_token("ID", "foo")
  end

  it "tokenizes inverse sections with ID and spaces as 'OPEN_INVERSE ID CLOSE'" do
    result = tokenize("{{^ foo }}")
    result.should match_tokens(%w(OPEN_INVERSE ID CLOSE))
    result[1].should be_token("ID", "foo")
  end

  it "tokenizes mustaches with params as 'OPEN ID ID ID CLOSE'" do
    result = tokenize("{{ foo bar baz }}")
    result.should match_tokens(%w(OPEN ID ID ID CLOSE))
    result[1].should be_token("ID", "foo")
    result[2].should be_token("ID", "bar")
    result[3].should be_token("ID", "baz")
  end

  it "tokenizes mustaches with String params as 'OPEN ID ID STRING CLOSE'" do
    result = tokenize("{{ foo bar \"baz\" }}")
    result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
    result[3].should be_token("STRING", "baz")
  end

  it "tokenizes String params with spaces inside as 'STRING'" do
    result = tokenize("{{ foo bar \"baz bat\" }}")
    result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
    result[3].should be_token("STRING", "baz bat")
  end

  it "tokenizes String params with escapes quotes as 'STRING'" do
    result = tokenize(%|{{ foo "bar\\"baz" }}|)
    result.should match_tokens(%w(OPEN ID STRING CLOSE))
    result[2].should be_token("STRING", %{bar"baz})
  end

  it "tokenizes numbers" do
    result = tokenize(%|{{ foo 1 }}|)
    result.should match_tokens(%w(OPEN ID INTEGER CLOSE))
    result[2].should be_token("INTEGER", "1")
  end

  it "tokenizes booleans" do
    result = tokenize(%|{{ foo true }}|)
    result.should match_tokens(%w(OPEN ID BOOLEAN CLOSE))
    result[2].should be_token("BOOLEAN", "true")

    result = tokenize(%|{{ foo false }}|)
    result.should match_tokens(%w(OPEN ID BOOLEAN CLOSE))
    result[2].should be_token("BOOLEAN", "false")
  end

  it "tokenizes hash arguments" do
    result = tokenize("{{ foo bar=baz }}")
    result.should match_tokens %w(OPEN ID ID EQUALS ID CLOSE)

    result = tokenize("{{ foo bar baz=bat }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS ID CLOSE)

    result = tokenize("{{ foo bar baz=1 }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS INTEGER CLOSE)

    result = tokenize("{{ foo bar baz=true }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS BOOLEAN CLOSE)

    result = tokenize("{{ foo bar baz=false }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS BOOLEAN CLOSE)

    result = tokenize("{{ foo bar\n baz=bat }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS ID CLOSE)

    result = tokenize("{{ foo bar baz=\"bat\" }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS STRING CLOSE)

    result = tokenize("{{ foo bar baz=\"bat\" bam=wot }}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS STRING ID EQUALS ID CLOSE)

    result = tokenize("{{foo omg bar=baz bat=\"bam\"}}")
    result.should match_tokens %w(OPEN ID ID ID EQUALS ID ID EQUALS STRING CLOSE)
    result[2].should be_token("ID", "omg")
  end

  it "does not time out in a mustache with a single } followed by EOF" do
    Timeout.timeout(1) { tokenize("{{foo}").should match_tokens(%w(OPEN ID)) }
  end

  it "does not time out in a mustache when invalid ID characters are used" do
    Timeout.timeout(1) { tokenize("{{foo & }}").should match_tokens(%w(OPEN ID)) }
  end
end
data/js/src/handlebars.l
ADDED
@@ -0,0 +1,34 @@
%x mu

%%

[^\x00]*?/("{{") { this.begin("mu"); if (yytext) return 'CONTENT'; }
[^\x00]+ { return 'CONTENT'; }

<mu>"{{>" { return 'OPEN_PARTIAL'; }
<mu>"{{#" { return 'OPEN_BLOCK'; }
<mu>"{{/" { return 'OPEN_ENDBLOCK'; }
<mu>"{{^" { return 'OPEN_INVERSE'; }
<mu>"{{"\s*"else" { return 'OPEN_INVERSE'; }
<mu>"{{{" { return 'OPEN_UNESCAPED'; }
<mu>"{{&" { return 'OPEN_UNESCAPED'; }
<mu>"{{!"[\s\S]*?"}}" { yytext = yytext.substr(3,yyleng-5); this.begin("INITIAL"); return 'COMMENT'; }
<mu>"{{" { return 'OPEN'; }

<mu>"=" { return 'EQUALS'; }
<mu>"."/[} ] { return 'ID'; }
<mu>".." { return 'ID'; }
<mu>[/.] { return 'SEP'; }
<mu>\s+ { /*ignore whitespace*/ }
<mu>"}}}" { this.begin("INITIAL"); return 'CLOSE'; }
<mu>"}}" { this.begin("INITIAL"); return 'CLOSE'; }
<mu>'"'("\\"["]|[^"])*'"' { yytext = yytext.substr(1,yyleng-2).replace(/\\"/g,'"'); return 'STRING'; }
<mu>"true"/[}\s] { return 'BOOLEAN'; }
<mu>"false"/[}\s] { return 'BOOLEAN'; }
<mu>[0-9]+/[}\s] { return 'INTEGER'; }
<mu>[a-zA-Z0-9_$-]+/[=}\s/.] { return 'ID'; }
<mu>. { return 'INVALID'; }

<INITIAL,mu><<EOF>> { return 'EOF'; }
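spec_helper.rb loads lib/handlebars/parser.js, which is generated from the two Jison sources in this gem: the lexer above (handlebars.l) and the grammar below (handlebars.yy). The actual build step lives in data/js/Rakefile, which this page does not show; the task below is only a hypothetical sketch of that step, assuming the jison executable from npm is on the PATH:

# Hypothetical Rakefile fragment (the gem's real task is in data/js/Rakefile,
# not shown here). Assumes the `jison` CLI is installed.
desc "Regenerate lib/handlebars/parser.js from the Jison lexer and grammar"
task :compile_parser do
  # jison names its output after the grammar file, so handlebars.yy is
  # expected to produce handlebars.js in the current directory.
  sh "jison src/handlebars.yy src/handlebars.l"
  mv "handlebars.js", "lib/handlebars/parser.js"
end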
data/js/src/handlebars.yy
ADDED
@@ -0,0 +1,99 @@
%start root

%%

root
  : program EOF { return $1 }
  ;

program
  : statements simpleInverse statements { $$ = new yy.ProgramNode($1, $3) }
  | statements { $$ = new yy.ProgramNode($1) }
  | "" { $$ = new yy.ProgramNode([]) }
  ;

statements
  : statement { $$ = [$1] }
  | statements statement { $1.push($2); $$ = $1 }
  ;

statement
  : openInverse program closeBlock { $$ = new yy.InverseNode($1, $2, $3) }
  | openBlock program closeBlock { $$ = new yy.BlockNode($1, $2, $3) }
  | mustache { $$ = $1 }
  | partial { $$ = $1 }
  | CONTENT { $$ = new yy.ContentNode($1) }
  | COMMENT { $$ = new yy.CommentNode($1) }
  ;

openBlock
  : OPEN_BLOCK inMustache CLOSE { $$ = new yy.MustacheNode($2[0], $2[1]) }
  ;

openInverse
  : OPEN_INVERSE inMustache CLOSE { $$ = new yy.MustacheNode($2[0], $2[1]) }
  ;

closeBlock
  : OPEN_ENDBLOCK path CLOSE { $$ = $2 }
  ;

mustache
  : OPEN inMustache CLOSE { $$ = new yy.MustacheNode($2[0], $2[1]) }
  | OPEN_UNESCAPED inMustache CLOSE { $$ = new yy.MustacheNode($2[0], $2[1], true) }
  ;

partial
  : OPEN_PARTIAL path CLOSE { $$ = new yy.PartialNode($2) }
  | OPEN_PARTIAL path path CLOSE { $$ = new yy.PartialNode($2, $3) }
  ;

simpleInverse
  : OPEN_INVERSE CLOSE { }
  ;

inMustache
  : path params hash { $$ = [[$1].concat($2), $3] }
  | path params { $$ = [[$1].concat($2), null] }
  | path hash { $$ = [[$1], $2] }
  | path { $$ = [[$1], null] }
  ;

params
  : params param { $1.push($2); $$ = $1; }
  | param { $$ = [$1] }
  ;

param
  : path { $$ = $1 }
  | STRING { $$ = new yy.StringNode($1) }
  | INTEGER { $$ = new yy.IntegerNode($1) }
  | BOOLEAN { $$ = new yy.BooleanNode($1) }
  ;

hash
  : hashSegments { $$ = new yy.HashNode($1) }
  ;

hashSegments
  : hashSegments hashSegment { $1.push($2); $$ = $1 }
  | hashSegment { $$ = [$1] }
  ;

hashSegment
  : ID EQUALS path { $$ = [$1, $3] }
  | ID EQUALS STRING { $$ = [$1, new yy.StringNode($3)] }
  | ID EQUALS INTEGER { $$ = [$1, new yy.IntegerNode($3)] }
  | ID EQUALS BOOLEAN { $$ = [$1, new yy.BooleanNode($3)] }
  ;

path
  : pathSegments { $$ = new yy.IdNode($1) }
  ;

pathSegments
  : pathSegments SEP ID { $1.push($3); $$ = $1; }
  | ID { $$ = [$1] }
  ;
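Each production above builds a node defined in lib/handlebars/ast.js, so a parse yields a tree of ProgramNode, MustacheNode, BlockNode, PartialNode and related nodes, with hash pairs and literal params wrapped in HashNode, StringNode, IntegerNode, and BooleanNode. One way to inspect the result is the PrintVisitor that spec_helper.rb already wires up for puts_node; the sketch below assumes the specs' shared V8 context and that the loaded sources expose Handlebars.parse:

# Hypothetical sketch: print the AST this grammar builds for a small block
# template, using the PrintVisitor from lib/handlebars/printer.js via the
# specs' shared context. Assumes Handlebars.parse is exposed.
context = Handlebars::Spec::CONTEXT

puts context.eval(<<-JS)
  var ast = Handlebars.parse("{{#people}}Hi {{name}}{{/people}}");
  new Handlebars.PrintVisitor().accept(ast);
JS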