handlebars 0.4.0 → 0.5.0
- checksums.yaml +7 -0
- data/Gemfile +1 -2
- data/README.mdown +15 -7
- data/handlebars.gemspec +2 -5
- data/lib/handlebars.rb +5 -5
- data/lib/handlebars/context.rb +3 -12
- data/lib/handlebars/version.rb +1 -1
- data/spec/handlebars_spec.rb +1 -1
- metadata +47 -58
- data/.gitmodules +0 -3
- data/vendor/handlebars/.gitignore +0 -8
- data/vendor/handlebars/.jshintrc +0 -52
- data/vendor/handlebars/.npmignore +0 -10
- data/vendor/handlebars/.rspec +0 -1
- data/vendor/handlebars/Gemfile +0 -5
- data/vendor/handlebars/LICENSE +0 -19
- data/vendor/handlebars/README.markdown +0 -317
- data/vendor/handlebars/Rakefile +0 -109
- data/vendor/handlebars/bench/benchwarmer.js +0 -149
- data/vendor/handlebars/bench/handlebars.js +0 -172
- data/vendor/handlebars/bin/handlebars +0 -193
- data/vendor/handlebars/dist/handlebars.js +0 -2201
- data/vendor/handlebars/dist/handlebars.runtime.js +0 -321
- data/vendor/handlebars/lib/handlebars.js +0 -14
- data/vendor/handlebars/lib/handlebars/base.js +0 -154
- data/vendor/handlebars/lib/handlebars/compiler/ast.js +0 -133
- data/vendor/handlebars/lib/handlebars/compiler/base.js +0 -21
- data/vendor/handlebars/lib/handlebars/compiler/compiler.js +0 -1267
- data/vendor/handlebars/lib/handlebars/compiler/index.js +0 -7
- data/vendor/handlebars/lib/handlebars/compiler/printer.js +0 -131
- data/vendor/handlebars/lib/handlebars/compiler/visitor.js +0 -13
- data/vendor/handlebars/lib/handlebars/runtime.js +0 -88
- data/vendor/handlebars/lib/handlebars/utils.js +0 -67
- data/vendor/handlebars/package.json +0 -35
- data/vendor/handlebars/spec/acceptance_spec.rb +0 -101
- data/vendor/handlebars/spec/parser_spec.rb +0 -433
- data/vendor/handlebars/spec/qunit_spec.js +0 -1370
- data/vendor/handlebars/spec/spec_helper.rb +0 -157
- data/vendor/handlebars/spec/tokenizer_spec.rb +0 -301
- data/vendor/handlebars/src/handlebars.l +0 -53
- data/vendor/handlebars/src/handlebars.yy +0 -109
- data/vendor/handlebars/src/parser-prefix.js +0 -1
- data/vendor/handlebars/src/parser-suffix.js +0 -4
data/vendor/handlebars/spec/spec_helper.rb
@@ -1,157 +0,0 @@
-require "v8"
-
-# Monkey patches due to bugs in RubyRacer
-class V8::JSError
-  def initialize(try, to)
-    @to = to
-    begin
-      super(initialize_unsafe(try))
-    rescue Exception => e
-      # Original code does not make an Array here
-      @boundaries = [Boundary.new(:rbframes => e.backtrace)]
-      @value = e
-      super("BUG! please report. JSError#initialize failed!: #{e.message}")
-    end
-  end
-
-  def parse_js_frames(try)
-    raw = @to.rb(try.StackTrace())
-    if raw && !raw.empty?
-      raw.split("\n")[1..-1].tap do |frames|
-        # Original code uses strip!, and the frames are not guaranteed to be strippable
-        frames.each {|frame| frame.strip.chomp!(",")}
-      end
-    else
-      []
-    end
-  end
-end
-
-module Handlebars
-  module Spec
-    def self.js_backtrace(context)
-      begin
-        context.eval("throw")
-      rescue V8::JSError => e
-        return e.backtrace(:javascript)
-      end
-    end
-
-    def self.remove_exports(string)
-      match = string.match(%r{\A(.*?)^// BEGIN\(BROWSER\)\n(.*)\n^// END\(BROWSER\)(.*?)\Z}m)
-      prelines = match ? match[1].count("\n") + 1 : 0
-      ret = match ? match[2] : string
-      ("\n" * prelines) + ret
-    end
-
-    def self.load_helpers(context)
-      context["exports"] = nil
-
-      context["p"] = proc do |this, val|
-        p val if ENV["DEBUG_JS"]
-      end
-
-      context["puts"] = proc do |this, val|
-        puts val if ENV["DEBUG_JS"]
-      end
-
-      context["puts_node"] = proc do |this, val|
-        puts context["Handlebars"]["PrintVisitor"].new.accept(val)
-        puts
-      end
-
-      context["puts_caller"] = proc do
-        puts "BACKTRACE:"
-        puts Handlebars::Spec.js_backtrace(context)
-        puts
-      end
-    end
-
-    def self.js_load(context, file)
-      str = File.read(file)
-      context.eval(remove_exports(str), file)
-    end
-
-    CONTEXT = V8::Context.new
-    CONTEXT.instance_eval do |context|
-      Handlebars::Spec.load_helpers(context);
-
-      Handlebars::Spec.js_load(context, 'lib/handlebars/base.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/utils.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/runtime.js');
-
-      context["CompilerContext"] = {}
-      CompilerContext = context["CompilerContext"]
-      CompilerContext["compile"] = proc do |this, *args|
-        template, options = args[0], args[1] || nil
-        templateSpec = COMPILE_CONTEXT["Handlebars"]["precompile"].call(template, options);
-        context["Handlebars"]["template"].call(context.eval("(#{templateSpec})"));
-      end
-      CompilerContext["compileWithPartial"] = proc do |this, *args|
-        template, options = args[0], args[1] || nil
-        FULL_CONTEXT["Handlebars"]["compile"].call(template, options);
-      end
-    end
-
-    COMPILE_CONTEXT = V8::Context.new
-    COMPILE_CONTEXT.instance_eval do |context|
-      Handlebars::Spec.load_helpers(context);
-
-      Handlebars::Spec.js_load(context, 'lib/handlebars/base.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/utils.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/parser.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/base.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/ast.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/visitor.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/printer.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/compiler.js');
-
-      context["Handlebars"]["logger"]["level"] = ENV["DEBUG_JS"] ? context["Handlebars"]["logger"][ENV["DEBUG_JS"]] : 4
-
-      context["Handlebars"]["logger"]["log"] = proc do |this, level, str|
-        logger_level = context["Handlebars"]["logger"]["level"].to_i
-
-        if logger_level <= level
-          puts str
-        end
-      end
-    end
-
-    FULL_CONTEXT = V8::Context.new
-    FULL_CONTEXT.instance_eval do |context|
-      Handlebars::Spec.load_helpers(context);
-
-      Handlebars::Spec.js_load(context, 'lib/handlebars/base.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/utils.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/parser.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/base.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/ast.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/visitor.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/printer.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/compiler/compiler.js');
-      Handlebars::Spec.js_load(context, 'lib/handlebars/runtime.js');
-
-      context["Handlebars"]["logger"]["level"] = ENV["DEBUG_JS"] ? context["Handlebars"]["logger"][ENV["DEBUG_JS"]] : 4
-
-      context["Handlebars"]["logger"]["log"] = proc do |this, level, str|
-        logger_level = context["Handlebars"]["logger"]["level"].to_i
-
-        if logger_level <= level
-          puts str
-        end
-      end
-    end
-  end
-end
-
-
-require "test/unit/assertions"
-
-RSpec.configure do |config|
-  config.include Test::Unit::Assertions
-
-  # Each is required to allow classes to mark themselves as compiler tests
-  config.before(:each) do
-    @context = @compiles ? Handlebars::Spec::COMPILE_CONTEXT : Handlebars::Spec::CONTEXT
-  end
-end
data/vendor/handlebars/spec/tokenizer_spec.rb
@@ -1,301 +0,0 @@
-require "spec_helper"
-require "timeout"
-
-describe "Tokenizer" do
-  let(:parser) { @context["handlebars"] }
-  let(:lexer) { @context["handlebars"]["lexer"] }
-
-  before(:all) do
-    @compiles = true
-  end
-  Token = Struct.new(:name, :text)
-
-  def tokenize(string)
-    lexer.setInput(string)
-    out = []
-
-    while token = lexer.lex
-      # p token
-      result = parser.terminals_[token] || token
-      # p result
-      break if !result || result == "EOF" || result == "INVALID"
-      out << Token.new(result, lexer.yytext)
-    end
-
-    out
-  end
-
-  RSpec::Matchers.define :match_tokens do |tokens|
-    match do |result|
-      result.map(&:name).should == tokens
-    end
-  end
-
-  RSpec::Matchers.define :be_token do |name, string|
-    match do |token|
-      token.name.should == name
-      token.text.should == string
-    end
-  end
-
-  it "tokenizes a simple mustache as 'OPEN ID CLOSE'" do
-    result = tokenize("{{foo}}")
-    result.should match_tokens(%w(OPEN ID CLOSE))
-    result[1].should be_token("ID", "foo")
-  end
-
-  it "supports escaping delimiters" do
-    result = tokenize("{{foo}} \\{{bar}} {{baz}}")
-    result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT OPEN ID CLOSE))
-
-    result[4].should be_token("CONTENT", "{{bar}} ")
-  end
-
-  it "supports escaping multiple delimiters" do
-    result = tokenize("{{foo}} \\{{bar}} \\{{baz}}")
-    result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT CONTENT))
-
-    result[3].should be_token("CONTENT", " ")
-    result[4].should be_token("CONTENT", "{{bar}} ")
-    result[5].should be_token("CONTENT", "{{baz}}")
-  end
-
-  it "supports escaping a triple stash" do
-    result = tokenize("{{foo}} \\{{{bar}}} {{baz}}")
-    result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT OPEN ID CLOSE))
-
-    result[4].should be_token("CONTENT", "{{{bar}}} ")
-  end
-
-  it "tokenizes a simple path" do
-    result = tokenize("{{foo/bar}}")
-    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
-  end
-
-  it "allows dot notation" do
-    result = tokenize("{{foo.bar}}")
-    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
-
-    tokenize("{{foo.bar.baz}}").should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
-  end
-
-  it "allows path literals with []" do
-    result = tokenize("{{foo.[bar]}}")
-    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
-  end
-
-  it "allows multiple path literals on a line with []" do
-    result = tokenize("{{foo.[bar]}}{{foo.[baz]}}")
-    result.should match_tokens(%w(OPEN ID SEP ID CLOSE OPEN ID SEP ID CLOSE))
-  end
-
-  it "tokenizes {{.}} as OPEN ID CLOSE" do
-    result = tokenize("{{.}}")
-    result.should match_tokens(%w(OPEN ID CLOSE))
-  end
-
-  it "tokenizes a path as 'OPEN (ID SEP)* ID CLOSE'" do
-    result = tokenize("{{../foo/bar}}")
-    result.should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
-    result[1].should be_token("ID", "..")
-  end
-
-  it "tokenizes a path with .. as a parent path" do
-    result = tokenize("{{../foo.bar}}")
-    result.should match_tokens(%w(OPEN ID SEP ID SEP ID CLOSE))
-    result[1].should be_token("ID", "..")
-  end
-
-  it "tokenizes a path with this/foo as OPEN ID SEP ID CLOSE" do
-    result = tokenize("{{this/foo}}")
-    result.should match_tokens(%w(OPEN ID SEP ID CLOSE))
-    result[1].should be_token("ID", "this")
-    result[3].should be_token("ID", "foo")
-  end
-
-  it "tokenizes a simple mustache with spaces as 'OPEN ID CLOSE'" do
-    result = tokenize("{{ foo }}")
-    result.should match_tokens(%w(OPEN ID CLOSE))
-    result[1].should be_token("ID", "foo")
-  end
-
-  it "tokenizes a simple mustache with line breaks as 'OPEN ID ID CLOSE'" do
-    result = tokenize("{{ foo \n bar }}")
-    result.should match_tokens(%w(OPEN ID ID CLOSE))
-    result[1].should be_token("ID", "foo")
-  end
-
-  it "tokenizes raw content as 'CONTENT'" do
-    result = tokenize("foo {{ bar }} baz")
-    result.should match_tokens(%w(CONTENT OPEN ID CLOSE CONTENT))
-    result[0].should be_token("CONTENT", "foo ")
-    result[4].should be_token("CONTENT", " baz")
-  end
-
-  it "tokenizes a partial as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do
-    result = tokenize("{{> foo}}")
-    result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE))
-  end
-
-  it "tokenizes a partial with context as 'OPEN_PARTIAL PARTIAL_NAME ID CLOSE'" do
-    result = tokenize("{{> foo bar }}")
-    result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME ID CLOSE))
-  end
-
-  it "tokenizes a partial without spaces as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do
-    result = tokenize("{{>foo}}")
-    result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE))
-  end
-
-  it "tokenizes a partial space at the end as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do
-    result = tokenize("{{>foo }}")
-    result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE))
-  end
-
-  it "tokenizes a comment as 'COMMENT'" do
-    result = tokenize("foo {{! this is a comment }} bar {{ baz }}")
-    result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
-    result[1].should be_token("COMMENT", " this is a comment ")
-  end
-
-  it "tokenizes a block comment as 'COMMENT'" do
-    result = tokenize("foo {{!-- this is a {{comment}} --}} bar {{ baz }}")
-    result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
-    result[1].should be_token("COMMENT", " this is a {{comment}} ")
-  end
-
-  it "tokenizes a block comment with whitespace as 'COMMENT'" do
-    result = tokenize("foo {{!-- this is a\n{{comment}}\n--}} bar {{ baz }}")
-    result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE))
-    result[1].should be_token("COMMENT", " this is a\n{{comment}}\n")
-  end
-
-  it "tokenizes open and closing blocks as 'OPEN_BLOCK ID CLOSE ... OPEN_ENDBLOCK ID CLOSE'" do
-    result = tokenize("{{#foo}}content{{/foo}}")
-    result.should match_tokens(%w(OPEN_BLOCK ID CLOSE CONTENT OPEN_ENDBLOCK ID CLOSE))
-  end
-
-  it "tokenizes inverse sections as 'OPEN_INVERSE CLOSE'" do
-    tokenize("{{^}}").should match_tokens(%w(OPEN_INVERSE CLOSE))
-    tokenize("{{else}}").should match_tokens(%w(OPEN_INVERSE CLOSE))
-    tokenize("{{ else }}").should match_tokens(%w(OPEN_INVERSE CLOSE))
-  end
-
-  it "tokenizes inverse sections with ID as 'OPEN_INVERSE ID CLOSE'" do
-    result = tokenize("{{^foo}}")
-    result.should match_tokens(%w(OPEN_INVERSE ID CLOSE))
-    result[1].should be_token("ID", "foo")
-  end
-
-  it "tokenizes inverse sections with ID and spaces as 'OPEN_INVERSE ID CLOSE'" do
-    result = tokenize("{{^ foo }}")
-    result.should match_tokens(%w(OPEN_INVERSE ID CLOSE))
-    result[1].should be_token("ID", "foo")
-  end
-
-  it "tokenizes mustaches with params as 'OPEN ID ID ID CLOSE'" do
-    result = tokenize("{{ foo bar baz }}")
-    result.should match_tokens(%w(OPEN ID ID ID CLOSE))
-    result[1].should be_token("ID", "foo")
-    result[2].should be_token("ID", "bar")
-    result[3].should be_token("ID", "baz")
-  end
-
-  it "tokenizes mustaches with String params as 'OPEN ID ID STRING CLOSE'" do
-    result = tokenize("{{ foo bar \"baz\" }}")
-    result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
-    result[3].should be_token("STRING", "baz")
-  end
-
-  it "tokenizes mustaches with String params using single quotes as 'OPEN ID ID STRING CLOSE'" do
-    result = tokenize("{{ foo bar \'baz\' }}")
-    result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
-    result[3].should be_token("STRING", "baz")
-  end
-
-  it "tokenizes String params with spaces inside as 'STRING'" do
-    result = tokenize("{{ foo bar \"baz bat\" }}")
-    result.should match_tokens(%w(OPEN ID ID STRING CLOSE))
-    result[3].should be_token("STRING", "baz bat")
-  end
-
-  it "tokenizes String params with escapes quotes as 'STRING'" do
-    result = tokenize(%|{{ foo "bar\\"baz" }}|)
-    result.should match_tokens(%w(OPEN ID STRING CLOSE))
-    result[2].should be_token("STRING", %{bar"baz})
-  end
-
-  it "tokenizes String params using single quotes with escapes quotes as 'STRING'" do
-    result = tokenize(%|{{ foo 'bar\\'baz' }}|)
-    result.should match_tokens(%w(OPEN ID STRING CLOSE))
-    result[2].should be_token("STRING", %{bar'baz})
-  end
-
-  it "tokenizes numbers" do
-    result = tokenize(%|{{ foo 1 }}|)
-    result.should match_tokens(%w(OPEN ID INTEGER CLOSE))
-    result[2].should be_token("INTEGER", "1")
-  end
-
-  it "tokenizes booleans" do
-    result = tokenize(%|{{ foo true }}|)
-    result.should match_tokens(%w(OPEN ID BOOLEAN CLOSE))
-    result[2].should be_token("BOOLEAN", "true")
-
-    result = tokenize(%|{{ foo false }}|)
-    result.should match_tokens(%w(OPEN ID BOOLEAN CLOSE))
-    result[2].should be_token("BOOLEAN", "false")
-  end
-
-  it "tokenizes hash arguments" do
-    result = tokenize("{{ foo bar=baz }}")
-    result.should match_tokens %w(OPEN ID ID EQUALS ID CLOSE)
-
-    result = tokenize("{{ foo bar baz=bat }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS ID CLOSE)
-
-    result = tokenize("{{ foo bar baz=1 }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS INTEGER CLOSE)
-
-    result = tokenize("{{ foo bar baz=true }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS BOOLEAN CLOSE)
-
-    result = tokenize("{{ foo bar baz=false }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS BOOLEAN CLOSE)
-
-    result = tokenize("{{ foo bar\n baz=bat }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS ID CLOSE)
-
-    result = tokenize("{{ foo bar baz=\"bat\" }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS STRING CLOSE)
-
-    result = tokenize("{{ foo bar baz=\"bat\" bam=wot }}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS STRING ID EQUALS ID CLOSE)
-
-    result = tokenize("{{foo omg bar=baz bat=\"bam\"}}")
-    result.should match_tokens %w(OPEN ID ID ID EQUALS ID ID EQUALS STRING CLOSE)
-    result[2].should be_token("ID", "omg")
-  end
-
-  it "tokenizes special @ identifiers" do
-    result = tokenize("{{ @foo }}")
-    result.should match_tokens %w( OPEN DATA CLOSE )
-    result[1].should be_token("DATA", "foo")
-
-    result = tokenize("{{ foo @bar }}")
-    result.should match_tokens %w( OPEN ID DATA CLOSE )
-    result[2].should be_token("DATA", "bar")
-
-    result = tokenize("{{ foo bar=@baz }}")
-    result.should match_tokens %w( OPEN ID ID EQUALS DATA CLOSE )
-    result[4].should be_token("DATA", "baz")
-  end
-
-  it "does not time out in a mustache with a single } followed by EOF" do
-    Timeout.timeout(1) { tokenize("{{foo}").should match_tokens(%w(OPEN ID)) }
-  end
-
-  it "does not time out in a mustache when invalid ID characters are used" do
-    Timeout.timeout(1) { tokenize("{{foo & }}").should match_tokens(%w(OPEN ID)) }
-  end
-end