layo 1.0.0

Files changed (74)
  1. data/LICENSE +26 -0
  2. data/README.mkd +103 -0
  3. data/Rakefile +21 -0
  4. data/UnicodeData.txt +23697 -0
  5. data/bin/layo +22 -0
  6. data/layo.gemspec +23 -0
  7. data/lib/layo.rb +11 -0
  8. data/lib/layo/ast.rb +5 -0
  9. data/lib/layo/ast/block.rb +13 -0
  10. data/lib/layo/ast/expression.rb +14 -0
  11. data/lib/layo/ast/node.rb +6 -0
  12. data/lib/layo/ast/program.rb +9 -0
  13. data/lib/layo/ast/statement.rb +10 -0
  14. data/lib/layo/interpreter.rb +360 -0
  15. data/lib/layo/lexer.rb +162 -0
  16. data/lib/layo/parser.rb +371 -0
  17. data/lib/layo/peekable.rb +31 -0
  18. data/lib/layo/runtime_error.rb +9 -0
  19. data/lib/layo/syntax_error.rb +14 -0
  20. data/lib/layo/tokenizer.rb +119 -0
  21. data/lib/layo/unexpected_token_error.rb +13 -0
  22. data/lib/layo/unicode.rb +23614 -0
  23. data/lib/layo/unknown_token_error.rb +7 -0
  24. data/spec/interpreter_spec.rb +52 -0
  25. data/spec/lexer_spec.rb +176 -0
  26. data/spec/parser_spec.rb +373 -0
  27. data/spec/source/basic/comments.lol +16 -0
  28. data/spec/source/basic/comments.out +2 -0
  29. data/spec/source/basic/line-continuation.lol +8 -0
  30. data/spec/source/basic/line-continuation.out +2 -0
  31. data/spec/source/basic/line-endings.lol +5 -0
  32. data/spec/source/basic/line-endings.out +3 -0
  33. data/spec/source/basic/minimal.lol +2 -0
  34. data/spec/source/casting/boolean.lol +8 -0
  35. data/spec/source/casting/boolean.out +5 -0
  36. data/spec/source/casting/float.lol +10 -0
  37. data/spec/source/casting/float.out +5 -0
  38. data/spec/source/casting/int.lol +9 -0
  39. data/spec/source/casting/int.out +4 -0
  40. data/spec/source/casting/nil.lol +9 -0
  41. data/spec/source/casting/nil.out +4 -0
  42. data/spec/source/casting/string.lol +5 -0
  43. data/spec/source/casting/string.out +2 -0
  44. data/spec/source/expressions/boolean.lol +30 -0
  45. data/spec/source/expressions/boolean.out +17 -0
  46. data/spec/source/expressions/cast.lol +28 -0
  47. data/spec/source/expressions/cast.out +20 -0
  48. data/spec/source/expressions/function.lol +24 -0
  49. data/spec/source/expressions/function.out +4 -0
  50. data/spec/source/expressions/math.lol +9 -0
  51. data/spec/source/expressions/math.out +7 -0
  52. data/spec/source/expressions/string.lol +20 -0
  53. data/spec/source/expressions/string.out +7 -0
  54. data/spec/source/statements/assignment.lol +8 -0
  55. data/spec/source/statements/assignment.out +3 -0
  56. data/spec/source/statements/cast.lol +11 -0
  57. data/spec/source/statements/cast.out +3 -0
  58. data/spec/source/statements/declaration.lol +9 -0
  59. data/spec/source/statements/declaration.out +2 -0
  60. data/spec/source/statements/expression.lol +10 -0
  61. data/spec/source/statements/expression.out +2 -0
  62. data/spec/source/statements/if_then_else.lol +42 -0
  63. data/spec/source/statements/if_then_else.out +3 -0
  64. data/spec/source/statements/input.in +1 -0
  65. data/spec/source/statements/input.lol +4 -0
  66. data/spec/source/statements/input.out +1 -0
  67. data/spec/source/statements/loop.lol +50 -0
  68. data/spec/source/statements/loop.out +20 -0
  69. data/spec/source/statements/print.lol +7 -0
  70. data/spec/source/statements/print.out +2 -0
  71. data/spec/source/statements/switch.lol +95 -0
  72. data/spec/source/statements/switch.out +12 -0
  73. data/spec/tokenizer_spec.rb +105 -0
  74. metadata +135 -0
@@ -0,0 +1,7 @@
+ module Layo
+   class UnknownTokenError < SyntaxError
+     def initialize(lexeme)
+       super lexeme[1], lexeme[2], "Unknown token '#{lexeme[0]}'"
+     end
+   end
+ end
@@ -0,0 +1,52 @@
+ require 'minitest/autorun'
+ require 'layo'
+
+ include Layo
+
+ describe Interpreter do
+   before do
+     @interpreter = Interpreter.new
+     @interpreter.output = StringIO.new
+   end
+
+   it "should correctly interpret test programs" do
+     mask = File.join(File.dirname(__FILE__), 'source', '**', '*.lol')
+     Dir.glob(mask).each do |source_filename|
+       lexer = Lexer.new(File.new(source_filename))
+       parser = Parser.new(Tokenizer.new(lexer))
+       @interpreter.output.string = ''
+
+       # Supply input stream if provided
+       if File.exist?(infile = source_filename[0..-4] + 'in')
+         infile = File.new(infile)
+         @interpreter.input = infile
+       end
+
+       # Get contents of output file (if provided) to assert later
+       if File.exist?(outfile = source_filename[0..-4] + 'out')
+         expected_output = File.open(outfile) do |file|
+           file.read
+         end
+       else
+         expected_output = nil
+       end
+
+       # Execute the program
+       begin
+         @interpreter.interpret(parser.parse)
+       rescue RuntimeError, SyntaxError => e
+         puts "Error interpreting #{source_filename}"
+         puts e.message
+       end
+
+       # Assertions
+       if expected_output
+         assert_equal expected_output, @interpreter.output.string, "File: #{source_filename}"
+       end
+
+       # Cleanup
+       lexer.input.close
+       infile.close if infile.instance_of?(File)
+     end
+   end
+ end
@@ -0,0 +1,176 @@
+ # encoding: UTF-8
+
+ require 'stringio'
+ require 'minitest/autorun'
+ require 'layo'
+
+ include Layo
+
+ describe Lexer do
+   before do
+     @lexer = Lexer.new
+   end
+
+   it 'should transform all line-ending characters to \n' do
+     str = "\n \r\n \r "
+     @lexer.input = StringIO.new(str)
+     @lexer.next.must_equal ["\n", 1, 1]
+     @lexer.next.must_equal ["\n", 2, 2]
+     @lexer.next.must_equal ["\n", 3, 2]
+   end
+
+   it 'should recognize lexemes separated by whitespaces' do
+     @lexer.input = StringIO.new("abc def \t\tghi")
+     @lexer.next.must_equal ['abc', 1, 1]
+     @lexer.next.must_equal ['def', 1, 6]
+     @lexer.next.must_equal ['ghi', 1, 14]
+   end
+
+   it 'should recognize lexemes separated by newlines' do
+     @lexer.input = StringIO.new("abc\rdef\nghi")
+     @lexer.next.must_equal ['abc', 1, 1]
+     @lexer.next.must_equal ["\n", 1, 4]
+     @lexer.next.must_equal ['def', 2, 1]
+     @lexer.next.must_equal ["\n", 2, 4]
+     @lexer.next.must_equal ['ghi', 3, 1]
+   end
+
+   it 'should recognize special character lexemes' do
+     @lexer.input = StringIO.new("abc! ,def ,")
+     @lexer.next.must_equal ['abc', 1, 1]
+     @lexer.next.must_equal ['!', 1, 4]
+     # Comma acts as a virtual newline or a soft-command-break
+     @lexer.next.must_equal ["\n", 1, 6]
+     @lexer.next.must_equal ['def', 1, 7]
+     @lexer.next.must_equal ["\n", 1, 11]
+   end
+
+   describe 'when sees line ending with triple dots' do
+     it 'should join subsequent non-empty line' do
+       @lexer.input = StringIO.new("abc...\ndef…\nghi")
+       @lexer.next.must_equal ['abc', 1, 1]
+       @lexer.next.must_equal ['def', 2, 1]
+       @lexer.next.must_equal ['ghi', 3, 1]
+     end
+
+     it 'should raise a syntax error when the subsequent line is empty' do
+       @lexer.input = StringIO.new("abc...\n \n")
+       @lexer.next
+       lambda { @lexer.next }.must_raise Layo::SyntaxError
+     end
+
+     it 'should raise a syntax error when there is not subsequent line' do
+       @lexer.input = StringIO.new("abc...\n")
+       @lexer.next
+       lambda { @lexer.next }.must_raise Layo::SyntaxError
+     end
+   end
+
+   describe 'when sees BTW' do
+     it 'should treat everything till the end of line as a comment' do
+       @lexer.input = StringIO.new("abc BTW it's comment")
+       @lexer.next.must_equal ['abc', 1, 1]
+       # Newline should be added
+       @lexer.next[0].must_equal "\n"
+       @lexer.next[0].must_be_nil
+     end
+   end
+
+   describe 'when sees OBTW' do
+     it 'should treat all lines until TLDR as a comment' do
+       @lexer.input = StringIO.new("ABC
+ OBTW this is a long comment block
+ see, i have more comments here
+ and here
+ TLDR
+ DEF")
+       @lexer.next.must_equal ['ABC', 1, 1]
+       @lexer.next.must_equal ["\n", 1, 4]
+       @lexer.next.must_equal ["\n", 5, 5]
+       @lexer.next.must_equal ['DEF', 6, 1]
+     end
+
+     it 'should recognize commands before OBTW and after TLDR' do
+       @lexer.input = StringIO.new("ABC, OBTW
+ this is comment
+ valid comment
+ TLDR, DEF")
+       @lexer.next.must_equal ['ABC', 1, 1]
+       @lexer.next.must_equal ["\n", 1, 4]
+       @lexer.next.must_equal ['DEF', 4, 7]
+     end
+
+     it 'should raise a syntax error when there is not TLDR' do
+       @lexer.input = StringIO.new('OBTW
+ comment. no tldr
+ ')
+       lambda { @lexer.next }.must_raise Layo::SyntaxError
+     end
+   end
+
+   describe 'when sees double quotation marks (")' do
+     it 'should treat everything till another " character as a string lexeme' do
+       @lexer.input = StringIO.new('"hello world"')
+       @lexer.next.must_equal ['"hello world"', 1, 1]
+     end
+
+     it 'should handle empty string' do
+       @lexer.input = StringIO.new('""')
+       @lexer.next.must_equal ['""', 1, 1]
+     end
+
+     it "should handle escape characters" do
+       @lexer.input = StringIO.new('"Special :) :::" :> :o chars ::"')
+       @lexer.next[0].must_equal %["Special \n :" \t \a chars :"]
+     end
+
+     it 'should raise a syntax error if string is unterminated' do
+       @lexer.input = StringIO.new(' "bla bla bla ')
+       lambda { @lexer.next }.must_raise Layo::SyntaxError
+     end
+
+     it 'should raise a syntax error if string terminator is not followed by allowed delimiter' do
+       @lexer.input = StringIO.new('"a","b" "c"!"d"...
+ "e"…
+ "f"
+ "g"bla')
+       # OK, since "a" is followed by a ','
+       @lexer.next.must_equal ['"a"', 1, 1]
+       @lexer.next.must_equal ["\n", 1, 4]
+       # OK, since "b" is followed by a space
+       @lexer.next.must_equal ['"b"', 1, 5]
+       # OK, since "c" is followed by a '!'
+       @lexer.next.must_equal ['"c"', 1, 9]
+       @lexer.next.must_equal ['!', 1, 12]
+       # OK, since "d" is followed by a '...'
+       @lexer.next.must_equal ['"d"', 1, 13]
+       # OK, since "e" is followed by a '…'
+       @lexer.next.must_equal ['"e"', 2, 1]
+       # OK, since "f" is followed by a newline
+       @lexer.next.must_equal ['"f"', 3, 1]
+       @lexer.next.must_equal ["\n", 3, 4]
+       # Error, since "g" is not followed by any allowed delimiter
+       lambda { @lexer.next }.must_raise Layo::SyntaxError
+     end
+   end
+
+   describe '#escape_string' do
+     it "should handle escape characters" do
+       str = 'Example :) usage :> of :" special :o chars ::'
+       result = @lexer.escape_string(str)
+       result.must_equal %[Example \n usage \t of " special \a chars :]
+     end
+
+     it "should resolve :(<hex>) into corresponding Unicode code point" do
+       str = 'Kazakh symbol :(049b), pound sign :(0A3)'
+       result = @lexer.escape_string(str)
+       result.must_equal 'Kazakh symbol қ, pound sign £'
+     end
+
+     it "should resolve :[<char name>] into corresponding Unicode normative name" do
+       str = ':[CYRILLIC SMALL LETTER KA WITH DESCENDER] and :[COMMERCIAL AT]'
+       result = @lexer.escape_string(str)
+       result.must_equal 'қ and @'
+     end
+   end
+ end
@@ -0,0 +1,373 @@
+ require 'minitest/autorun'
+ require 'mocha'
+ require 'layo'
+
+ include Layo
+ include Layo::Ast
+
+ describe Parser do
+   before do
+     @tokenizer = Tokenizer.new(Lexer.new)
+     @parser = Parser.new(@tokenizer)
+   end
+
+   it "should restore peek index after statement lookaheads" do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :identifier, :data => 'abc'},
+       {:type => :is_now_a}, {:type => :newline}
+     )
+     @parser.next_statement.must_equal 'cast'
+     @tokenizer.peek.must_equal :type => :identifier, :data => 'abc'
+   end
+
+   it "should restore peek index after expression lookaheads" do
+     @tokenizer.stubs(:next_item).returns({:type => :string, :data => 'abc'})
+     @parser.next_expression.must_equal 'constant'
+     @tokenizer.peek.must_equal :type => :string, :data => 'abc'
+   end
+
+   it 'should parse program' do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :hai}, {:type => :float, :data => 1.2},
+       {:type => :newline}, {:type => :kthxbye}, {:type => :eof}
+     )
+     block = mock
+     @parser.expects(:parse_block).returns(block)
+     node = @parser.parse_program
+     node.must_be_instance_of Program
+     node.version.must_equal 1.2
+     node.block.must_be_same_as block
+   end
+
+   it "should parse block" do
+     statements = [mock]
+     @parser.stubs(:skip_newlines)
+     @parser.stubs(:parse_statement).returns(*statements)
+     @parser.stubs(:next_statement).returns('print', nil)
+     node = @parser.parse_block
+     node.must_be_instance_of Block
+     node.statement_list.must_equal statements
+   end
+
+   it "should parse cast statement" do
+     @tokenizer.stubs(:next_item).returns(
+       { type: :identifier, data: 'abc' },
+       { type: :is_now_a}, { type: :troof }
+     )
+     node = @parser.parse_cast_statement
+     node.type.must_equal 'cast'
+     node.identifier.must_equal 'abc'
+     node.to.must_equal :troof
+   end
+
+   describe "#parse_print_statement" do
+     it "should parse print statement with exclamation mark" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :visible}, {:type => :exclamation}
+       )
+       expr = mock
+       @parser.expects(:parse_expression).returns(expr)
+       @parser.stubs(:next_expression).returns(nil)
+       node = @parser.parse_print_statement
+       node.type.must_equal 'print'
+       node.expressions.size.must_equal 1
+       node.expressions.first.must_be_same_as expr
+       node.suppress.must_equal true
+     end
+
+     it "should parse print statement without exclamation mark" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :visible}
+       )
+       exprs = [mock, mock]
+       @parser.stubs(:parse_expression).returns(*exprs)
+       @parser.stubs(:next_expression).returns('constant', nil)
+       node = @parser.parse_print_statement
+       node.type.must_equal 'print'
+       node.expressions.must_equal exprs
+       node.suppress.must_equal false
+     end
+   end
+
+   it "should parse input statement" do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :gimmeh}, {:type => :identifier, :data => 'var'},
+     )
+     node = @parser.parse_input_statement
+     node.type.must_equal 'input'
+     node.identifier.must_equal 'var'
+   end
+
+   it "should parse assignment statement" do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :identifier, :data => 'abc'}, {:type => :r}
+     )
+     expr = mock
+     @parser.expects(:parse_expression).returns(expr)
+     node = @parser.parse_assignment_statement
+     node.type.must_equal 'assignment'
+     node.identifier.must_equal 'abc'
+     node.expression.must_be_same_as expr
+   end
+
+   describe "#parse_declaration_statement" do
+     it "should parse declaration statement without initialization" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :i_has_a}, {:type => :identifier, :data => 'abc'}
+       )
+       node = @parser.parse_declaration_statement
+       node.type.must_equal 'declaration'
+       node.identifier.must_equal 'abc'
+       node.initialization.must_be_nil
+     end
+
+     it "should parse declaration statement with initialization" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :i_has_a}, {:type => :identifier, :data => 'abc'},
+         {:type => :itz}
+       )
+       init = mock
+       @parser.expects(:parse_expression).returns(init)
+       node = @parser.parse_declaration_statement
+       node.type.must_equal 'declaration'
+       node.identifier.must_equal 'abc'
+       node.initialization.must_be_same_as init
+     end
+   end
+
+   describe "#parse_condition_statement" do
+     before do
+       @tokenizer_expectation = @tokenizer.stubs(:next_item).returns(
+         {:type => :o_rly?}, {:type => :newline},
+         {:type => :ya_rly}, {:type => :newline}
+       )
+       @block = mock
+       @parser_expectation = @parser.stubs(:parse_block).returns(@block)
+     end
+
+     it "should parse conditional statement without else's" do
+       @tokenizer_expectation.then.returns({:type => :oic})
+       node = @parser.parse_condition_statement
+       node.type.must_equal 'condition'
+       node.then.must_be_same_as @block
+       node.elseif.must_be_empty
+     end
+
+     it "should parse conditional statement with else and elseif's" do
+       @tokenizer_expectation.then.returns(
+         { type: :mebbe }, { type: :newline },
+         {:type => :no_wai}, {:type => :newline},
+         {:type => :oic}
+       )
+       elseif_condition = mock
+       elseif_block = mock
+       else_block = mock
+       @parser.stubs(:parse_expression).returns(elseif_condition)
+       @parser_expectation.then.returns(elseif_block, else_block)
+
+       node = @parser.parse_condition_statement
+
+       node.type.must_equal 'condition'
+       node.then.must_be_same_as @block
+       node.elseif.first[:condition].must_be_same_as elseif_condition
+       node.elseif.first[:block].must_be_same_as elseif_block
+       node.else.must_be_same_as else_block
+     end
+   end
+
+   it "should parse switch statement" do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :wtf?}, {:type => :newline},
+       # One case
+       { type: :omg }, { type: :newline },
+       # Default case
+       {:type => :omgwtf}, {:type => :newline},
+       {:type => :oic}
+     )
+     kase_expr, kase, default = mock, mock, mock
+     @parser.expects(:parse_expression).returns(kase_expr)
+     @parser.stubs(:parse_block).returns(kase, default)
+
+     node = @parser.parse_switch_statement
+
+     node.type.must_equal 'switch'
+     node.cases.first[:expression].must_be_same_as kase_expr
+     node.cases.first[:block].must_be_same_as kase
+     node.default.must_be_same_as default
+   end
+
+   it "should parse break statement" do
+     @tokenizer.stubs(:next_item).returns({:type => :gtfo})
+     node = @parser.parse_break_statement
+     node.type.must_equal 'break'
+   end
+
+   it "should parse return statement" do
+     @tokenizer.stubs(:next_item).returns({:type => :found_yr})
+     expr = mock
+     @parser.expects(:parse_expression).returns(expr)
+     node = @parser.parse_return_statement
+     node.type.must_equal 'return'
+     node.expression.must_be_same_as expr
+   end
+
+   describe "#parse_loop_statement" do
+     it "should parse loop statement" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :im_in_yr}, {:type => :identifier, :data => 'abc'},
+         # Loop operation
+         { type: :uppin }, { type: :yr }, { type: :identifier, data: 'i' },
+         # Loop condition
+         { type: :wile },
+         {:type => :im_outta_yr}, {:type => :identifier, :data => 'abc'}
+       )
+       expr, block = mock, mock
+       @parser.expects(:parse_expression).returns(expr)
+       @parser.expects(:parse_block).returns(block)
+
+       node = @parser.parse_loop_statement
+
+       node.type.must_equal 'loop'
+       node.label.must_equal 'abc'
+       node.op.must_equal :uppin
+       node.counter.must_equal 'i'
+       node.guard[:type].must_equal :wile
+       node.guard[:expression].must_be_same_as expr
+       node.block.must_be_same_as block
+     end
+
+     it "should raise exception if labels are not equal" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :im_in_yr, :line => 1, :pos => 1},
+         {:type => :identifier, :data => 'foo'},
+         {:type => :newline}, {:type => :im_outta_yr},
+         {:type => :identifier, :data => 'bar'}
+       )
+       lambda { @parser.parse_loop_statement }.must_raise Layo::SyntaxError
+     end
+   end
+
+   it "should parse function statement" do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :how_duz_i}, {:type => :identifier, :data => 'hello'},
+       {:type => :newline}, {:type => :if_u_say_so}
+     )
+     block = mock
+     @parser.expects(:parse_block).returns(block)
+     node = @parser.parse_function_statement
+     node.type.must_equal 'function'
+     node.name.must_equal 'hello'
+     node.block.must_be_same_as block
+     node.args.must_equal []
+   end
+
+   it "should parse expression statement" do
+     expr = mock
+     @parser.expects(:parse_expression).returns(expr)
+     node = @parser.parse_expression_statement
+     node.type.must_equal 'expression'
+     node.expression.must_be_same_as expr
+   end
+
+   it "should parse cast expression" do
+     @tokenizer.stubs(:next_item).returns(
+       {:type => :maek}, {:type => :a}, {:type => :troof}
+     )
+     expr = mock
+     @parser.expects(:parse_expression).returns(expr)
+     node = @parser.parse_cast_expression
+     node.type.must_equal 'cast'
+     node.being_casted.must_be_same_as expr
+     node.to.must_equal :troof
+   end
+
+   describe "#parse_constant_expressionession" do
+     it "should parse boolean values" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :boolean, :data => true},
+         {:type => :boolean, :data => false}
+       )
+       node = @parser.parse_constant_expression
+       node.type.must_equal 'constant'
+       node.vtype.must_equal :boolean
+       node.value.must_equal true
+
+       node = @parser.parse_constant_expression
+       node.value.must_equal false
+     end
+
+     it "should parse integer values" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :integer, :data => 567}
+       )
+       node = @parser.parse_constant_expression
+       node.value.must_equal 567
+     end
+
+     it "should parse float values" do
+       @tokenizer.stubs(:next_item).returns(
+         {:type => :float, :data => -5.234}
+       )
+       node = @parser.parse_constant_expression
+       node.value.must_equal -5.234
+     end
+
+     it "should parse string values" do
+       @tokenizer.stubs(:next_item).returns :type => :string, :data => 'some value'
+       node = @parser.parse_constant_expression
+       node.value.must_equal 'some value'
+     end
+   end
+
+   describe '#parse_identifier_expression' do
+     it "should parse variable expression" do
+       @tokenizer.stubs(:next_item).returns({:type => :identifier, :data => 'var'})
+       node = @parser.parse_identifier_expression
+       node.type.must_equal 'variable'
+       node.name.must_equal 'var'
+     end
+
+     it "should parse function expression" do
+       @tokenizer.stubs(:next_item).returns({:type => :identifier, :data => 'foo'})
+       exprs = [mock, mock]
+       @parser.stubs(:functions).returns({'foo' => ['arg1', 'arg2']})
+       @parser.stubs(:next_expression).returns('cast', 'constant')
+       @parser.stubs(:parse_expression).returns(*exprs)
+       node = @parser.parse_identifier_expression
+       node.type.must_equal 'function'
+       node.name.must_equal 'foo'
+       node.parameters.must_equal exprs
+     end
+   end
+
+   it "should parse unary expr" do
+     @tokenizer.stubs(:next_item).returns({:type => :not})
+     expr = mock
+     @parser.expects(:parse_expression).returns(expr)
+     node = @parser.parse_unary_expression
+     node.type.must_equal 'unary'
+     node.expression.must_be_same_as expr
+   end
+
+   it "should parse binary expr" do
+     @tokenizer.stubs(:next_item).returns({:type => :both_of})
+     expr1, expr2 = mock, mock
+     @parser.stubs(:parse_expression).returns(expr1, expr2)
+     node = @parser.parse_binary_expression
+     node.type.must_equal 'binary'
+     node.operator.must_equal :both_of
+     node.left.must_be_same_as expr1
+     node.right.must_be_same_as expr2
+   end
+
+   it "should parse nary expr" do
+     @tokenizer.stubs(:next_item).returns({:type => :any_of}, {:type => :mkay})
+     expressions = [mock, mock, mock]
+     @parser.stubs(:parse_expression).returns(*expressions)
+     @parser.stubs(:next_expression).returns('constant', 'constant', nil)
+     node = @parser.parse_nary_expression
+     node.type.must_equal 'nary'
+     node.operator.must_equal :any_of
+     node.expressions.must_equal expressions
+   end
+ end