halunke 0.7.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -52,6 +52,16 @@ This multiplies two numbers.
  (0.6 * 0.3) /* => 0.18 */
  ```

+ ## `**`
+
+ This performs exponentiation.
+
+ **Example:**
+
+ ```
+ (0.6 ** 0.3) /* => 0.8579172004440949 */
+ ```
+
  ## `<`

  This compares two numbers. It is true if the first number is smaller than the
@@ -85,14 +95,24 @@ it is false.
  (0.6 = 0.3) /* => false */
  ```

- ## `to_s`
+ ## `to_boolean`
+
+ This returns true.
+
+ **Example:**
+
+ ```
+ (0.6 to_boolean) /* => true */
+ ```
+
+ ## `to_string`

  This returns a string to represent the number in output.

  **Example:**

  ```
- (0.6 to_s) /* => "0.6" */
+ (0.6 to_string) /* => "0.6" */
  ```

  ## `inspect`
@@ -8,5 +8,6 @@ Regexp is a regular expression. It can be created like this:
  (Regexp from "a+")
  ```

- Currently it doesn't answer to any messages except `inspect` and `to_s`. To use
- a Regexp, see the messages you can send to [String](/string).
+ Currently it doesn't answer to any messages except `inspect`, `to_string` and
+ `to_boolean`. To use a Regexp, see the messages you can send to
+ [String](/string).
@@ -9,7 +9,7 @@ isolated.](https://github.com/moonglum/halunke/issues/4)*
  ## `puts`

  Prints the object to stdout. To determine how to represent the object, it will
- send the `to_s` message to the object, expecting a string.
+ send the `to_string` message to the object, expecting a string.

  **Example:**

@@ -35,7 +35,7 @@ true
  false
  ```

- ## `p`
+ ## `examine`

  Prints the object to stdout for debugging. To determine how to represent the
  object, it will send the `inspect` message to the object, expecting a string.
@@ -43,12 +43,12 @@ object, it will send the `inspect` message to the object, expecting a string.
  **Example:**

  ```
- (stdio p "Hello World")
- (stdio p 5.2)
- (stdio p @["a" 2 "b" 3])
- (stdio p ["a" "b"])
- (stdio p true)
- (stdio p false)
+ (stdio examine "Hello World")
+ (stdio examine 5.2)
+ (stdio examine @["a" 2 "b" 3])
+ (stdio examine ["a" "b"])
+ (stdio examine true)
+ (stdio examine false)
  ```

  This will output:
@@ -38,17 +38,6 @@ by `\1` etc,
  /* => "<a>b<a>b<a>b" */
  ```

- ## `scan`
-
- Find all occurrences of the given Regex. Returns an Array. If it doesn't
- include any groups, it contains Strings. If it contains groups, then it
- contains arrays of strings.
-
- ```
- ('result = ("aaabaac" scan (Regexp from "a+")))
- (result @ 0 else "NOT FOUND") /* => "aaa" */
- ```
-
  ## `match`

  This matches a Regexp over a String and collects the results in a Dictionary.
@@ -67,6 +56,17 @@ referenced by their name and their position.
  (match @ "bar" else "NOT FOUND") /* => "cc" */
  ```

+ ## `scan`
+
+ Finds all occurrences of the given Regexp and returns an Array. If the Regexp
+ contains no groups, the Array contains Strings. If it contains groups, the
+ Array contains Arrays of Strings.
+
+ ```
+ ('result = ("aaabaac" scan (Regexp from "a+")))
+ (result @ 0 else "NOT FOUND") /* => "aaa" */
+ ```
+
  ## `=`

  This compares two strings. It is true if the two strings are equal. Otherwise,
@@ -88,14 +88,24 @@ Concatenate two strings.
  ("aaa" + "bbb") /* => "aaabbb" */
  ```

- ## `to_s`
+ ## `to_boolean`
+
+ This returns true.
+
+ **Example:**
+
+ ```
+ ("aaa" to_boolean) /* => true */
+ ```
+
+ ## `to_string`

  This returns the string itself.

  **Example:**

  ```
- ("aaa" to_s) /* => "aaa" */
+ ("aaa" to_string) /* => "aaa" */
  ```

  ## `inspect`
@@ -45,15 +45,26 @@ false, it will return the second branch.
  (false then { "yes" } else { "no" }) /* => "no" */
  ```

- ## `to_s`
+ ## `to_boolean`
+
+ This returns the object itself.
+
+ **Example:**
+
+ ```
+ (true to_boolean) /* => true */
+ (false to_boolean) /* => false */
+ ```
+
+ ## `to_string`

  This returns a string to represent true and false in output.

  **Example:**

  ```
- (true to_s) /* => "true" */
- (false to_s) /* => "false" */
+ (true to_string) /* => "true" */
+ (false to_string) /* => "false" */
  ```

  ## `inspect`
@@ -23,8 +23,8 @@ Gem::Specification.new do |spec|

  spec.add_dependency "rack", "~> 2.0.4"

- spec.add_development_dependency "bundler", "~> 1.16.1"
- spec.add_development_dependency "rake", "~> 12.3.0"
+ spec.add_development_dependency "bundler", "~> 1.17.2"
+ spec.add_development_dependency "rake", "~> 12.3.2"
  spec.add_development_dependency "minitest", "~> 5.11.3"
- spec.add_development_dependency "racc", "~> 1.4.14"
+ spec.add_development_dependency "racc", "~> 1.4.15"
  end
@@ -43,13 +43,13 @@ end

  ---- header

- require "halunke/lexer"
+ require "halunke/tokenizer"
  require "halunke/nodes"

  ---- inner

  def parse(code)
- @tokens = Lexer.new.tokenize(code)
+ @tokens = Tokenizer.new.tokenize(code)
  do_parse
  end

@@ -81,7 +81,12 @@ module Halunke
  MessageNode = Struct.new(:nodes) do
  def eval(context)
  if nodes.length == 1
- [nodes[0].value, []]
+ if nodes[0].is_a? NumberNode
+ # hack to allow expressions like (1+5)
+ ["+", [nodes[0].eval(context)]]
+ else
+ [nodes[0].value, []]
+ end
  elsif nodes.length.even?
  name = []
  message = []
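
Editor's note on the hunk above: the new branch special-cases a message that consists of a single number literal, presumably because in `(1+5)` the `+5` lexes as a signed number; rewriting that lone number into a `"+"` message makes the expression evaluate as an addition (and `(1-5)` as a subtraction, since the argument keeps its sign). A minimal plain-Ruby sketch of the rewrite, with simplified stand-in node classes rather than the package's real AST:

```ruby
# Sketch (assumed, simplified): stand-in node classes, not Halunke's real AST.
NumberNode = Struct.new(:value) do
  def eval(_context)
    value
  end
end
BarewordNode = Struct.new(:value)

# Mirrors the branch above: a message that is a single number literal
# becomes a "+" message carrying that number as its argument.
def unary_message(node, context = nil)
  if node.is_a?(NumberNode)
    ["+", [node.eval(context)]]
  else
    [node.value, []]
  end
end

p unary_message(NumberNode.new(5))             # => ["+", [5]]   so (1+5) adds
p unary_message(NumberNode.new(-5))            # => ["+", [-5]]  so (1-5) subtracts
p unary_message(BarewordNode.new("to_string")) # => ["to_string", []]
```
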
@@ -1,13 +1,13 @@
  #
  # DO NOT MODIFY!!!!
- # This file is automatically generated by Racc 1.4.14
+ # This file is automatically generated by Racc 1.4.15
  # from Racc grammer file "".
  #

  require 'racc/parser.rb'


- require "halunke/lexer"
+ require "halunke/tokenizer"
  require "halunke/nodes"

  module Halunke
@@ -16,7 +16,7 @@ module Halunke
  module_eval(<<'...end grammar.y/module_eval...', 'grammar.y', 50)

  def parse(code)
- @tokens = Lexer.new.tokenize(code)
+ @tokens = Tokenizer.new.tokenize(code)
  do_parse
  end

@@ -9,10 +9,10 @@
  "then else" { |'self 'true_branch 'false_branch|
  (false_branch call [])
  }
- "to_b" { |'self|
+ "to_boolean" { |'self|
  self
  }
- "to_s" { |'self 'other|
+ "to_string" { |'self 'other|
  "false"
  }
  "inspect" { |'self 'other|
@@ -4,6 +4,9 @@ module Halunke
  "Array",
  [],
  {
+ "@ else" => HFunction.new([:self, :index, :fallback], lambda { |context|
+ context["self"].ruby_value[context["index"].ruby_value] || context["fallback"]
+ }),
  "=" => HFunction.new([:self, :other], lambda { |context|
  return context["false"] if context["self"].ruby_value.length != context["other"].ruby_value.length

@@ -13,9 +16,6 @@ module Halunke
  memo.receive_message(context, "and", [value])
  end
  }),
- "@ else" => HFunction.new([:self, :index, :fallback], lambda { |context|
- context["self"].ruby_value[context["index"].ruby_value] || context["fallback"]
- }),
  "map" => HFunction.new([:self, :fn], lambda { |context|
  return HArray.create_instance(context["self"].ruby_value.map do |x|
  context["fn"].receive_message(context, "call", [HArray.create_instance([x])])
@@ -31,12 +31,12 @@ module Halunke
  context["search_fn"].receive_message(context, "call", [HArray.create_instance([element])]) == context["true"]
  end
  }),
- "to_b" => HFunction.new([:self], lambda { |context|
+ "to_boolean" => HFunction.new([:self], lambda { |context|
  context["true"]
  }),
- "to_s" => HFunction.new([:self], lambda { |context|
+ "to_string" => HFunction.new([:self], lambda { |context|
  inspected_members = context["self"].ruby_value.map do |member|
- member.receive_message(context, "to_s", []).ruby_value
+ member.receive_message(context, "to_string", []).ruby_value
  end
  HString.create_instance("#{inspected_members.join("\n")}")
  }),
@@ -13,14 +13,14 @@ module Halunke
  "merge" => HFunction.new([:self, :other], lambda { |context|
  HDictionary.create_instance(context["self"].ruby_value.merge(context["other"].ruby_value))
  }),
- "to_b" => HFunction.new([:self], lambda { |context|
+ "to_boolean" => HFunction.new([:self], lambda { |context|
  context["true"]
  }),
- "to_s" => HFunction.new([:self], lambda { |context|
+ "to_string" => HFunction.new([:self], lambda { |context|
  x = []
  context["self"].ruby_value.each_pair do |key, value|
- key_s = key.receive_message(context, "to_s", [])
- value_s = value.receive_message(context, "to_s", [])
+ key_s = key.receive_message(context, "to_string", [])
+ value_s = value.receive_message(context, "to_string", [])
  x.push("#{key_s.ruby_value} #{value_s.ruby_value}")
  end
  HString.create_instance(x.join("\n"))
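
A quick plain-Ruby sketch of the output format produced by the Dictionary `to_string` above, under the assumed simplification that keys and values are raw Ruby values rather than wrapped Halunke objects:

```ruby
# Render each pair as "key value", one pair per line.
def dictionary_to_string(hash)
  hash.map { |key, value| "#{key} #{value}" }.join("\n")
end

puts dictionary_to_string("a" => 2, "b" => 3)
# a 2
# b 3
```
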
@@ -16,6 +16,9 @@ module Halunke
  "*" => HFunction.new([:self, :other], lambda { |context|
  HNumber.create_instance(context["self"].ruby_value * context["other"].ruby_value)
  }),
+ "**" => HFunction.new([:self, :other], lambda { |context|
+ HNumber.create_instance(context["self"].ruby_value ** context["other"].ruby_value)
+ }),
  "<" => HFunction.new([:self, :other], lambda { |context|
  if context["self"].ruby_value < context["other"].ruby_value
  context["true"]
@@ -37,16 +40,16 @@ module Halunke
  context["false"]
  end
  }),
- "to_b" => HFunction.new([:self], lambda { |context|
+ "to_boolean" => HFunction.new([:self], lambda { |context|
  context["true"]
  }),
- "to_s" => HFunction.new([:self], lambda { |context|
+ "to_string" => HFunction.new([:self], lambda { |context|
  float_value = context["self"].ruby_value.to_f
  float_value = float_value.to_i if float_value.to_i == float_value
  HString.create_instance(float_value.to_s)
  }),
  "inspect" => HFunction.new([:self], lambda { |context|
- context["self"].receive_message(context, "to_s", [])
+ context["self"].receive_message(context, "to_string", [])
  })
  },
  {},
@@ -4,10 +4,10 @@ module Halunke
  "Regexp",
  [],
  {
- "to_b" => HFunction.new([:self], lambda { |context|
+ "to_boolean" => HFunction.new([:self], lambda { |context|
  context["true"]
  }),
- "to_s" => HFunction.new([:self], lambda { |context|
+ "to_string" => HFunction.new([:self], lambda { |context|
  HString.create_instance(context["self"].ruby_value.inspect)
  }),
  "inspect" => HFunction.new([:self], lambda { |context|
@@ -5,11 +5,11 @@ module Halunke
  [],
  {
  "puts" => HFunction.new([:self, :obj], lambda { |context|
- str = context["obj"].receive_message(context, "to_s", [])
+ str = context["obj"].receive_message(context, "to_string", [])
  puts str.ruby_value
  str
  }),
- "p" => HFunction.new([:self, :obj], lambda { |context|
+ "examine" => HFunction.new([:self, :obj], lambda { |context|
  str = context["obj"].receive_message(context, "inspect", [])
  puts str.ruby_value
  context["obj"]
@@ -48,10 +48,10 @@ module Halunke
  "+" => HFunction.new([:self, :other], lambda { |context|
  HString.create_instance(context["self"].ruby_value + context["other"].ruby_value)
  }),
- "to_b" => HFunction.new([:self], lambda { |context|
+ "to_boolean" => HFunction.new([:self], lambda { |context|
  context["true"]
  }),
- "to_s" => HFunction.new([:self], lambda { |context|
+ "to_string" => HFunction.new([:self], lambda { |context|
  context["self"]
  }),
  "inspect" => HFunction.new([:self], lambda { |context|
@@ -9,10 +9,10 @@
  "then else" { |'self 'true_branch 'false_branch|
  (true_branch call [])
  }
- "to_b" { |'self|
+ "to_boolean" { |'self|
  self
  }
- "to_s" { |'self 'other|
+ "to_string" { |'self 'other|
  "true"
  }
  "inspect" { |'self 'other|
@@ -1,21 +1,21 @@

- # line 1 "lib/halunke/lexer.rl"
+ # line 1 "lib/halunke/tokenizer.rl"
  =begin

- # line 40 "lib/halunke/lexer.rl"
+ # line 40 "lib/halunke/tokenizer.rl"

  =end

  module Halunke
- class Lexer
+ class Tokenizer
  def initialize

- # line 14 "lib/halunke/lexer.rb"
+ # line 14 "lib/halunke/tokenizer.rb"
  class << self
- attr_accessor :_lexer_actions
- private :_lexer_actions, :_lexer_actions=
+ attr_accessor :_tokenizer_actions
+ private :_tokenizer_actions, :_tokenizer_actions=
  end
- self._lexer_actions = [
+ self._tokenizer_actions = [
  0, 1, 0, 1, 1, 1, 2, 1,
  5, 1, 6, 1, 7, 1, 8, 1,
  9, 1, 10, 1, 11, 1, 12, 1,
@@ -26,132 +26,132 @@ self._lexer_actions = [
  ]

  class << self
- attr_accessor :_lexer_key_offsets
- private :_lexer_key_offsets, :_lexer_key_offsets=
+ attr_accessor :_tokenizer_key_offsets
+ private :_tokenizer_key_offsets, :_tokenizer_key_offsets=
  end
- self._lexer_key_offsets = [
- 0, 1, 3, 29, 30, 35, 36, 38,
- 41, 43, 44, 45
+ self._tokenizer_key_offsets = [
+ 0, 1, 3, 29, 30, 35, 37, 39,
+ 42, 44, 45, 46
  ]

  class << self
- attr_accessor :_lexer_trans_keys
- private :_lexer_trans_keys, :_lexer_trans_keys=
+ attr_accessor :_tokenizer_trans_keys
+ private :_tokenizer_trans_keys, :_tokenizer_trans_keys=
  end
- self._lexer_trans_keys = [
+ self._tokenizer_trans_keys = [
  34, 48, 57, 32, 34, 39, 40, 41,
  42, 43, 45, 47, 64, 91, 93, 95,
  123, 124, 125, 9, 13, 48, 57, 60,
  62, 65, 90, 97, 122, 34, 95, 65,
- 90, 97, 122, 47, 48, 57, 46, 48,
- 57, 48, 57, 42, 91, 95, 65, 90,
- 97, 122, 0
+ 90, 97, 122, 42, 47, 48, 57, 46,
+ 48, 57, 48, 57, 42, 91, 95, 65,
+ 90, 97, 122, 0
  ]

  class << self
- attr_accessor :_lexer_single_lengths
- private :_lexer_single_lengths, :_lexer_single_lengths=
+ attr_accessor :_tokenizer_single_lengths
+ private :_tokenizer_single_lengths, :_tokenizer_single_lengths=
  end
- self._lexer_single_lengths = [
- 1, 0, 16, 1, 1, 1, 0, 1,
+ self._tokenizer_single_lengths = [
+ 1, 0, 16, 1, 1, 2, 0, 1,
  0, 1, 1, 1
  ]

  class << self
- attr_accessor :_lexer_range_lengths
- private :_lexer_range_lengths, :_lexer_range_lengths=
+ attr_accessor :_tokenizer_range_lengths
+ private :_tokenizer_range_lengths, :_tokenizer_range_lengths=
  end
- self._lexer_range_lengths = [
+ self._tokenizer_range_lengths = [
  0, 1, 5, 0, 2, 0, 1, 1,
  1, 0, 0, 2
  ]

  class << self
- attr_accessor :_lexer_index_offsets
- private :_lexer_index_offsets, :_lexer_index_offsets=
+ attr_accessor :_tokenizer_index_offsets
+ private :_tokenizer_index_offsets, :_tokenizer_index_offsets=
  end
- self._lexer_index_offsets = [
- 0, 2, 4, 26, 28, 32, 34, 36,
- 39, 41, 43, 45
+ self._tokenizer_index_offsets = [
+ 0, 2, 4, 26, 28, 32, 35, 37,
+ 40, 42, 44, 46
  ]

  class << self
- attr_accessor :_lexer_trans_targs
- private :_lexer_trans_targs, :_lexer_trans_targs=
+ attr_accessor :_tokenizer_trans_targs
+ private :_tokenizer_trans_targs, :_tokenizer_trans_targs=
  end
- self._lexer_trans_targs = [
+ self._tokenizer_trans_targs = [
  2, 0, 8, 2, 2, 3, 4, 2,
  2, 5, 6, 6, 9, 10, 2, 2,
  11, 2, 2, 2, 2, 7, 2, 11,
  11, 2, 2, 0, 4, 4, 4, 2,
- 2, 2, 7, 2, 1, 7, 2, 8,
- 2, 2, 2, 2, 2, 11, 11, 11,
- 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 0
+ 2, 2, 2, 7, 2, 1, 7, 2,
+ 8, 2, 2, 2, 2, 2, 11, 11,
+ 11, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 0
  ]

  class << self
- attr_accessor :_lexer_trans_actions
- private :_lexer_trans_actions, :_lexer_trans_actions=
+ attr_accessor :_tokenizer_trans_actions
+ private :_tokenizer_trans_actions, :_tokenizer_trans_actions=
  end
- self._lexer_trans_actions = [
+ self._tokenizer_trans_actions = [
  7, 0, 0, 41, 31, 5, 50, 11,
  13, 0, 0, 0, 0, 0, 19, 21,
  0, 15, 29, 17, 31, 5, 9, 0,
  0, 33, 7, 0, 47, 47, 47, 45,
- 27, 37, 5, 37, 0, 5, 35, 0,
- 35, 25, 37, 23, 37, 0, 0, 0,
- 37, 43, 41, 39, 45, 37, 37, 35,
- 35, 37, 37, 37, 0
+ 9, 27, 37, 5, 37, 0, 5, 35,
+ 0, 35, 25, 37, 23, 37, 0, 0,
+ 0, 37, 43, 41, 39, 45, 37, 37,
+ 35, 35, 37, 37, 37, 0
  ]

  class << self
- attr_accessor :_lexer_to_state_actions
- private :_lexer_to_state_actions, :_lexer_to_state_actions=
+ attr_accessor :_tokenizer_to_state_actions
+ private :_tokenizer_to_state_actions, :_tokenizer_to_state_actions=
  end
- self._lexer_to_state_actions = [
+ self._tokenizer_to_state_actions = [
  0, 0, 1, 0, 0, 0, 0, 0,
  0, 0, 0, 0
  ]

  class << self
- attr_accessor :_lexer_from_state_actions
- private :_lexer_from_state_actions, :_lexer_from_state_actions=
+ attr_accessor :_tokenizer_from_state_actions
+ private :_tokenizer_from_state_actions, :_tokenizer_from_state_actions=
  end
- self._lexer_from_state_actions = [
+ self._tokenizer_from_state_actions = [
  0, 0, 3, 0, 0, 0, 0, 0,
  0, 0, 0, 0
  ]

  class << self
- attr_accessor :_lexer_eof_trans
- private :_lexer_eof_trans, :_lexer_eof_trans=
+ attr_accessor :_tokenizer_eof_trans
+ private :_tokenizer_eof_trans, :_tokenizer_eof_trans=
  end
- self._lexer_eof_trans = [
- 50, 51, 0, 52, 53, 60, 60, 57,
- 57, 60, 60, 60
+ self._tokenizer_eof_trans = [
+ 51, 52, 0, 53, 54, 61, 61, 58,
+ 58, 61, 61, 61
  ]

  class << self
- attr_accessor :lexer_start
+ attr_accessor :tokenizer_start
  end
- self.lexer_start = 2;
+ self.tokenizer_start = 2;
  class << self
- attr_accessor :lexer_first_final
+ attr_accessor :tokenizer_first_final
  end
- self.lexer_first_final = 2;
+ self.tokenizer_first_final = 2;
  class << self
- attr_accessor :lexer_error
+ attr_accessor :tokenizer_error
  end
- self.lexer_error = -1;
+ self.tokenizer_error = -1;

  class << self
- attr_accessor :lexer_en_main
+ attr_accessor :tokenizer_en_main
  end
- self.lexer_en_main = 2;
+ self.tokenizer_en_main = 2;


- # line 47 "lib/halunke/lexer.rl"
+ # line 47 "lib/halunke/tokenizer.rl"
  @tokens = []
  end

@@ -160,19 +160,19 @@ self.lexer_en_main = 2;
  eof = data.length


- # line 164 "lib/halunke/lexer.rb"
+ # line 164 "lib/halunke/tokenizer.rb"
  begin
  p ||= 0
  pe ||= data.length
- cs = lexer_start
+ cs = tokenizer_start
  ts = nil
  te = nil
  act = 0
  end

- # line 55 "lib/halunke/lexer.rl"
+ # line 55 "lib/halunke/tokenizer.rl"

- # line 176 "lib/halunke/lexer.rb"
+ # line 176 "lib/halunke/tokenizer.rb"
  begin
  _klen, _trans, _keys, _acts, _nacts = nil
  _goto_level = 0
@@ -190,27 +190,27 @@ begin
  end
  end
  if _goto_level <= _resume
- _acts = _lexer_from_state_actions[cs]
- _nacts = _lexer_actions[_acts]
+ _acts = _tokenizer_from_state_actions[cs]
+ _nacts = _tokenizer_actions[_acts]
  _acts += 1
  while _nacts > 0
  _nacts -= 1
  _acts += 1
- case _lexer_actions[_acts - 1]
+ case _tokenizer_actions[_acts - 1]
  when 1 then
  # line 1 "NONE"
  begin
  ts = p
  end
- # line 206 "lib/halunke/lexer.rb"
+ # line 206 "lib/halunke/tokenizer.rb"
  end # from state action switch
  end
  if _trigger_goto
  next
  end
- _keys = _lexer_key_offsets[cs]
- _trans = _lexer_index_offsets[cs]
- _klen = _lexer_single_lengths[cs]
+ _keys = _tokenizer_key_offsets[cs]
+ _trans = _tokenizer_index_offsets[cs]
+ _klen = _tokenizer_single_lengths[cs]
  _break_match = false

  begin
@@ -222,9 +222,9 @@ ts = p
  break if _upper < _lower
  _mid = _lower + ( (_upper - _lower) >> 1 )

- if data[p].ord < _lexer_trans_keys[_mid]
+ if data[p].ord < _tokenizer_trans_keys[_mid]
  _upper = _mid - 1
- elsif data[p].ord > _lexer_trans_keys[_mid]
+ elsif data[p].ord > _tokenizer_trans_keys[_mid]
  _lower = _mid + 1
  else
  _trans += (_mid - _keys)
@@ -236,16 +236,16 @@ ts = p
  _keys += _klen
  _trans += _klen
  end
- _klen = _lexer_range_lengths[cs]
+ _klen = _tokenizer_range_lengths[cs]
  if _klen > 0
  _lower = _keys
  _upper = _keys + (_klen << 1) - 2
  loop do
  break if _upper < _lower
  _mid = _lower + (((_upper-_lower) >> 1) & ~1)
- if data[p].ord < _lexer_trans_keys[_mid]
+ if data[p].ord < _tokenizer_trans_keys[_mid]
  _upper = _mid - 2
- elsif data[p].ord > _lexer_trans_keys[_mid+1]
+ elsif data[p].ord > _tokenizer_trans_keys[_mid+1]
  _lower = _mid + 2
  else
  _trans += ((_mid - _keys) >> 1)
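
Editor's note: the generated scanner resolves each transition with two binary searches over per-state tables, first the single keys and then the (lo, hi) ranges, so the new `**` support shows up purely as table data (apparently the extra key `42`, i.e. `'*'`, added to one state above). A rough plain-Ruby sketch of the range search, using a hypothetical flat table rather than the generated names:

```ruby
# Hypothetical flat range table [lo1, hi1, lo2, hi2, ...], mirroring the
# paired binary search in the generated code above.
def range_index(ranges, byte)
  lower = 0
  upper = ranges.length - 2
  while lower <= upper
    mid = lower + (((upper - lower) >> 1) & ~1) # keep mid on a pair boundary
    if byte < ranges[mid]
      upper = mid - 2
    elsif byte > ranges[mid + 1]
      lower = mid + 2
    else
      return mid >> 1 # index of the matching (lo, hi) pair
    end
  end
  nil
end

p range_index([48, 57, 65, 90, 97, 122], "a".ord) # => 2 (the a-z range)
p range_index([48, 57, 65, 90, 97, 122], "!".ord) # => nil (no range matches)
```
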
@@ -259,137 +259,137 @@ ts = p
  end while false
  end
  if _goto_level <= _eof_trans
- cs = _lexer_trans_targs[_trans]
- if _lexer_trans_actions[_trans] != 0
- _acts = _lexer_trans_actions[_trans]
- _nacts = _lexer_actions[_acts]
+ cs = _tokenizer_trans_targs[_trans]
+ if _tokenizer_trans_actions[_trans] != 0
+ _acts = _tokenizer_trans_actions[_trans]
+ _nacts = _tokenizer_actions[_acts]
  _acts += 1
  while _nacts > 0
  _nacts -= 1
  _acts += 1
- case _lexer_actions[_acts - 1]
+ case _tokenizer_actions[_acts - 1]
  when 2 then
  # line 1 "NONE"
  begin
  te = p+1
  end
  when 3 then
- # line 24 "lib/halunke/lexer.rl"
+ # line 24 "lib/halunke/tokenizer.rl"
  begin
  act = 3; end
  when 4 then
- # line 37 "lib/halunke/lexer.rl"
+ # line 37 "lib/halunke/tokenizer.rl"
  begin
  act = 16; end
  when 5 then
- # line 23 "lib/halunke/lexer.rl"
+ # line 23 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:STRING, data[ts+1...te-1]) end
  end
  when 6 then
- # line 25 "lib/halunke/lexer.rl"
+ # line 25 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:BAREWORD, data[ts...te]) end
  end
  when 7 then
- # line 26 "lib/halunke/lexer.rl"
+ # line 26 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:OPEN_PAREN, data[ts...te]) end
  end
  when 8 then
- # line 27 "lib/halunke/lexer.rl"
+ # line 27 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:CLOSE_PAREN, data[ts...te]) end
  end
  when 9 then
- # line 28 "lib/halunke/lexer.rl"
+ # line 28 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:OPEN_CURLY, data[ts...te]) end
  end
  when 10 then
- # line 29 "lib/halunke/lexer.rl"
+ # line 29 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:CLOSE_CURLY, data[ts...te]) end
  end
  when 11 then
- # line 30 "lib/halunke/lexer.rl"
+ # line 30 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:OPEN_BRACKET, data[ts...te]) end
  end
  when 12 then
- # line 31 "lib/halunke/lexer.rl"
+ # line 31 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:CLOSE_BRACKET, data[ts...te]) end
  end
  when 13 then
- # line 32 "lib/halunke/lexer.rl"
+ # line 32 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:OPEN_DICT_BRACKET, data[ts...te]) end
  end
  when 14 then
- # line 33 "lib/halunke/lexer.rl"
+ # line 33 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:START_COMMENT, data[ts...te]) end
  end
  when 15 then
- # line 34 "lib/halunke/lexer.rl"
+ # line 34 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:END_COMMENT, data[ts...te]) end
  end
  when 16 then
- # line 35 "lib/halunke/lexer.rl"
+ # line 35 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin emit(:BAR, data[ts...te]) end
  end
  when 17 then
- # line 36 "lib/halunke/lexer.rl"
+ # line 36 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  end
  when 18 then
- # line 37 "lib/halunke/lexer.rl"
+ # line 37 "lib/halunke/tokenizer.rl"
  begin
  te = p+1
  begin raise "Could not lex '#{ data[ts...te] }'" end
  end
  when 19 then
- # line 22 "lib/halunke/lexer.rl"
+ # line 22 "lib/halunke/tokenizer.rl"
  begin
  te = p
  p = p - 1; begin emit(:NUMBER, data[ts...te].to_r) end
  end
  when 20 then
- # line 25 "lib/halunke/lexer.rl"
+ # line 25 "lib/halunke/tokenizer.rl"
  begin
  te = p
  p = p - 1; begin emit(:BAREWORD, data[ts...te]) end
  end
  when 21 then
- # line 37 "lib/halunke/lexer.rl"
+ # line 37 "lib/halunke/tokenizer.rl"
  begin
  te = p
  p = p - 1; begin raise "Could not lex '#{ data[ts...te] }'" end
  end
  when 22 then
- # line 22 "lib/halunke/lexer.rl"
+ # line 22 "lib/halunke/tokenizer.rl"
  begin
  begin p = ((te))-1; end
  begin emit(:NUMBER, data[ts...te].to_r) end
  end
  when 23 then
- # line 37 "lib/halunke/lexer.rl"
+ # line 37 "lib/halunke/tokenizer.rl"
  begin
  begin p = ((te))-1; end
  begin raise "Could not lex '#{ data[ts...te] }'" end
@@ -406,7 +406,7 @@ when 24 then
  raise "Could not lex '#{ data[ts...te] }'" end
  end
  end
- # line 410 "lib/halunke/lexer.rb"
+ # line 410 "lib/halunke/tokenizer.rb"
  end # action switch
  end
  end
@@ -415,18 +415,18 @@ end
  end
  end
  if _goto_level <= _again
- _acts = _lexer_to_state_actions[cs]
- _nacts = _lexer_actions[_acts]
+ _acts = _tokenizer_to_state_actions[cs]
+ _nacts = _tokenizer_actions[_acts]
  _acts += 1
  while _nacts > 0
  _nacts -= 1
  _acts += 1
- case _lexer_actions[_acts - 1]
+ case _tokenizer_actions[_acts - 1]
  when 0 then
  # line 1 "NONE"
  begin
  ts = nil; end
- # line 430 "lib/halunke/lexer.rb"
+ # line 430 "lib/halunke/tokenizer.rb"
  end # to state action switch
  end
  if _trigger_goto
@@ -440,8 +440,8 @@ ts = nil; end
  end
  if _goto_level <= _test_eof
  if p == eof
- if _lexer_eof_trans[cs] > 0
- _trans = _lexer_eof_trans[cs] - 1;
+ if _tokenizer_eof_trans[cs] > 0
+ _trans = _tokenizer_eof_trans[cs] - 1;
  _goto_level = _eof_trans
  next;
  end
@@ -453,7 +453,7 @@ end
  end
  end

- # line 56 "lib/halunke/lexer.rl"
+ # line 56 "lib/halunke/tokenizer.rl"

  @tokens
  end