keisan 0.7.0 → 0.8.4

Sign up to get free protection for your applications and to get access to all the features.
Files changed (69) hide show
  1. checksums.yaml +5 -5
  2. data/.travis.yml +6 -3
  3. data/README.md +47 -3
  4. data/keisan.gemspec +5 -5
  5. data/lib/keisan.rb +9 -3
  6. data/lib/keisan/ast.rb +25 -0
  7. data/lib/keisan/ast/bitwise_left_shift.rb +17 -0
  8. data/lib/keisan/ast/bitwise_right_shift.rb +17 -0
  9. data/lib/keisan/ast/block.rb +4 -0
  10. data/lib/keisan/ast/boolean.rb +1 -1
  11. data/lib/keisan/ast/builder.rb +2 -2
  12. data/lib/keisan/ast/cell.rb +10 -0
  13. data/lib/keisan/ast/date.rb +23 -0
  14. data/lib/keisan/ast/date_time_methods.rb +75 -0
  15. data/lib/keisan/ast/function.rb +9 -0
  16. data/lib/keisan/ast/function_assignment.rb +16 -6
  17. data/lib/keisan/ast/hash.rb +4 -0
  18. data/lib/keisan/ast/logical_and.rb +20 -3
  19. data/lib/keisan/ast/logical_equal.rb +6 -5
  20. data/lib/keisan/ast/logical_greater_than.rb +6 -4
  21. data/lib/keisan/ast/logical_greater_than_or_equal_to.rb +6 -4
  22. data/lib/keisan/ast/logical_less_than.rb +6 -4
  23. data/lib/keisan/ast/logical_less_than_or_equal_to.rb +6 -4
  24. data/lib/keisan/ast/logical_not_equal.rb +6 -5
  25. data/lib/keisan/ast/logical_operator.rb +24 -0
  26. data/lib/keisan/ast/logical_or.rb +18 -1
  27. data/lib/keisan/ast/node.rb +25 -0
  28. data/lib/keisan/ast/number.rb +24 -0
  29. data/lib/keisan/ast/operator.rb +3 -1
  30. data/lib/keisan/ast/parent.rb +5 -1
  31. data/lib/keisan/ast/plus.rb +10 -0
  32. data/lib/keisan/ast/time.rb +23 -0
  33. data/lib/keisan/ast/unary_inverse.rb +1 -1
  34. data/lib/keisan/ast/unary_operator.rb +1 -1
  35. data/lib/keisan/ast/variable.rb +10 -9
  36. data/lib/keisan/calculator.rb +17 -3
  37. data/lib/keisan/context.rb +27 -10
  38. data/lib/keisan/evaluator.rb +16 -4
  39. data/lib/keisan/exceptions.rb +3 -0
  40. data/lib/keisan/function.rb +6 -0
  41. data/lib/keisan/functions/break.rb +11 -0
  42. data/lib/keisan/functions/cmath_function.rb +3 -1
  43. data/lib/keisan/functions/continue.rb +11 -0
  44. data/lib/keisan/functions/default_registry.rb +39 -0
  45. data/lib/keisan/functions/enumerable_function.rb +10 -2
  46. data/lib/keisan/functions/expression_function.rb +16 -9
  47. data/lib/keisan/functions/filter.rb +6 -0
  48. data/lib/keisan/functions/loop_control_flow_function.rb +22 -0
  49. data/lib/keisan/functions/map.rb +6 -0
  50. data/lib/keisan/functions/proc_function.rb +2 -2
  51. data/lib/keisan/functions/reduce.rb +5 -0
  52. data/lib/keisan/functions/replace.rb +6 -6
  53. data/lib/keisan/functions/while.rb +7 -1
  54. data/lib/keisan/parser.rb +7 -5
  55. data/lib/keisan/parsing/bitwise_left_shift.rb +9 -0
  56. data/lib/keisan/parsing/bitwise_right_shift.rb +9 -0
  57. data/lib/keisan/parsing/function.rb +1 -1
  58. data/lib/keisan/parsing/hash.rb +2 -2
  59. data/lib/keisan/string_and_group_parser.rb +229 -0
  60. data/lib/keisan/token.rb +1 -1
  61. data/lib/keisan/tokenizer.rb +20 -18
  62. data/lib/keisan/tokens/assignment.rb +3 -1
  63. data/lib/keisan/tokens/bitwise_shift.rb +23 -0
  64. data/lib/keisan/tokens/group.rb +1 -7
  65. data/lib/keisan/tokens/string.rb +2 -4
  66. data/lib/keisan/util.rb +19 -0
  67. data/lib/keisan/variables/default_registry.rb +2 -1
  68. data/lib/keisan/version.rb +1 -1
  69. metadata +40 -28
@@ -10,6 +10,12 @@ module Keisan
10
10
  super("map")
11
11
  end
12
12
 
13
+ protected
14
+
15
+ def shadowing_variable_names(children)
16
+ children.size == 3 ? children[1..1] : children[1..2]
17
+ end
18
+
13
19
  private
14
20
 
15
21
  def evaluate_list(list, arguments, expression, context)
@@ -28,7 +28,7 @@ module Keisan
28
28
 
29
29
  ast_function.instance_variable_set(
30
30
  :@children,
31
- ast_function.children.map {|child| child.evaluate(context).to_node}
31
+ ast_function.children.map {|child| child.simplify(context).to_node}
32
32
  )
33
33
 
34
34
  if ast_function.children.all? {|child| child.well_defined?(context)}
@@ -44,7 +44,7 @@ module Keisan
44
44
 
45
45
  ast_function.instance_variable_set(
46
46
  :@children,
47
- ast_function.children.map {|child| child.evaluate(context)}
47
+ ast_function.children.map {|child| child.simplify(context)}
48
48
  )
49
49
 
50
50
  if ast_function.children.all? {|child| child.is_a?(AST::ConstantLiteral)}
@@ -6,12 +6,17 @@ module Keisan
6
6
  # Reduces (list, initial, accumulator, variable, expression)
7
7
  # e.g. reduce([1,2,3,4], 0, total, x, total+x)
8
8
  # should give 10
9
+ # When hash: (hash, initial, accumulator, key, value, expression)
9
10
  def initialize
10
11
  super("reduce")
11
12
  end
12
13
 
13
14
  protected
14
15
 
16
+ def shadowing_variable_names(children)
17
+ children.size == 5 ? children[2..3] : children[2..4]
18
+ end
19
+
15
20
  def verify_arguments!(arguments)
16
21
  unless arguments[1..-1].all? {|argument| argument.is_a?(AST::Variable)}
17
22
  raise Exceptions::InvalidFunctionError.new("Middle arguments to #{name} must be variables")
@@ -10,19 +10,19 @@ module Keisan
10
10
  evaluate(ast_function, context).value(context)
11
11
  end
12
12
 
13
- def evaluate(ast_function, context = nil)
13
+ def simplify(ast_function, context = nil)
14
14
  context ||= Context.new
15
15
  expression, variable, replacement = expression_variable_replacement(ast_function)
16
16
 
17
- expression = expression.evaluate(context)
18
- replacement = replacement.evaluate(context)
17
+ expression = expression.simplify(context)
18
+ replacement = replacement.simplify(context)
19
19
 
20
- expression.replace(variable, replacement).evaluate(context)
20
+ expression.replace(variable, replacement).simplify(context)
21
21
  end
22
22
 
23
- def simplify(ast_function, context = nil)
23
+ def evaluate(ast_function, context = nil)
24
24
  context ||= Context.new
25
- evaluate(ast_function, context).simplify(context)
25
+ simplify(ast_function, context).evaluate(context)
26
26
  end
27
27
 
28
28
  private
@@ -28,7 +28,13 @@ module Keisan
28
28
  current = Keisan::AST::Null.new
29
29
 
30
30
  while logical_node_evaluates_to_true(logical_node, context)
31
- current = body_node.evaluated(context)
31
+ begin
32
+ current = body_node.evaluated(context)
33
+ rescue Exceptions::BreakError
34
+ break
35
+ rescue Exceptions::ContinueError
36
+ next
37
+ end
32
38
  end
33
39
 
34
40
  current
@@ -1,6 +1,6 @@
1
1
  module Keisan
2
2
  class Parser
3
- KEYWORDS = %w(let puts).freeze
3
+ KEYWORDS = %w(let puts break continue).freeze
4
4
 
5
5
  attr_reader :tokens, :components
6
6
 
@@ -39,7 +39,7 @@ module Keisan
39
39
  end
40
40
 
41
41
  def parse_multi_line!
42
- line_parsers = @tokens.split {|token| token.is_a?(Tokens::LineSeparator)}.map {|tokens| self.class.new(tokens: tokens)}
42
+ line_parsers = Util.array_split(@tokens) {|token| token.is_a?(Tokens::LineSeparator)}.map {|tokens| self.class.new(tokens: tokens)}
43
43
  @components = []
44
44
  line_parsers.each.with_index do |line_parser, i|
45
45
  @components += line_parser.components
@@ -52,7 +52,7 @@ module Keisan
52
52
  def parse_keyword!
53
53
  keyword = tokens.first.string
54
54
  arguments = if tokens[1].is_a?(Tokens::Group)
55
- tokens[1].sub_tokens.split {|token| token.is_a?(Tokens::Comma)}.map {|argument_tokens|
55
+ Util.array_split(tokens[1].sub_tokens) {|token| token.is_a?(Tokens::Comma)}.map {|argument_tokens|
56
56
  Parsing::Argument.new(argument_tokens)
57
57
  }
58
58
  else
@@ -212,7 +212,7 @@ module Keisan
212
212
  @components << Parsing::List.new(arguments_from_group(token))
213
213
  when :curly
214
214
  if token.sub_tokens.any? {|token| token.is_a?(Tokens::Colon)}
215
- @components << Parsing::Hash.new(token.sub_tokens.split {|token| token.is_a?(Tokens::Comma)})
215
+ @components << Parsing::Hash.new(Util.array_split(token.sub_tokens) {|token| token.is_a?(Tokens::Comma)})
216
216
  else
217
217
  @components << Parsing::CurlyGroup.new(token.sub_tokens)
218
218
  end
@@ -241,6 +241,8 @@ module Keisan
241
241
  :"&" => Parsing::BitwiseAnd,
242
242
  :"|" => Parsing::BitwiseOr,
243
243
  :"^" => Parsing::BitwiseXor,
244
+ :<< => Parsing::BitwiseLeftShift,
245
+ :>> => Parsing::BitwiseRightShift,
244
246
  :"==" => Parsing::LogicalEqual,
245
247
  :"!=" => Parsing::LogicalNotEqual,
246
248
  :"&&" => Parsing::LogicalAnd,
@@ -280,7 +282,7 @@ module Keisan
280
282
  if token.sub_tokens.empty?
281
283
  []
282
284
  else
283
- token.sub_tokens.split {|sub_token| sub_token.is_a?(Tokens::Comma)}.map do |sub_tokens|
285
+ Util.array_split(token.sub_tokens) {|sub_token| sub_token.is_a?(Tokens::Comma)}.map do |sub_tokens|
284
286
  Parsing::Argument.new(sub_tokens)
285
287
  end
286
288
  end
@@ -0,0 +1,9 @@
module Keisan
  module Parsing
    # Parsing element for the bitwise left shift operator (`<<`).
    # NOTE(review): inherits from Parsing::BitwiseOperator (defined elsewhere),
    # which presumably uses #node_class to build the AST node — confirm there.
    class BitwiseLeftShift < BitwiseOperator
      # The AST node class this operator produces when the parse tree is built.
      def node_class
        AST::BitwiseLeftShift
      end
    end
  end
end
@@ -0,0 +1,9 @@
module Keisan
  module Parsing
    # Parsing element for the bitwise right shift operator (`>>`).
    # NOTE(review): inherits from Parsing::BitwiseOperator (defined elsewhere),
    # which presumably uses #node_class to build the AST node — confirm there.
    class BitwiseRightShift < BitwiseOperator
      # The AST node class this operator produces when the parse tree is built.
      def node_class
        AST::BitwiseRightShift
      end
    end
  end
end
@@ -5,7 +5,7 @@ module Keisan
5
5
 
6
6
  def initialize(name, arguments)
7
7
  @name = name
8
- @arguments = Array.wrap(arguments)
8
+ @arguments = Array(arguments)
9
9
  end
10
10
  end
11
11
  end
@@ -4,7 +4,7 @@ module Keisan
4
4
  attr_reader :key_value_pairs
5
5
 
6
6
  def initialize(key_value_pairs)
7
- @key_value_pairs = Array.wrap(key_value_pairs).map {|key_value_pair|
7
+ @key_value_pairs = Array(key_value_pairs).map {|key_value_pair|
8
8
  validate_and_extract_key_value_pair(key_value_pair)
9
9
  }
10
10
  end
@@ -12,7 +12,7 @@ module Keisan
12
12
  private
13
13
 
14
14
  def validate_and_extract_key_value_pair(key_value_pair)
15
- key, value = key_value_pair.split {|token| token.is_a?(Tokens::Colon)}
15
+ key, value = Util.array_split(key_value_pair) {|token| token.is_a?(Tokens::Colon)}
16
16
  raise Exceptions::ParseError.new("Invalid hash") unless key.size == 1 && value.size >= 1
17
17
 
18
18
  key = key.first
@@ -0,0 +1,229 @@
module Keisan
  # First parsing pass over a raw expression string: splits it into an
  # ordered list of "portions" — quoted strings, brace-delimited groups,
  # comments, and runs of any other characters.  This lets the tokenizer
  # treat strings and groups atomically, so quote/brace characters inside
  # string literals cannot confuse later stages.
  class StringAndGroupParser
    # Base class for one parsed slice of the expression.  `start_index` is
    # the index where the portion begins in the expression; `end_index`
    # (one past the last consumed character) is set by subclasses that
    # track it (StringPortion, OtherPortion, CommentPortion).
    class Portion
      attr_reader :start_index, :end_index

      def initialize(start_index)
        @start_index = start_index
      end
    end

    # A quoted string literal, including its surrounding quotes.  Escape
    # sequences are processed while scanning, so `string` holds the opening
    # quote, the unescaped content, and the closing quote.
    class StringPortion < Portion
      attr_reader :string

      def initialize(expression, start_index)
        super(start_index)

        # The first character is the opening quote (single or double);
        # it also determines which character terminates the string.
        @string = expression[start_index]
        @end_index = start_index + 1

        while @end_index < expression.size
          if expression[@end_index] == quote_type
            @string << quote_type
            @end_index += 1
            # Successfully parsed the string
            return
          end

          n, c = process_next_character(expression, @end_index)
          @string << c
          @end_index += n
        end

        # Fell off the end of the expression without a closing quote.
        raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, no closing quote #{quote_type}")
      end

      def size
        string.size
      end

      def to_s
        string
      end

      private

      # Returns number of processed input characters, and the output character
      def process_next_character(expression, index)
        if expression[index] == "\\"
          # escape character: consumes two input characters, emits one
          [2, escaped_character(expression[index + 1])]
        else
          [1, expression[index]]
        end
      end

      # The quote character that opened this string (' or ").
      def quote_type
        @string[0]
      end

      # Maps the character following a backslash to the character it
      # denotes; raises on unrecognized escape sequences.
      def escaped_character(character)
        case character
        when "\\", '"', "'"
          character
        when "a"
          "\a"
        when "b"
          "\b"
        when "r"
          "\r"
        when "n"
          "\n"
        when "s"
          "\s"
        when "t"
          "\t"
        else
          raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, unknown escape character: \"\\#{character}\"")
        end
      end
    end

    # A brace-delimited group `(...)`, `{...}`, or `[...]`, whose interior
    # is recursively parsed into its own list of portions.
    # NOTE(review): unlike the other portions, end_index is never set here;
    # callers advance using `size` instead.
    class GroupPortion < Portion
      attr_reader :opening_brace, :closing_brace, :portions, :size

      OPENING_TO_CLOSING_BRACE = {
        "(" => ")",
        "{" => "}",
        "[" => "]",
      }.freeze

      def initialize(expression, start_index)
        super(start_index)

        case expression[start_index]
        when OPEN_GROUP_REGEX
          @opening_brace = expression[start_index]
        else
          raise Keisan::Exceptions::TokenizingError.new("Internal error, GroupPortion did not start with brace")
        end

        @closing_brace = OPENING_TO_CLOSING_BRACE[opening_brace]

        # Recursively parse the interior, stopping at the matching closer.
        parser = StringAndGroupParser.new(expression, start_index: start_index + 1, ending_character: closing_brace)
        @portions = parser.portions
        @size = parser.size + 2 # interior plus the two braces

        # The recursive parser stops at end-of-expression too, so verify the
        # closing brace is actually present where we expect it.
        if start_index + size > expression.size || expression[start_index + size - 1] != closing_brace
          raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, group with opening brace #{opening_brace} did not have closing brace")
        end
      end

      def to_s
        opening_brace + portions.map(&:to_s).join + closing_brace
      end
    end

    # A run of ordinary characters: everything up to the next string quote,
    # brace, comment character, or the end of the expression.
    class OtherPortion < Portion
      attr_reader :string

      def initialize(expression, start_index)
        super(start_index)

        case expression[start_index]
        when STRING_CHARACTER_REGEX, OPEN_GROUP_REGEX, CLOSED_GROUP_REGEX
          raise Keisan::Exceptions::TokenizingError.new("Internal error, OtherPortion should not have string/braces at start")
        else
          index = start_index + 1
        end

        while index < expression.size
          case expression[index]
          when STRING_CHARACTER_REGEX, OPEN_GROUP_REGEX, CLOSED_GROUP_REGEX, COMMENT_CHARACTER_REGEX
            break
          else
            index += 1
          end
        end

        @end_index = index
        @string = expression[start_index...end_index]
      end

      def size
        string.size
      end

      def to_s
        string
      end
    end

    # A comment: from a '#' character through the end of the line (or the
    # end of the expression, whichever comes first).
    class CommentPortion < Portion
      attr_reader :string

      def initialize(expression, start_index)
        super(start_index)

        if expression[start_index] != '#'
          raise Keisan::Exceptions::TokenizingError.new("Comment should start with '#'")
        else
          index = start_index + 1
        end

        while index < expression.size
          break if expression[index] == "\n"
          index += 1
        end

        @end_index = index
        @string = expression[start_index...end_index]
      end

      def size
        string.size
      end

      def to_s
        string
      end
    end

    # An ordered array of the portions parsed out of the expression, and the
    # total number of characters consumed by this parser.
    attr_reader :portions, :size

    COMMENT_CHARACTER_REGEX = /[#]/
    STRING_CHARACTER_REGEX = /["']/
    OPEN_GROUP_REGEX = /[\(\{\[]/
    CLOSED_GROUP_REGEX = /[\)\}\]]/

    # Ending character is used as a second ending condition besides expression size
    # (GroupPortion passes the matching closing brace here).
    def initialize(expression, start_index: 0, ending_character: nil)
      index = start_index
      @portions = []

      while index < expression.size && (ending_character.nil? || expression[index] != ending_character)
        case expression[index]
        when STRING_CHARACTER_REGEX
          portion = StringPortion.new(expression, index)
          index = portion.end_index
          @portions << portion

        when OPEN_GROUP_REGEX
          portion = GroupPortion.new(expression, index)
          index += portion.size
          @portions << portion

        when CLOSED_GROUP_REGEX
          # A closer with no matching opener at this nesting level.
          # Fix: report the brace actually encountered (was expression[start_index],
          # which pointed at the start of the parse, not the offending brace).
          raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, unexpected closing brace #{expression[index]}")

        when COMMENT_CHARACTER_REGEX
          portion = CommentPortion.new(expression, index)
          index += portion.size
          @portions << portion

        else
          portion = OtherPortion.new(expression, index)
          index += portion.size
          @portions << portion
        end
      end

      @size = index - start_index
    end

    def to_s
      portions.map(&:to_s).join
    end
  end
end
@@ -12,7 +12,7 @@ module Keisan
12
12
  end
13
13
 
14
14
  def self.type
15
- @type ||= self.to_s.split("::").last.underscore.to_sym
15
+ @type ||= Util.underscore(self.to_s.split("::").last).to_sym
16
16
  end
17
17
 
18
18
  def regex
@@ -1,13 +1,12 @@
1
1
  module Keisan
2
2
  class Tokenizer
3
3
  TOKEN_CLASSES = [
4
- Tokens::Group,
5
- Tokens::String,
6
4
  Tokens::Null,
7
5
  Tokens::Boolean,
8
6
  Tokens::Word,
9
7
  Tokens::Number,
10
8
  Tokens::Assignment,
9
+ Tokens::BitwiseShift,
11
10
  Tokens::LogicalOperator,
12
11
  Tokens::ArithmeticOperator,
13
12
  Tokens::BitwiseOperator,
@@ -25,28 +24,31 @@ module Keisan
25
24
  attr_reader :expression, :tokens
26
25
 
27
26
  def initialize(expression)
28
- @expression = self.class.normalize_expression(expression)
29
- @scan = @expression.scan(TOKEN_REGEX)
30
- @tokens = tokenize!
31
- end
27
+ @expression = expression
32
28
 
33
- def self.normalize_expression(expression)
34
- expression = normalize_line_delimiters(expression)
35
- expression = remove_comments(expression)
36
- end
29
+ portions = StringAndGroupParser.new(expression).portions.reject do |portion|
30
+ portion.is_a? StringAndGroupParser::CommentPortion
31
+ end
37
32
 
38
- private
33
+ @tokens = portions.inject([]) do |tokens, portion|
34
+ case portion
35
+ when StringAndGroupParser::StringPortion
36
+ tokens << Tokens::String.new(portion.to_s)
37
+ when StringAndGroupParser::GroupPortion
38
+ tokens << Tokens::Group.new(portion.to_s)
39
+ when StringAndGroupParser::OtherPortion
40
+ scan = portion.to_s.scan(TOKEN_REGEX)
41
+ tokens += tokenize!(scan)
42
+ end
39
43
 
40
- def self.normalize_line_delimiters(expression)
41
- expression.gsub(/\n/, ";")
44
+ tokens
45
+ end
42
46
  end
43
47
 
44
- def self.remove_comments(expression)
45
- expression.gsub(/#[^;]*/, "")
46
- end
48
+ private
47
49
 
48
- def tokenize!
49
- @scan.map do |scan_result|
50
+ def tokenize!(scan)
51
+ scan.map do |scan_result|
50
52
  i = scan_result.find_index {|token| !token.nil?}
51
53
  token_string = scan_result[i]
52
54
  token = TOKEN_CLASSES[i].new(token_string)