hayadentaku 3.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132) hide show
  1. checksums.yaml +7 -0
  2. data/.github/workflows/rspec.yml +26 -0
  3. data/.github/workflows/rubocop.yml +14 -0
  4. data/.gitignore +14 -0
  5. data/.pryrc +2 -0
  6. data/.rubocop.yml +114 -0
  7. data/.travis.yml +10 -0
  8. data/CHANGELOG.md +328 -0
  9. data/Gemfile +4 -0
  10. data/LICENSE +21 -0
  11. data/README.md +352 -0
  12. data/Rakefile +31 -0
  13. data/hayadentaku.gemspec +35 -0
  14. data/lib/dentaku/ast/access.rb +44 -0
  15. data/lib/dentaku/ast/arithmetic.rb +292 -0
  16. data/lib/dentaku/ast/array.rb +38 -0
  17. data/lib/dentaku/ast/bitwise.rb +42 -0
  18. data/lib/dentaku/ast/case/case_conditional.rb +38 -0
  19. data/lib/dentaku/ast/case/case_else.rb +35 -0
  20. data/lib/dentaku/ast/case/case_switch_variable.rb +35 -0
  21. data/lib/dentaku/ast/case/case_then.rb +35 -0
  22. data/lib/dentaku/ast/case/case_when.rb +39 -0
  23. data/lib/dentaku/ast/case.rb +93 -0
  24. data/lib/dentaku/ast/combinators.rb +50 -0
  25. data/lib/dentaku/ast/comparators.rb +88 -0
  26. data/lib/dentaku/ast/datetime.rb +8 -0
  27. data/lib/dentaku/ast/function.rb +56 -0
  28. data/lib/dentaku/ast/function_registry.rb +107 -0
  29. data/lib/dentaku/ast/functions/abs.rb +5 -0
  30. data/lib/dentaku/ast/functions/all.rb +19 -0
  31. data/lib/dentaku/ast/functions/and.rb +25 -0
  32. data/lib/dentaku/ast/functions/any.rb +19 -0
  33. data/lib/dentaku/ast/functions/avg.rb +13 -0
  34. data/lib/dentaku/ast/functions/count.rb +26 -0
  35. data/lib/dentaku/ast/functions/duration.rb +51 -0
  36. data/lib/dentaku/ast/functions/enum.rb +54 -0
  37. data/lib/dentaku/ast/functions/filter.rb +21 -0
  38. data/lib/dentaku/ast/functions/if.rb +47 -0
  39. data/lib/dentaku/ast/functions/intercept.rb +33 -0
  40. data/lib/dentaku/ast/functions/map.rb +19 -0
  41. data/lib/dentaku/ast/functions/max.rb +5 -0
  42. data/lib/dentaku/ast/functions/min.rb +5 -0
  43. data/lib/dentaku/ast/functions/mul.rb +12 -0
  44. data/lib/dentaku/ast/functions/not.rb +5 -0
  45. data/lib/dentaku/ast/functions/or.rb +25 -0
  46. data/lib/dentaku/ast/functions/pluck.rb +34 -0
  47. data/lib/dentaku/ast/functions/reduce.rb +60 -0
  48. data/lib/dentaku/ast/functions/round.rb +5 -0
  49. data/lib/dentaku/ast/functions/rounddown.rb +8 -0
  50. data/lib/dentaku/ast/functions/roundup.rb +8 -0
  51. data/lib/dentaku/ast/functions/ruby_math.rb +57 -0
  52. data/lib/dentaku/ast/functions/string_functions.rb +212 -0
  53. data/lib/dentaku/ast/functions/sum.rb +12 -0
  54. data/lib/dentaku/ast/functions/switch.rb +8 -0
  55. data/lib/dentaku/ast/functions/xor.rb +44 -0
  56. data/lib/dentaku/ast/grouping.rb +23 -0
  57. data/lib/dentaku/ast/identifier.rb +52 -0
  58. data/lib/dentaku/ast/literal.rb +30 -0
  59. data/lib/dentaku/ast/logical.rb +8 -0
  60. data/lib/dentaku/ast/negation.rb +54 -0
  61. data/lib/dentaku/ast/nil.rb +13 -0
  62. data/lib/dentaku/ast/node.rb +29 -0
  63. data/lib/dentaku/ast/numeric.rb +8 -0
  64. data/lib/dentaku/ast/operation.rb +44 -0
  65. data/lib/dentaku/ast/string.rb +15 -0
  66. data/lib/dentaku/ast.rb +42 -0
  67. data/lib/dentaku/bulk_expression_solver.rb +158 -0
  68. data/lib/dentaku/calculator.rb +192 -0
  69. data/lib/dentaku/date_arithmetic.rb +60 -0
  70. data/lib/dentaku/dependency_resolver.rb +29 -0
  71. data/lib/dentaku/exceptions.rb +116 -0
  72. data/lib/dentaku/flat_hash.rb +161 -0
  73. data/lib/dentaku/parser.rb +318 -0
  74. data/lib/dentaku/print_visitor.rb +112 -0
  75. data/lib/dentaku/string_casing.rb +7 -0
  76. data/lib/dentaku/token.rb +48 -0
  77. data/lib/dentaku/token_matcher.rb +138 -0
  78. data/lib/dentaku/token_matchers.rb +29 -0
  79. data/lib/dentaku/token_scanner.rb +240 -0
  80. data/lib/dentaku/tokenizer.rb +127 -0
  81. data/lib/dentaku/version.rb +3 -0
  82. data/lib/dentaku/visitor/infix.rb +86 -0
  83. data/lib/dentaku.rb +69 -0
  84. data/spec/ast/abs_spec.rb +26 -0
  85. data/spec/ast/addition_spec.rb +67 -0
  86. data/spec/ast/all_spec.rb +38 -0
  87. data/spec/ast/and_function_spec.rb +35 -0
  88. data/spec/ast/and_spec.rb +32 -0
  89. data/spec/ast/any_spec.rb +36 -0
  90. data/spec/ast/arithmetic_spec.rb +147 -0
  91. data/spec/ast/avg_spec.rb +42 -0
  92. data/spec/ast/case_spec.rb +84 -0
  93. data/spec/ast/comparator_spec.rb +87 -0
  94. data/spec/ast/count_spec.rb +40 -0
  95. data/spec/ast/division_spec.rb +64 -0
  96. data/spec/ast/filter_spec.rb +25 -0
  97. data/spec/ast/function_spec.rb +69 -0
  98. data/spec/ast/intercept_spec.rb +30 -0
  99. data/spec/ast/map_spec.rb +40 -0
  100. data/spec/ast/max_spec.rb +33 -0
  101. data/spec/ast/min_spec.rb +33 -0
  102. data/spec/ast/mul_spec.rb +43 -0
  103. data/spec/ast/negation_spec.rb +48 -0
  104. data/spec/ast/node_spec.rb +43 -0
  105. data/spec/ast/numeric_spec.rb +16 -0
  106. data/spec/ast/or_spec.rb +35 -0
  107. data/spec/ast/pluck_spec.rb +49 -0
  108. data/spec/ast/reduce_spec.rb +22 -0
  109. data/spec/ast/round_spec.rb +35 -0
  110. data/spec/ast/rounddown_spec.rb +35 -0
  111. data/spec/ast/roundup_spec.rb +35 -0
  112. data/spec/ast/string_functions_spec.rb +217 -0
  113. data/spec/ast/sum_spec.rb +43 -0
  114. data/spec/ast/switch_spec.rb +30 -0
  115. data/spec/ast/xor_spec.rb +35 -0
  116. data/spec/benchmark.rb +70 -0
  117. data/spec/bulk_expression_solver_spec.rb +241 -0
  118. data/spec/calculator_spec.rb +1003 -0
  119. data/spec/dentaku_spec.rb +52 -0
  120. data/spec/dependency_resolver_spec.rb +18 -0
  121. data/spec/exceptions_spec.rb +9 -0
  122. data/spec/external_function_spec.rb +177 -0
  123. data/spec/parser_spec.rb +183 -0
  124. data/spec/print_visitor_spec.rb +77 -0
  125. data/spec/spec_helper.rb +69 -0
  126. data/spec/token_matcher_spec.rb +134 -0
  127. data/spec/token_scanner_spec.rb +49 -0
  128. data/spec/token_spec.rb +16 -0
  129. data/spec/tokenizer_spec.rb +375 -0
  130. data/spec/visitor/infix_spec.rb +52 -0
  131. data/spec/visitor_spec.rb +139 -0
  132. metadata +353 -0
@@ -0,0 +1,138 @@
1
require 'dentaku/token'

module Dentaku
  # Pattern object used by the parsing layer to test whether a Token (or a
  # run of Tokens) has a given category and/or value. Matchers are built via
  # the class-level factory methods (e.g. TokenMatcher.numeric), combined
  # with `|`, negated with `invert`, and quantified with `star` / `plus`
  # (regexp-style repetition over a token stream).
  class TokenMatcher
    attr_reader :children, :categories, :values

    def initialize(categories = nil, values = nil, children = [])
      # store categories and values as hash to optimize key lookup, h/t @jan-mangs
      @categories = [categories].compact.flatten.each_with_object({}) { |c, h| h[c] = 1 }
      @values = [values].compact.flatten.each_with_object({}) { |v, h| h[v] = 1 }
      @children = children.compact
      @invert = false

      # Quantifier state: by default a matcher consumes exactly one token.
      @min = 1
      @max = 1
      @range = (@min..@max)
    end

    # Alternation: returns a new composite matcher whose children are the
    # leaf matchers of both operands. The :nomatch category/value are inert
    # placeholders; a composite delegates matching to its children (see #==).
    def |(other_matcher)
      self.class.new(:nomatch, :nomatch, leaf_matchers + other_matcher.leaf_matchers)
    end

    # Toggles negation in place and returns self so calls can be chained.
    def invert
      @invert = ! @invert
      self
    end

    # A matcher "equals" a token when it matches it; a composite matches when
    # any child does. This lets matchers be compared directly against tokens.
    def ==(token)
      leaf_matcher? ? matches_token?(token) : any_child_matches_token?(token)
    end

    # Greedily consumes matching tokens from token_stream starting at offset,
    # up to @max. Returns [matched?, matched_tokens] where matched? is true
    # when the number of consumed tokens falls within the quantifier range.
    def match(token_stream, offset = 0)
      matched_tokens = []
      matched = false

      while self == token_stream[matched_tokens.length + offset] && matched_tokens.length < @max
        matched_tokens << token_stream[matched_tokens.length + offset]
      end

      if @range.cover?(matched_tokens.length)
        matched = true
      end

      [matched, matched_tokens]
    end

    # Marks this matcher as anchored (used by anchored_minus); returns self.
    def caret
      @caret = true
      self
    end

    def caret?
      @caret
    end

    # Zero-or-more quantifier (regexp '*'); mutates in place, returns self.
    def star
      @min = 0
      @max = Float::INFINITY
      @range = (@min..@max)
      self
    end

    # One-or-more quantifier (regexp '+'); mutates in place, returns self.
    def plus
      @max = Float::INFINITY
      @range = (@min..@max)
      self
    end

    def leaf_matcher?
      children.empty?
    end

    # Leaves participating in alternation: self for a leaf, else children.
    def leaf_matchers
      leaf_matcher? ? [self] : children
    end

    private

    def any_child_matches_token?(token)
      children.any? { |child| child == token }
    end

    # XOR with @invert flips the result for inverted matchers.
    def matches_token?(token)
      return false if token.nil?
      (category_match(token.category) && value_match(token.value)) ^ @invert
    end

    # An empty category set acts as a wildcard.
    def category_match(category)
      @categories.empty? || @categories.key?(category)
    end

    # An empty value set acts as a wildcard.
    def value_match(value)
      @values.empty? || @values.key?(value)
    end

    # --- canned matchers for the grammar ---------------------------------
    # NOTE: the `private` above affects only instance methods; these
    # `def self.` methods remain publicly callable.
    def self.datetime; new(:datetime); end
    def self.numeric; new(:numeric); end
    def self.string; new(:string); end
    def self.logical; new(:logical); end
    def self.value
      new(:datetime) | new(:numeric) | new(:string) | new(:logical)
    end

    def self.addsub; new(:operator, [:add, :subtract]); end
    def self.subtract; new(:operator, :subtract); end
    def self.anchored_minus; new(:operator, :subtract).caret; end
    def self.muldiv; new(:operator, [:multiply, :divide]); end
    def self.pow; new(:operator, :pow); end
    def self.mod; new(:operator, :mod); end
    def self.combinator; new(:combinator); end

    def self.comparator; new(:comparator); end
    def self.comp_gt; new(:comparator, [:gt, :ge]); end
    def self.comp_lt; new(:comparator, [:lt, :le]); end

    def self.open; new(:grouping, :open); end
    def self.close; new(:grouping, :close); end
    def self.comma; new(:grouping, :comma); end
    def self.non_group; new(:grouping).invert; end
    def self.non_group_star; new(:grouping).invert.star; end
    def self.non_close_plus; new(:grouping, :close).invert.plus; end
    def self.arguments; (value | comma).plus; end

    def self.if; new(:function, :if); end
    def self.round; new(:function, :round); end
    def self.roundup; new(:function, :roundup); end
    def self.rounddown; new(:function, :rounddown); end
    def self.not; new(:function, :not); end

    # Any other class-level name yields a matcher for that function token,
    # e.g. TokenMatcher.min matches the `min` function.
    def self.method_missing(name, *args, &block)
      new(:function, name)
    end

    def self.respond_to_missing?(name, include_priv)
      true
    end
  end
end
@@ -0,0 +1,29 @@
1
module Dentaku
  # Lookup helpers that translate symbolic names (:numeric, :open, ...) into
  # shared, memoized TokenMatcher instances.
  module TokenMatchers
    # Maps each symbol to its canonical matcher, preserving order.
    def self.token_matchers(*symbols)
      symbols.map { |symbol| matcher(symbol) }
    end

    # Matcher sequence for a function call: NAME ( interior... )
    def self.function_token_matchers(function_name, *symbols)
      [TokenMatcher.send(function_name)] +
        token_matchers(:open, *symbols, :close)
    end

    # Fetches the memoized matcher for `symbol`, building the full table on
    # first use. Raises for symbols outside the known set.
    def self.matcher(symbol)
      @matchers ||= build_matchers

      @matchers.fetch(symbol) do
        raise "Unknown token symbol #{ symbol }"
      end
    end

    # One shared TokenMatcher per known symbol.
    def self.build_matchers
      names = [
        :datetime, :numeric, :string, :addsub, :subtract, :muldiv, :pow, :mod,
        :comparator, :comp_gt, :comp_lt, :open, :close, :comma,
        :non_close_plus, :non_group, :non_group_star, :arguments,
        :logical, :combinator, :if, :round, :roundup, :rounddown, :not,
        :anchored_minus, :math_neg_pow, :math_neg_mul
      ]
      names.map { |name| [name, TokenMatcher.send(name)] }.to_h
    end
    private_class_method :build_matchers
  end
end
@@ -0,0 +1,240 @@
1
require 'bigdecimal'
require 'strscan'
require 'time'
require 'dentaku/string_casing'
require 'dentaku/token'

module Dentaku
  # A TokenScanner recognizes one lexical category (numbers, strings,
  # operators, ...) at the head of the input. The Tokenizer walks a
  # registered, ordered list of scanners over the input; the first scanner
  # that matches wins.
  class TokenScanner
    extend StringCasing

    # Matches date/time literals, optionally with time-of-day and zone.
    # NOTE(review): `\d{2}\d{2}?` accepts 2-4 digit years — assumed
    # intentional for parity with upstream; confirm before tightening.
    DATE_TIME_REGEXP = /\d{2}\d{2}?-\d{1,2}-\d{1,2}([ |T]\d{1,2}:\d{1,2}:\d{1,2}(\.\d*)?)? ?(Z|((\+|\-)\d{2}\:?\d{2}))?(?!\d)/.freeze

    # category  - token category symbol (e.g. :numeric)
    # regexp    - pattern source (String or Regexp) for this category
    # converter - optional lambda mapping the raw match to the token value
    # condition - optional lambda deciding applicability from the last token
    def initialize(category, regexp, converter = nil, condition = nil)
      @category = category
      @regexp = %r{\A(#{ regexp })}i
      # StringScanner anchors implicitly at the current position, so an
      # unanchored copy of the same pattern is what its `scan` wants.
      @ss_regexp = %r{(#{ regexp })}i
      @converter = converter
      @condition = condition
    end

    # Legacy entry point: takes a string, returns false or an Array of Tokens.
    def scan(string, last_token = nil)
      if (m = @regexp.match(string)) && (@condition.nil? || @condition.call(last_token))
        value = raw = m.to_s
        value = @converter.call(raw) if @converter

        return Array(value).map do |v|
          Token === v ? v : Token.new(@category, v, raw)
        end
      end

      false
    end

    # Fast path used by the Tokenizer: scans directly against a StringScanner
    # without slicing strings, and returns either a single Token, an Array of
    # Tokens, or nil. Avoids the per-call `Array(value).map` allocation in the
    # common (single-token) case.
    def scan_at(strscanner, last_token = nil)
      return nil if @condition && !@condition.call(last_token)

      raw = strscanner.scan(@ss_regexp)
      return nil unless raw

      if @converter
        value = @converter.call(raw)
        if value.is_a?(Array)
          value.map! { |v| Token === v ? v : Token.new(@category, v, raw) }
        elsif Token === value
          value
        else
          Token.new(@category, value, raw)
        end
      else
        Token.new(@category, raw, raw)
      end
    end

    class << self
      attr_reader :case_sensitive

      # Ordered ids of the built-in scanners; order is significant because
      # the first match wins.
      def available_scanners
        [
          :null,
          :whitespace,
          :datetime, # before numeric so it can pick up timestamps
          :numeric,
          :hexadecimal,
          :double_quoted_string,
          :single_quoted_string,
          :negate,
          :combinator,
          :operator,
          :grouping,
          :array,
          :access,
          :case_statement,
          :comparator,
          :boolean,
          :function,
          :identifier,
          :quoted_identifier
        ]
      end

      def register_default_scanners
        register_scanners(available_scanners)
      end

      # Replaces the scanner table with instances for the given ids.
      def register_scanners(scanner_ids)
        @scanners = scanner_ids.each_with_object({}) do |id, scanners|
          scanners[id] = self.send(id)
        end
        reset_scanner_cache!
      end

      # Adds or replaces a single custom scanner under `id`.
      def register_scanner(id, scanner)
        @scanners[id] = scanner
        reset_scanner_cache!
      end

      # Restricts the table to the given ids (destructive on @scanners).
      def scanners=(scanner_ids)
        @scanners.select! { |k, v| scanner_ids.include?(k) }
        reset_scanner_cache!
      end

      # Returns the scanner list for the given options; also records the
      # :case_sensitive flag for StringCasing#standardize_case.
      def scanners(options = {})
        @case_sensitive = options.fetch(:case_sensitive, false)
        raw_date_literals = options.fetch(:raw_date_literals, true)

        # Cache the two possible scanner lists so repeated tokenize calls don't
        # rebuild the array each time (and the inner tokenize loop doesn't
        # rebuild it on every iteration).
        if raw_date_literals
          @cached_full ||= @scanners.values
        else
          @cached_no_datetime ||= @scanners.reject { |k, _| k == :datetime }.values
        end
      end

      # Invalidate cached scanner lists; called by register_scanner(s) so tests
      # that swap scanners in and out keep working.
      def reset_scanner_cache!
        @cached_full = nil
        @cached_no_datetime = nil
      end

      def whitespace
        new(:whitespace, '\s+')
      end

      def null
        new(:null, 'null\b')
      end

      # NOTE: Convert to DateTime as Array(Time) returns the parts of the time for some reason
      def datetime
        new(:datetime, DATE_TIME_REGEXP, lambda { |raw| Time.parse(raw).to_datetime })
      end

      # Integers stay Integer; anything with a dot or exponent becomes
      # BigDecimal for exact decimal arithmetic.
      def numeric
        new(:numeric, '((?:\d+(\.\d+)?|\.\d+)(?:(e|E)(\+|-)?\d+)?)\b', lambda { |raw|
          raw =~ /(\.|e|E)/ ? BigDecimal(raw) : raw.to_i
        })
      end

      def hexadecimal
        new(:numeric, '(0x[0-9a-f]+)\b', lambda { |raw| raw[2..-1].to_i(16) })
      end

      def double_quoted_string
        new(:string, '"[^"]*"', lambda { |raw| raw.gsub(/^"|"$/, '') })
      end

      def single_quoted_string
        new(:string, "'[^']*'", lambda { |raw| raw.gsub(/^'|'$/, '') })
      end

      # '-' is unary negation (not subtraction) when it appears at the start
      # of the input or directly after an operator, comparator, combinator,
      # open paren/bracket, or comma.
      def negate
        new(:operator, '-', lambda { |raw| :negate }, lambda { |last_token|
          last_token.nil? ||
          last_token.is?(:operator) ||
          last_token.is?(:comparator) ||
          last_token.is?(:combinator) ||
          last_token.value == :open ||
          last_token.value == :comma ||
          last_token.value == :lbracket ||
          last_token.value == :array_start
        })
      end

      def operator
        names = {
          pow: '^', add: '+', subtract: '-', multiply: '*', divide: '/', mod: '%', bitor: '|', bitand: '&', bitshiftleft: '<<', bitshiftright: '>>'
        }.invert
        new(:operator, '\^|\+|-|\*|\/|%|\||&|<<|>>', lambda { |raw| names[raw] })
      end

      def grouping
        names = { open: '(', close: ')', comma: ',' }.invert
        new(:grouping, '\(|\)|,', lambda { |raw| names[raw] })
      end

      def array
        names = { array_start: '{', array_end: '}', }.invert
        new(:array, '\{|\}|,', lambda { |raw| names[raw] })
      end

      def access
        names = { lbracket: '[', rbracket: ']' }.invert
        new(:access, '\[|\]', lambda { |raw| names[raw] })
      end

      def case_statement
        names = { open: 'case', close: 'end', then: 'then', when: 'when', else: 'else' }.invert
        new(:case, '(case|end|then|when|else)\b', lambda { |raw| names[raw.downcase] })
      end

      def comparator
        names = { le: '<=', ge: '>=', ne: '!=', lt: '<', gt: '>', eq: '=' }.invert
        alternate = { ne: '<>', eq: '==' }.invert
        new(:comparator, '<=|>=|!=|<>|<|>|==|=', lambda { |raw| names[raw] || alternate[raw] })
      end

      # Word forms ('and'/'or') fall through names.fetch to a plain symbol.
      def combinator
        names = { and: '&&', or: '||' }.invert
        new(:combinator, '(and|or|&&|\|\|)\s', lambda { |raw|
          norm = raw.strip.downcase
          names.fetch(norm) { norm.to_sym }
        })
      end

      def boolean
        new(:logical, '(true|false)\b', lambda { |raw| raw.strip.downcase == 'true' })
      end

      # A function call splits into two tokens: the function name and the
      # opening paren the pattern consumed.
      def function
        new(:function, '\w+!?\s*\(', lambda do |raw|
          function_name = raw.gsub('(', '')
          [
            Token.new(:function, function_name.strip.downcase.to_sym, function_name),
            Token.new(:grouping, :open, '(')
          ]
        end)
      end

      def identifier
        new(:identifier, '[[[:word:]]\.]+\b', lambda { |raw| standardize_case(raw.strip) })
      end

      def quoted_identifier
        new(:identifier, '`[^`]*`', lambda { |raw| raw.gsub(/^`|`$/, '') })
      end
    end

    register_default_scanners
  end
end
@@ -0,0 +1,127 @@
1
require 'strscan'
require 'dentaku/token'
require 'dentaku/token_matcher'
require 'dentaku/token_scanner'

module Dentaku
  # Converts an expression String into an Array of Dentaku::Token, tracking
  # parenthesis nesting and applying user-configured function aliases.
  class Tokenizer
    attr_reader :aliases

    LPAREN = TokenMatcher.new(:grouping, :open)
    RPAREN = TokenMatcher.new(:grouping, :close)

    # string  - the expression to tokenize
    # options - :aliases (Hash of canonical => [alias, ...]),
    #           :case_sensitive, :raw_date_literals
    #
    # Returns the token Array; raises TokenizerError on malformed input.
    def tokenize(string, options = {})
      @nesting = 0
      @tokens = []
      @aliases = options.fetch(:aliases, global_aliases)
      input = strip_comments(string.to_s.dup)
      input = replace_aliases(input)

      scanner_options = {
        case_sensitive: options.fetch(:case_sensitive, false),
        raw_date_literals: options.fetch(:raw_date_literals, true)
      }

      # Hoist the scanner list out of the per-position loop. The previous code
      # rebuilt this Array (via Hash#select + .values) on every advance, which
      # was a significant chunk of tokenize allocations.
      scanners = TokenScanner.scanners(scanner_options)

      ss = StringScanner.new(input)

      until ss.eos?
        last_token = @tokens.last
        scanned = false

        # First scanner that matches at the current position wins.
        scanners.each do |scanner|
          result = scanner.scan_at(ss, last_token)
          next unless result

          if result.is_a?(Array)
            result.each { |t| handle_token(t, ss) }
          else
            handle_token(result, ss)
          end

          scanned = true
          break
        end

        fail! :parse_error, at: ss.rest unless scanned
      end

      fail! :too_many_opening_parentheses if @nesting > 0

      @tokens
    end

    def last_token
      @tokens.last
    end

    private

    # Validates a scanned token, updates paren nesting, and appends it
    # (whitespace tokens are dropped).
    def handle_token(token, ss)
      if token.empty?
        fail! :unexpected_zero_width_match,
              token_category: token.category, at: ss.rest
      end

      @nesting += 1 if LPAREN == token
      @nesting -= 1 if RPAREN == token
      fail! :too_many_closing_parentheses if @nesting < 0

      @tokens << token unless token.is?(:whitespace)
    end

    public

    # Removes /* ... */ comments before scanning.
    def strip_comments(input)
      input.gsub(/\/\*[^*]*\*+(?:[^*\/][^*]*\*+)*\//, '')
    end

    # Rewrites aliased function names (matched case-insensitively, and only
    # when directly followed by '(') to their canonical names.
    def replace_aliases(string)
      return string unless @aliases.any?

      string.gsub!(alias_regex) do |match|
        match_regex = /^#{Regexp.escape(match)}$/i

        @aliases.detect do |(_key, aliases)|
          !aliases.grep(match_regex).empty?
        end.first
      end

      string
    end

    def alias_regex
      values = @aliases.values.flatten.join('|')
      /(?<=\p{Punct}|[[:space:]]|\A)(#{values})(?=\()/i
    end

    private

    def global_aliases
      return {} unless Dentaku.respond_to?(:aliases)
      Dentaku.aliases
    end

    # Builds the error message for `reason` and raises the matching
    # TokenizerError subtype.
    def fail!(reason, **meta)
      message =
        case reason
        when :parse_error
          "parse error at: '#{meta.fetch(:at)}'"
        when :too_many_opening_parentheses
          "too many opening parentheses"
        when :too_many_closing_parentheses
          "too many closing parentheses"
        when :unexpected_zero_width_match
          # Fix: handle_token supplies the category under :token_category, so
          # fetching :category raised KeyError here instead of reporting the
          # intended tokenizer error.
          "unexpected zero-width match (:#{meta.fetch(:token_category)}) at '#{meta.fetch(:at)}'"
        else
          raise ::ArgumentError, "Unhandled #{reason}"
        end

      raise TokenizerError.for(reason, **meta), message
    end
  end
end
@@ -0,0 +1,3 @@
1
module Dentaku
  # Gem version string (semantic versioning: MAJOR.MINOR.PATCH).
  VERSION = "3.5.7"
end
@@ -0,0 +1,86 @@
1
# Infix traversal support for AST processors.
#
# Mixing this module into a processor gives it `visit`, which walks an AST in
# infix order (left subtree, node, right subtree) and invokes the host's
# `process` hook once per node encountered.
module Dentaku
  module Visitor
    module Infix
      # Entry point: dispatch through the node's `accept`, which calls back
      # into the appropriate visit_* method below.
      def visit(ast)
        ast.accept(self)
      end

      # Hosts must override this hook; it receives each visited node.
      def process(_ast)
        raise NotImplementedError
      end

      # Function calls: visit every argument first, then the function node.
      def visit_function(node)
        node.args.each { |argument| visit(argument) }
        process(node)
      end

      # Binary operations: left subtree, the operator node itself, then the
      # right subtree — this ordering is what makes the traversal "infix".
      def visit_operation(node)
        visit(node.left) if node.left
        process(node)
        visit(node.right) if node.right
      end

      # Every remaining node kind is a simple leaf from the traversal's point
      # of view: hand it straight to process.
      [
        :identifier, :operand, :case, :switch, :case_conditional,
        :when, :then, :else, :negation, :access, :literal, :nil, :array
      ].each do |kind|
        define_method(:"visit_#{kind}") { |node| process(node) }
      end
    end
  end
end
data/lib/dentaku.rb ADDED
@@ -0,0 +1,69 @@
1
require "bigdecimal"
require "concurrent"
require "dentaku/calculator"
require "dentaku/version"

# Top-level convenience API: module-level evaluate helpers, opt-in caching
# switches, global function aliases, and a per-thread Calculator instance.
module Dentaku
  @enable_ast_caching = false
  @enable_dependency_order_caching = false
  @enable_identifier_caching = false
  @aliases = {}

  class << self
    # Global function-name aliases consulted by the tokenizer.
    attr_accessor :aliases

    # Evaluates `expression` against `data`; error handling is delegated to
    # the underlying Calculator#evaluate (optional block for fallback).
    def evaluate(expression, data = {}, &block)
      calculator.value.evaluate(expression, data, &block)
    end

    # Strict variant: delegates to Calculator#evaluate!, which raises on
    # unbound identifiers / parse failures.
    def evaluate!(expression, data = {}, &block)
      calculator.value.evaluate!(expression, data, &block)
    end

    # Turns on every caching layer at once.
    def enable_caching!
      enable_ast_cache!
      enable_dependency_order_cache!
      enable_identifier_cache!
    end

    def enable_ast_cache!
      @enable_ast_caching = true
    end

    def cache_ast?
      @enable_ast_caching
    end

    def enable_dependency_order_cache!
      @enable_dependency_order_caching = true
    end

    def cache_dependency_order?
      @enable_dependency_order_caching
    end

    def enable_identifier_cache!
      @enable_identifier_caching = true
    end

    def cache_identifier?
      @enable_identifier_caching
    end

    # Lazily-built, thread-local Calculator so concurrent callers never share
    # mutable calculator state.
    def calculator
      @calculator ||= Concurrent::ThreadLocalVar.new { Dentaku::Calculator.new }
    end
  end
end

# Kernel-level shorthand: Dentaku("1 + x", x: 2)
def Dentaku(expression, data = {})
  Dentaku.evaluate(expression, data)
end

# Strict shorthand; raises on unbound identifiers.
def Dentaku!(expression, data = {})
  Dentaku.evaluate!(expression, data)
end