@the-trybe/formula-engine 1.0.0
- package/.claude/settings.local.json +6 -0
- package/PRD_FORMULA_ENGINE.md +1863 -0
- package/README.md +382 -0
- package/dist/decimal-utils.d.ts +180 -0
- package/dist/decimal-utils.js +355 -0
- package/dist/dependency-extractor.d.ts +20 -0
- package/dist/dependency-extractor.js +103 -0
- package/dist/dependency-graph.d.ts +60 -0
- package/dist/dependency-graph.js +252 -0
- package/dist/errors.d.ts +161 -0
- package/dist/errors.js +260 -0
- package/dist/evaluator.d.ts +51 -0
- package/dist/evaluator.js +494 -0
- package/dist/formula-engine.d.ts +79 -0
- package/dist/formula-engine.js +355 -0
- package/dist/functions.d.ts +3 -0
- package/dist/functions.js +720 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +61 -0
- package/dist/lexer.d.ts +25 -0
- package/dist/lexer.js +357 -0
- package/dist/parser.d.ts +32 -0
- package/dist/parser.js +372 -0
- package/dist/types.d.ts +228 -0
- package/dist/types.js +62 -0
- package/jest.config.js +23 -0
- package/package.json +35 -0
- package/src/decimal-utils.ts +408 -0
- package/src/dependency-extractor.ts +117 -0
- package/src/dependency-graph.test.ts +238 -0
- package/src/dependency-graph.ts +288 -0
- package/src/errors.ts +296 -0
- package/src/evaluator.ts +604 -0
- package/src/formula-engine.test.ts +660 -0
- package/src/formula-engine.ts +430 -0
- package/src/functions.ts +770 -0
- package/src/index.ts +103 -0
- package/src/lexer.test.ts +288 -0
- package/src/lexer.ts +394 -0
- package/src/parser.test.ts +349 -0
- package/src/parser.ts +449 -0
- package/src/types.ts +347 -0
- package/tsconfig.json +29 -0
package/src/index.ts
ADDED
@@ -0,0 +1,103 @@
+// Main exports
+export { FormulaEngine } from './formula-engine';
+
+// Types
+export {
+  // Configuration
+  FormulaEngineConfig,
+  FormulaDefinition,
+  EvaluationContext,
+  FunctionDefinition,
+  ArgumentType,
+  FunctionImplementation,
+
+  // Decimal
+  DecimalConfig,
+  DecimalRoundingMode,
+  RoundingConfig,
+
+  // Results
+  EvaluationResult,
+  EvaluationResultSet,
+  ValidationResult,
+  CacheStats,
+
+  // Error handling
+  ErrorBehavior,
+  SecurityConfig,
+
+  // AST nodes
+  ASTNode,
+  DecimalLiteral,
+  NumberLiteral,
+  StringLiteral,
+  BooleanLiteral,
+  NullLiteral,
+  ArrayLiteral,
+  VariableReference,
+  BinaryOperation,
+  UnaryOperation,
+  ConditionalExpression,
+  FunctionCall,
+  MemberAccess,
+  IndexAccess,
+
+  // Graph
+  DependencyGraph,
+
+  // Values
+  ValueType,
+  FormulaValue,
+} from './types';
+
+// Components (for advanced usage)
+export { Parser } from './parser';
+export { Lexer } from './lexer';
+export { Evaluator } from './evaluator';
+export { DependencyExtractor } from './dependency-extractor';
+export { DependencyGraph as DependencyGraphImpl, DependencyGraphBuilder } from './dependency-graph';
+export { DecimalUtils, Decimal, DecimalLike } from './decimal-utils';
+export { createBuiltInFunctions } from './functions';
+
+// Errors
+export {
+  FormulaEngineError,
+  GeneralFormulaError,
+  ErrorCategory,
+
+  // Parse errors
+  SyntaxError,
+  UnexpectedTokenError,
+  UnterminatedStringError,
+  InvalidNumberError,
+
+  // Validation errors
+  CircularDependencyError,
+  UndefinedVariableError,
+  UndefinedFunctionError,
+  DuplicateFormulaError,
+
+  // Evaluation errors
+  DivisionByZeroError,
+  TypeMismatchError,
+  ArgumentCountError,
+  InvalidOperationError,
+  PropertyAccessError,
+  IndexAccessError,
+
+  // Decimal errors
+  DecimalError,
+  DecimalOverflowError,
+  DecimalUnderflowError,
+  DecimalDivisionByZeroError,
+  InvalidDecimalError,
+
+  // Configuration errors
+  ConfigurationError,
+
+  // Security errors
+  SecurityError,
+  MaxIterationsError,
+  MaxRecursionError,
+  MaxExpressionLengthError,
+} from './errors';
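The index.ts hunk above re-exports the lexer, parser, and evaluator components alongside the main FormulaEngine entry point. As a minimal, hypothetical orientation sketch, assuming only the Lexer calls exercised in the lexer.test.ts hunk below (the package-root import path and the console logging are illustrative, not taken from this diff):

import { Lexer } from '@the-trybe/formula-engine';

// Tokenize a formula string; per the tests below, each token exposes a
// `type` and a `value`, and the token stream is terminated by an EOF token.
const tokens = new Lexer('$price * $quantity + 10').tokenize();
for (const token of tokens) {
  console.log(token.type, token.value);
}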
package/src/lexer.test.ts
ADDED
@@ -0,0 +1,288 @@
+import { Lexer } from './lexer';
+import { TokenType } from './types';
+import { SyntaxError, UnterminatedStringError } from './errors';
+
+describe('Lexer', () => {
+  describe('Numbers', () => {
+    it('should tokenize integers', () => {
+      const lexer = new Lexer('42');
+      const tokens = lexer.tokenize();
+
+      expect(tokens).toHaveLength(2);
+      expect(tokens[0].type).toBe(TokenType.NUMBER);
+      expect(tokens[0].value).toBe('42');
+    });
+
+    it('should tokenize decimals', () => {
+      const lexer = new Lexer('3.14159');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.NUMBER);
+      expect(tokens[0].value).toBe('3.14159');
+    });
+
+    it('should tokenize negative numbers as separate tokens', () => {
+      const lexer = new Lexer('-5');
+      const tokens = lexer.tokenize();
+
+      expect(tokens).toHaveLength(3);
+      expect(tokens[0].type).toBe(TokenType.MINUS);
+      expect(tokens[1].type).toBe(TokenType.NUMBER);
+      expect(tokens[1].value).toBe('5');
+    });
+
+    it('should tokenize scientific notation as float', () => {
+      const lexer = new Lexer('1e6');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.NUMBER);
+      expect(tokens[0].value).toBe(1000000);
+    });
+
+    it('should tokenize float suffix', () => {
+      const lexer = new Lexer('3.14f');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.NUMBER);
+      expect(typeof tokens[0].value).toBe('number');
+    });
+  });
+
+  describe('Strings', () => {
+    it('should tokenize double-quoted strings', () => {
+      const lexer = new Lexer('"hello world"');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.STRING);
+      expect(tokens[0].value).toBe('hello world');
+    });
+
+    it('should tokenize single-quoted strings', () => {
+      const lexer = new Lexer("'hello'");
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.STRING);
+      expect(tokens[0].value).toBe('hello');
+    });
+
+    it('should handle escape sequences', () => {
+      const lexer = new Lexer('"line1\\nline2"');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].value).toBe('line1\nline2');
+    });
+
+    it('should throw on unterminated strings', () => {
+      const lexer = new Lexer('"unterminated');
+
+      expect(() => lexer.tokenize()).toThrow(UnterminatedStringError);
+    });
+  });
+
+  describe('Booleans and Null', () => {
+    it('should tokenize true', () => {
+      const lexer = new Lexer('true');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.BOOLEAN);
+      expect(tokens[0].value).toBe(true);
+    });
+
+    it('should tokenize false', () => {
+      const lexer = new Lexer('false');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.BOOLEAN);
+      expect(tokens[0].value).toBe(false);
+    });
+
+    it('should tokenize null', () => {
+      const lexer = new Lexer('null');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.NULL);
+      expect(tokens[0].value).toBe(null);
+    });
+  });
+
+  describe('Variables', () => {
+    it('should tokenize $ variables', () => {
+      const lexer = new Lexer('$price');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.VARIABLE);
+      expect(tokens[0].value).toBe('price');
+    });
+
+    it('should tokenize @ context variables', () => {
+      const lexer = new Lexer('@userId');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.CONTEXT_VAR);
+      expect(tokens[0].value).toBe('userId');
+    });
+
+    it('should handle variables with underscores', () => {
+      const lexer = new Lexer('$unit_price');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].value).toBe('unit_price');
+    });
+
+    it('should handle variables with numbers', () => {
+      const lexer = new Lexer('$var1');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].value).toBe('var1');
+    });
+  });
+
+  describe('Operators', () => {
+    it('should tokenize arithmetic operators', () => {
+      const lexer = new Lexer('+ - * / % ^');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.PLUS);
+      expect(tokens[1].type).toBe(TokenType.MINUS);
+      expect(tokens[2].type).toBe(TokenType.MULTIPLY);
+      expect(tokens[3].type).toBe(TokenType.DIVIDE);
+      expect(tokens[4].type).toBe(TokenType.MODULO);
+      expect(tokens[5].type).toBe(TokenType.POWER);
+    });
+
+    it('should tokenize comparison operators', () => {
+      const lexer = new Lexer('== != < > <= >=');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.EQ);
+      expect(tokens[1].type).toBe(TokenType.NEQ);
+      expect(tokens[2].type).toBe(TokenType.LT);
+      expect(tokens[3].type).toBe(TokenType.GT);
+      expect(tokens[4].type).toBe(TokenType.LTE);
+      expect(tokens[5].type).toBe(TokenType.GTE);
+    });
+
+    it('should tokenize logical operators', () => {
+      const lexer = new Lexer('&& || !');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.AND);
+      expect(tokens[1].type).toBe(TokenType.OR);
+      expect(tokens[2].type).toBe(TokenType.NOT);
+    });
+
+    it('should tokenize AND/OR/NOT keywords', () => {
+      const lexer = new Lexer('AND OR NOT');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.AND);
+      expect(tokens[1].type).toBe(TokenType.OR);
+      expect(tokens[2].type).toBe(TokenType.NOT);
+    });
+  });
+
+  describe('Punctuation', () => {
+    it('should tokenize parentheses', () => {
+      const lexer = new Lexer('()');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.LPAREN);
+      expect(tokens[1].type).toBe(TokenType.RPAREN);
+    });
+
+    it('should tokenize brackets', () => {
+      const lexer = new Lexer('[]');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.LBRACKET);
+      expect(tokens[1].type).toBe(TokenType.RBRACKET);
+    });
+
+    it('should tokenize comma, dot, question, colon', () => {
+      const lexer = new Lexer(', . ? :');
+      const tokens = lexer.tokenize();
+
+      expect(tokens[0].type).toBe(TokenType.COMMA);
+      expect(tokens[1].type).toBe(TokenType.DOT);
+      expect(tokens[2].type).toBe(TokenType.QUESTION);
+      expect(tokens[3].type).toBe(TokenType.COLON);
+    });
+  });
+
+  describe('Complex expressions', () => {
+    it('should tokenize a math expression', () => {
+      const lexer = new Lexer('$price * $quantity + 10');
+      const tokens = lexer.tokenize();
+
+      expect(tokens.map(t => t.type)).toEqual([
+        TokenType.VARIABLE,
+        TokenType.MULTIPLY,
+        TokenType.VARIABLE,
+        TokenType.PLUS,
+        TokenType.NUMBER,
+        TokenType.EOF,
+      ]);
+    });
+
+    it('should tokenize function calls', () => {
+      const lexer = new Lexer('MAX($a, $b)');
+      const tokens = lexer.tokenize();
+
+      expect(tokens.map(t => t.type)).toEqual([
+        TokenType.IDENTIFIER,
+        TokenType.LPAREN,
+        TokenType.VARIABLE,
+        TokenType.COMMA,
+        TokenType.VARIABLE,
+        TokenType.RPAREN,
+        TokenType.EOF,
+      ]);
+    });
+
+    it('should tokenize ternary expression', () => {
+      const lexer = new Lexer('$a > 0 ? $b : $c');
+      const tokens = lexer.tokenize();
+
+      expect(tokens.map(t => t.type)).toEqual([
+        TokenType.VARIABLE,
+        TokenType.GT,
+        TokenType.NUMBER,
+        TokenType.QUESTION,
+        TokenType.VARIABLE,
+        TokenType.COLON,
+        TokenType.VARIABLE,
+        TokenType.EOF,
+      ]);
+    });
+
+    it('should handle whitespace correctly', () => {
+      const lexer = new Lexer(' $a + $b ');
+      const tokens = lexer.tokenize();
+
+      expect(tokens).toHaveLength(4);
+      expect(tokens[0].type).toBe(TokenType.VARIABLE);
+      expect(tokens[1].type).toBe(TokenType.PLUS);
+      expect(tokens[2].type).toBe(TokenType.VARIABLE);
+    });
+  });
+
+  describe('Error handling', () => {
+    it('should throw on invalid characters', () => {
+      const lexer = new Lexer('$a # $b');
+
+      expect(() => lexer.tokenize()).toThrow(SyntaxError);
+    });
+
+    it('should throw on lone ampersand', () => {
+      const lexer = new Lexer('$a & $b');
+
+      expect(() => lexer.tokenize()).toThrow(SyntaxError);
+    });
+
+    it('should throw on lone pipe', () => {
+      const lexer = new Lexer('$a | $b');
+
+      expect(() => lexer.tokenize()).toThrow(SyntaxError);
+    });
+  });
+});