@mojir/lits 2.0.20 → 2.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/cli.js +18 -16
- package/dist/cli/src/Lits/Lits.d.ts +3 -1
- package/dist/cli/src/tokenizer/minifyTokenStream.d.ts +2 -0
- package/dist/index.esm.js +17 -15
- package/dist/index.esm.js.map +1 -1
- package/dist/index.js +17 -15
- package/dist/index.js.map +1 -1
- package/dist/lits.iife.js +17 -15
- package/dist/lits.iife.js.map +1 -1
- package/dist/src/Lits/Lits.d.ts +3 -1
- package/dist/src/tokenizer/minifyTokenStream.d.ts +2 -0
- package/dist/testFramework.esm.js +17 -15
- package/dist/testFramework.esm.js.map +1 -1
- package/dist/testFramework.js +17 -15
- package/dist/testFramework.js.map +1 -1
- package/package.json +1 -1
package/dist/cli/cli.js
CHANGED
|
@@ -92,7 +92,7 @@ typeof SuppressedError === "function" ? SuppressedError : function (error, suppr
|
|
|
92
92
|
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
|
|
93
93
|
};
|
|
94
94
|
|
|
95
|
-
var version = "2.0.20";
|
|
95
|
+
var version = "2.0.21";
|
|
96
96
|
|
|
97
97
|
var AstNodeType;
|
|
98
98
|
(function (AstNodeType) {
|
|
@@ -6111,6 +6111,20 @@ function analyze(ast, params) {
|
|
|
6111
6111
|
};
|
|
6112
6112
|
}
|
|
6113
6113
|
|
|
6114
|
+
function minifyTokenStream(tokenStream) {
|
|
6115
|
+
var tokens = tokenStream.tokens.filter(function (token) {
|
|
6116
|
+
if (isP_CommentToken(token)
|
|
6117
|
+
|| isA_CommentToken(token)
|
|
6118
|
+
|| isA_MultiLineCommentToken(token)
|
|
6119
|
+
|| isA_WhitespaceToken(token)
|
|
6120
|
+
|| isP_WhitespaceToken(token)) {
|
|
6121
|
+
return false;
|
|
6122
|
+
}
|
|
6123
|
+
return true;
|
|
6124
|
+
});
|
|
6125
|
+
return __assign(__assign({}, tokenStream), { tokens: tokens });
|
|
6126
|
+
}
|
|
6127
|
+
|
|
6114
6128
|
function parseSymbol(tokenStream, parseState) {
|
|
6115
6129
|
var _a;
|
|
6116
6130
|
var tkn = asToken(tokenStream.tokens[parseState.position++]);
|
|
@@ -7629,7 +7643,7 @@ function parsePolishToken(tokenStream, parseState) {
|
|
|
7629
7643
|
}
|
|
7630
7644
|
|
|
7631
7645
|
function parse(tokenStream) {
|
|
7632
|
-
tokenStream = removeUnnecessaryTokens(tokenStream);
|
|
7646
|
+
tokenStream = minifyTokenStream(tokenStream);
|
|
7633
7647
|
var algebraic = tokenStream.algebraic;
|
|
7634
7648
|
var ast = {
|
|
7635
7649
|
b: [],
|
|
@@ -7650,19 +7664,6 @@ function parse(tokenStream) {
|
|
|
7650
7664
|
}
|
|
7651
7665
|
return ast;
|
|
7652
7666
|
}
|
|
7653
|
-
function removeUnnecessaryTokens(tokenStream) {
|
|
7654
|
-
var tokens = tokenStream.tokens.filter(function (token) {
|
|
7655
|
-
if (isP_CommentToken(token)
|
|
7656
|
-
|| isA_CommentToken(token)
|
|
7657
|
-
|| isA_MultiLineCommentToken(token)
|
|
7658
|
-
|| isA_WhitespaceToken(token)
|
|
7659
|
-
|| isP_WhitespaceToken(token)) {
|
|
7660
|
-
return false;
|
|
7661
|
-
}
|
|
7662
|
-
return true;
|
|
7663
|
-
});
|
|
7664
|
-
return __assign(__assign({}, tokenStream), { tokens: tokens });
|
|
7665
|
-
}
|
|
7666
7667
|
function parseToken(tokenStream, parseState) {
|
|
7667
7668
|
return parsePolishToken(tokenStream, parseState);
|
|
7668
7669
|
}
|
|
@@ -8490,7 +8491,8 @@ var Lits = /** @class */ (function () {
|
|
|
8490
8491
|
if (tokenizeParams === void 0) { tokenizeParams = {}; }
|
|
8491
8492
|
var debug = this.debug;
|
|
8492
8493
|
var algebraic = this.algebraic;
|
|
8493
|
-
|
|
8494
|
+
var tokenStream = tokenize(program, __assign(__assign({}, tokenizeParams), { debug: debug, algebraic: algebraic }));
|
|
8495
|
+
return tokenizeParams.minify ? minifyTokenStream(tokenStream) : tokenStream;
|
|
8494
8496
|
};
|
|
8495
8497
|
Lits.prototype.parse = function (tokenStream) {
|
|
8496
8498
|
return parse(tokenStream);
|
|
@@ -40,7 +40,9 @@ export declare class Lits {
|
|
|
40
40
|
run(program: string, params?: LitsParams): unknown;
|
|
41
41
|
context(program: string, params?: LitsParams): Context;
|
|
42
42
|
analyze(program: string, params?: LitsParams): Analysis;
|
|
43
|
-
tokenize(program: string, tokenizeParams?: Pick<TokenizeParams, 'filePath'>): TokenStream;
|
|
43
|
+
tokenize(program: string, tokenizeParams?: Pick<TokenizeParams, 'filePath'> & {
|
|
44
|
+
minify?: boolean;
|
|
45
|
+
}): TokenStream;
|
|
44
46
|
parse(tokenStream: TokenStream): Ast;
|
|
45
47
|
evaluate(ast: Ast, params: LitsParams): Any;
|
|
46
48
|
transform(tokenStream: TokenStream, transformer: (name: string) => string): TokenStream;
|
package/dist/index.esm.js
CHANGED
|
@@ -6141,6 +6141,20 @@ function analyze(ast, params) {
|
|
|
6141
6141
|
};
|
|
6142
6142
|
}
|
|
6143
6143
|
|
|
6144
|
+
function minifyTokenStream(tokenStream) {
|
|
6145
|
+
var tokens = tokenStream.tokens.filter(function (token) {
|
|
6146
|
+
if (isP_CommentToken(token)
|
|
6147
|
+
|| isA_CommentToken(token)
|
|
6148
|
+
|| isA_MultiLineCommentToken(token)
|
|
6149
|
+
|| isA_WhitespaceToken(token)
|
|
6150
|
+
|| isP_WhitespaceToken(token)) {
|
|
6151
|
+
return false;
|
|
6152
|
+
}
|
|
6153
|
+
return true;
|
|
6154
|
+
});
|
|
6155
|
+
return __assign(__assign({}, tokenStream), { tokens: tokens });
|
|
6156
|
+
}
|
|
6157
|
+
|
|
6144
6158
|
function parseSymbol(tokenStream, parseState) {
|
|
6145
6159
|
var _a;
|
|
6146
6160
|
var tkn = asToken(tokenStream.tokens[parseState.position++]);
|
|
@@ -7659,7 +7673,7 @@ function parsePolishToken(tokenStream, parseState) {
|
|
|
7659
7673
|
}
|
|
7660
7674
|
|
|
7661
7675
|
function parse(tokenStream) {
|
|
7662
|
-
tokenStream = removeUnnecessaryTokens(tokenStream);
|
|
7676
|
+
tokenStream = minifyTokenStream(tokenStream);
|
|
7663
7677
|
var algebraic = tokenStream.algebraic;
|
|
7664
7678
|
var ast = {
|
|
7665
7679
|
b: [],
|
|
@@ -7680,19 +7694,6 @@ function parse(tokenStream) {
|
|
|
7680
7694
|
}
|
|
7681
7695
|
return ast;
|
|
7682
7696
|
}
|
|
7683
|
-
function removeUnnecessaryTokens(tokenStream) {
|
|
7684
|
-
var tokens = tokenStream.tokens.filter(function (token) {
|
|
7685
|
-
if (isP_CommentToken(token)
|
|
7686
|
-
|| isA_CommentToken(token)
|
|
7687
|
-
|| isA_MultiLineCommentToken(token)
|
|
7688
|
-
|| isA_WhitespaceToken(token)
|
|
7689
|
-
|| isP_WhitespaceToken(token)) {
|
|
7690
|
-
return false;
|
|
7691
|
-
}
|
|
7692
|
-
return true;
|
|
7693
|
-
});
|
|
7694
|
-
return __assign(__assign({}, tokenStream), { tokens: tokens });
|
|
7695
|
-
}
|
|
7696
7697
|
function parseToken(tokenStream, parseState) {
|
|
7697
7698
|
return parsePolishToken(tokenStream, parseState);
|
|
7698
7699
|
}
|
|
@@ -8520,7 +8521,8 @@ var Lits = /** @class */ (function () {
|
|
|
8520
8521
|
if (tokenizeParams === void 0) { tokenizeParams = {}; }
|
|
8521
8522
|
var debug = this.debug;
|
|
8522
8523
|
var algebraic = this.algebraic;
|
|
8523
|
-
|
|
8524
|
+
var tokenStream = tokenize(program, __assign(__assign({}, tokenizeParams), { debug: debug, algebraic: algebraic }));
|
|
8525
|
+
return tokenizeParams.minify ? minifyTokenStream(tokenStream) : tokenStream;
|
|
8524
8526
|
};
|
|
8525
8527
|
Lits.prototype.parse = function (tokenStream) {
|
|
8526
8528
|
return parse(tokenStream);
|