@minduscript/parser 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.npmignore +2 -0
- package/dist/exe.d.ts +2 -0
- package/dist/exe.js +75 -0
- package/dist/expression-parser.d.ts +25 -0
- package/dist/expression-parser.js +349 -0
- package/dist/index.d.ts +76 -0
- package/dist/index.js +17 -0
- package/dist/nodes.d.ts +648 -0
- package/dist/nodes.js +167 -0
- package/dist/parser.d.ts +5 -0
- package/dist/parser.js +1280 -0
- package/package.json +26 -0
package/.npmignore
ADDED
package/dist/exe.d.ts
ADDED
package/dist/exe.js
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
#!/usr/bin/env node
"use strict";
// CLI entry point for @minduscript/parser: reads MinduScript source text (or a
// pre-lexed raw token stream as JSON), parses it into an AST, and emits the AST
// as JSON either to a file or to standard output.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const lexer_1 = require("@minduscript/lexer");
const lexer_2 = require("@minduscript/lexer/dist/lexer");
const chlamydomonos_parser_1 = require("chlamydomonos-parser");
const commander_1 = require("commander");
const fs_1 = __importDefault(require("fs"));
const nodes_1 = require("./nodes");
const parser_1 = require("./parser");
commander_1.program
    .option('-o, --output <string>', '输出文件,若不存在则输出到标准输出')
    .option('-r, --raw', '输出可被编译器使用的原始语法树')
    .option('-i, --input-raw', '输入包含原始Token流的Json文件')
    .argument('[inputFile]', '输入文件,若不存在则读取标准输入')
    .action(function (input) {
    const options = this.opts();
    const output = options.output;
    const raw = options.raw ?? false;
    const inputRaw = options.inputRaw ?? false;
    let inputData;
    if (input) {
        inputData = fs_1.default.readFileSync(input, { encoding: 'utf-8' });
    }
    else {
        // BUGFIX: `process.stdin.read()` is a non-blocking read that returns
        // null until the stream has buffered data, so piped input was lost.
        // Reading file descriptor 0 synchronously blocks until stdin is fully
        // consumed, which is the intended "read all of standard input" behavior.
        inputData = fs_1.default.readFileSync(0, { encoding: 'utf-8' });
    }
    try {
        // Either re-use a raw token stream supplied by the caller, or lex the
        // source text ourselves.
        let tokens;
        if (inputRaw) {
            tokens = JSON.parse(inputData);
        }
        else {
            tokens = lexer_1.lexer.lexString(inputData);
        }
        const ast = parser_1.parser.parse(tokens);
        let outputData;
        if (raw) {
            // Raw mode: dump the AST exactly as the compiler expects it.
            outputData = JSON.stringify(ast);
        }
        else {
            // Pretty mode: drop the verbose token arrays and render NodeType
            // enum values by name for human readability.
            outputData = JSON.stringify(ast, (key, value) => {
                if (key == 'tokens') {
                    return undefined;
                }
                if (key == 'type') {
                    return nodes_1.NodeType[value];
                }
                return value;
            }, 4);
        }
        if (output) {
            fs_1.default.writeFileSync(output, outputData, { encoding: 'utf-8' });
        }
        else {
            console.log(outputData);
        }
    }
    catch (e) {
        // Known, user-facing failures exit with a clean message; anything else
        // is re-thrown so the stack trace surfaces.
        if (e instanceof lexer_2.LexError) {
            console.error(e.message);
            process.exit(1);
        }
        else if (e instanceof chlamydomonos_parser_1.ParserError) {
            console.error(e.message);
            process.exit(1);
        }
        throw e;
    }
})
    .parse();
//# sourceMappingURL=exe.js.map
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { type Token } from '@minduscript/lexer';
import type { ExpressionNode } from './nodes';
/**
 * Declaration of the expression parser. Parses a single MinduScript expression
 * out of a token stream, starting at a given index, and returns the resulting
 * expression node together with the index of the first unconsumed token.
 */
declare class ExpressionParser {
    private readonly L_PAREN;
    private readonly R_PAREN;
    private readonly COMMA;
    private readonly unaryOperators;
    private readonly binaryOperators;
    private readonly functions;
    private readonly literals;
    private readonly identifiers;
    private buildUnaryOp;
    private buildBinaryOp;
    private buildFunction;
    private buildMacro;
    private buildLiteral;
    private buildIdentifier;
    private buildDefinedIdentifier;
    /**
     * Parse one expression from `tokens` beginning at index `from`.
     * @returns `node` — the parsed expression (without the wrapping `type`
     *          discriminant), and `next` — the index of the first token after
     *          the expression.
     */
    parse(tokens: Token[], from: number): {
        node: Omit<ExpressionNode, 'type'>;
        next: number;
    };
}
/** Singleton-backed entry point; same signature as {@link ExpressionParser.parse}. */
export declare const expressionParser: ExpressionParser['parse'];
export {};
|
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.expressionParser = void 0;
|
|
4
|
+
const lexer_1 = require("@minduscript/lexer");
|
|
5
|
+
const nodes_1 = require("./nodes");
|
|
6
|
+
var Associativity;
|
|
7
|
+
(function (Associativity) {
|
|
8
|
+
Associativity[Associativity["LEFT"] = 0] = "LEFT";
|
|
9
|
+
Associativity[Associativity["RIGHT"] = 1] = "RIGHT";
|
|
10
|
+
})(Associativity || (Associativity = {}));
|
|
11
|
+
const MIN_NODE_TYPE = nodes_1.NodeType.DOCUMENT;
|
|
12
|
+
class ExpressionParser {
|
|
13
|
+
L_PAREN = lexer_1.TokenType.L_PAREN;
|
|
14
|
+
R_PAREN = lexer_1.TokenType.R_PAREN;
|
|
15
|
+
COMMA = lexer_1.TokenType.COMMA;
|
|
16
|
+
// 一元运算符,对应的值为运算符优先级
|
|
17
|
+
unaryOperators = {
|
|
18
|
+
[lexer_1.TokenType.NOT]: 3,
|
|
19
|
+
[lexer_1.TokenType.FLIP]: 11,
|
|
20
|
+
};
|
|
21
|
+
// 二元运算符
|
|
22
|
+
binaryOperators = {
|
|
23
|
+
[lexer_1.TokenType.OR]: { priority: 1, associativity: Associativity.LEFT },
|
|
24
|
+
[lexer_1.TokenType.AND]: { priority: 2, associativity: Associativity.LEFT },
|
|
25
|
+
[lexer_1.TokenType.EQ]: { priority: 4, associativity: Associativity.LEFT },
|
|
26
|
+
[lexer_1.TokenType.NE]: { priority: 4, associativity: Associativity.LEFT },
|
|
27
|
+
[lexer_1.TokenType.LESS]: { priority: 4, associativity: Associativity.LEFT },
|
|
28
|
+
[lexer_1.TokenType.LE]: { priority: 4, associativity: Associativity.LEFT },
|
|
29
|
+
[lexer_1.TokenType.GREATER]: { priority: 4, associativity: Associativity.LEFT },
|
|
30
|
+
[lexer_1.TokenType.GE]: { priority: 4, associativity: Associativity.LEFT },
|
|
31
|
+
[lexer_1.TokenType.STRICT_EQ]: { priority: 4, associativity: Associativity.LEFT },
|
|
32
|
+
[lexer_1.TokenType.BITOR]: { priority: 5, associativity: Associativity.LEFT },
|
|
33
|
+
[lexer_1.TokenType.XOR]: { priority: 6, associativity: Associativity.LEFT },
|
|
34
|
+
[lexer_1.TokenType.BITAND]: { priority: 7, associativity: Associativity.LEFT },
|
|
35
|
+
[lexer_1.TokenType.SHL]: { priority: 8, associativity: Associativity.LEFT },
|
|
36
|
+
[lexer_1.TokenType.SHR]: { priority: 8, associativity: Associativity.LEFT },
|
|
37
|
+
[lexer_1.TokenType.ADD]: { priority: 9, associativity: Associativity.LEFT },
|
|
38
|
+
[lexer_1.TokenType.SUB]: { priority: 9, associativity: Associativity.LEFT },
|
|
39
|
+
[lexer_1.TokenType.MUL]: { priority: 10, associativity: Associativity.LEFT },
|
|
40
|
+
[lexer_1.TokenType.DIV]: { priority: 10, associativity: Associativity.LEFT },
|
|
41
|
+
[lexer_1.TokenType.IDIV]: { priority: 10, associativity: Associativity.LEFT },
|
|
42
|
+
[lexer_1.TokenType.MOD]: { priority: 10, associativity: Associativity.LEFT },
|
|
43
|
+
[lexer_1.TokenType.POW]: { priority: 12, associativity: Associativity.RIGHT },
|
|
44
|
+
};
|
|
45
|
+
// 函数,对应的值为参数个数
|
|
46
|
+
functions = {
|
|
47
|
+
[lexer_1.TokenType.MAX]: 2,
|
|
48
|
+
[lexer_1.TokenType.MIN]: 2,
|
|
49
|
+
[lexer_1.TokenType.ANGLE]: 2,
|
|
50
|
+
[lexer_1.TokenType.ANGLE_DIFF]: 2,
|
|
51
|
+
[lexer_1.TokenType.LEN]: 2,
|
|
52
|
+
[lexer_1.TokenType.NOISE]: 2,
|
|
53
|
+
[lexer_1.TokenType.ABS]: 1,
|
|
54
|
+
[lexer_1.TokenType.LOG]: 1,
|
|
55
|
+
[lexer_1.TokenType.LOG10]: 1,
|
|
56
|
+
[lexer_1.TokenType.FLOOR]: 1,
|
|
57
|
+
[lexer_1.TokenType.CEIL]: 1,
|
|
58
|
+
[lexer_1.TokenType.SQRT]: 1,
|
|
59
|
+
[lexer_1.TokenType.RAND]: 1,
|
|
60
|
+
[lexer_1.TokenType.SIN]: 1,
|
|
61
|
+
[lexer_1.TokenType.COS]: 1,
|
|
62
|
+
[lexer_1.TokenType.TAN]: 1,
|
|
63
|
+
[lexer_1.TokenType.ASIN]: 1,
|
|
64
|
+
[lexer_1.TokenType.ACOS]: 1,
|
|
65
|
+
[lexer_1.TokenType.ATAN]: 1,
|
|
66
|
+
};
|
|
67
|
+
// 字面量
|
|
68
|
+
literals = {
|
|
69
|
+
[lexer_1.TokenType.NUMBER]: (token) => parseFloat(token.raw),
|
|
70
|
+
[lexer_1.TokenType.STRING]: (token) => JSON.parse(token.raw),
|
|
71
|
+
[lexer_1.TokenType.TRUE]: (_) => true,
|
|
72
|
+
[lexer_1.TokenType.FALSE]: (_) => false,
|
|
73
|
+
[lexer_1.TokenType.NULL]: (_) => null,
|
|
74
|
+
};
|
|
75
|
+
identifiers = {
|
|
76
|
+
[lexer_1.TokenType.IDENTIFIER]: (token) => ({
|
|
77
|
+
type: nodes_1.NodeType.IDENTIFIER_EXPRESSION,
|
|
78
|
+
tokens: [token],
|
|
79
|
+
value: token.raw,
|
|
80
|
+
identifierType: nodes_1.IdentifierType.SIMPLE,
|
|
81
|
+
isMindustry: false,
|
|
82
|
+
}),
|
|
83
|
+
[lexer_1.TokenType.MINDUSTRY_IDENTIFIER]: (token) => ({
|
|
84
|
+
type: nodes_1.NodeType.IDENTIFIER_EXPRESSION,
|
|
85
|
+
tokens: [token],
|
|
86
|
+
value: token.raw,
|
|
87
|
+
isMindustry: true,
|
|
88
|
+
}),
|
|
89
|
+
};
|
|
90
|
+
buildUnaryOp = (token, child) => ({
|
|
91
|
+
type: nodes_1.NodeType.UNARY_OP_EXPRESSION,
|
|
92
|
+
tokens: [token, ...child.tokens],
|
|
93
|
+
opType: token.type,
|
|
94
|
+
child,
|
|
95
|
+
});
|
|
96
|
+
buildBinaryOp = (token, lChild, rChild) => ({
|
|
97
|
+
type: nodes_1.NodeType.BINARY_OP_EXPRESSION,
|
|
98
|
+
tokens: [...lChild.tokens, token, ...rChild.tokens],
|
|
99
|
+
opType: token.type,
|
|
100
|
+
lChild,
|
|
101
|
+
rChild,
|
|
102
|
+
});
|
|
103
|
+
buildFunction = (token, children, // 包含所有成分,如解析`max(a + b, c)`时,传入['(', 'a + b', ',', 'c', ')']
|
|
104
|
+
args) => ({
|
|
105
|
+
type: nodes_1.NodeType.FUNCTION_CALL_EXPRESSION,
|
|
106
|
+
tokens: children.flatMap((c) => {
|
|
107
|
+
if (c.type < MIN_NODE_TYPE) {
|
|
108
|
+
return [c];
|
|
109
|
+
}
|
|
110
|
+
else {
|
|
111
|
+
return c.tokens;
|
|
112
|
+
}
|
|
113
|
+
}),
|
|
114
|
+
function: token.type,
|
|
115
|
+
args,
|
|
116
|
+
});
|
|
117
|
+
buildMacro = (token, children, inputArgs, outputArgs) => ({
|
|
118
|
+
type: nodes_1.NodeType.MACRO_CALL_EXPRESSION,
|
|
119
|
+
tokens: children.flatMap((c) => {
|
|
120
|
+
if (c.type < MIN_NODE_TYPE) {
|
|
121
|
+
return [c];
|
|
122
|
+
}
|
|
123
|
+
else {
|
|
124
|
+
return c.tokens;
|
|
125
|
+
}
|
|
126
|
+
}),
|
|
127
|
+
name: token.raw,
|
|
128
|
+
inputArgs,
|
|
129
|
+
outputArgs,
|
|
130
|
+
});
|
|
131
|
+
buildLiteral = (token) => {
|
|
132
|
+
const factory = this.literals[token.type];
|
|
133
|
+
if (!factory) {
|
|
134
|
+
throw Error();
|
|
135
|
+
}
|
|
136
|
+
return {
|
|
137
|
+
type: nodes_1.NodeType.LITERAL_EXPRESSION,
|
|
138
|
+
tokens: [token],
|
|
139
|
+
value: factory(token),
|
|
140
|
+
};
|
|
141
|
+
};
|
|
142
|
+
buildIdentifier = (token) => {
|
|
143
|
+
const factory = this.identifiers[token.type];
|
|
144
|
+
if (!factory) {
|
|
145
|
+
throw Error();
|
|
146
|
+
}
|
|
147
|
+
return factory(token);
|
|
148
|
+
};
|
|
149
|
+
buildDefinedIdentifier = (definitionToken, identifierToken) => ({
|
|
150
|
+
type: nodes_1.NodeType.IDENTIFIER_EXPRESSION,
|
|
151
|
+
tokens: [definitionToken, identifierToken],
|
|
152
|
+
value: identifierToken.raw,
|
|
153
|
+
identifierType: definitionToken.type === lexer_1.TokenType.LET ? nodes_1.IdentifierType.LET : nodes_1.IdentifierType.CONST,
|
|
154
|
+
isMindustry: false,
|
|
155
|
+
});
|
|
156
|
+
parse(tokens, from) {
|
|
157
|
+
const isTerminator = (tokenType) => {
|
|
158
|
+
return (tokenType === this.COMMA ||
|
|
159
|
+
tokenType === this.R_PAREN ||
|
|
160
|
+
tokenType === lexer_1.TokenType.SEMICOLON ||
|
|
161
|
+
tokenType === lexer_1.TokenType.R_BRACE);
|
|
162
|
+
};
|
|
163
|
+
const error = (message, index) => {
|
|
164
|
+
const token = tokens[index];
|
|
165
|
+
if (!token) {
|
|
166
|
+
return new Error(`${message} at end of input`);
|
|
167
|
+
}
|
|
168
|
+
return new Error(`${message} at ${token.row}:${token.col}`);
|
|
169
|
+
};
|
|
170
|
+
const parseIdentifierList = (startIndex, lParenToken) => {
|
|
171
|
+
const children = [lParenToken];
|
|
172
|
+
const identifiers = [];
|
|
173
|
+
let index = startIndex;
|
|
174
|
+
if (index >= tokens.length) {
|
|
175
|
+
throw error('Unclosed parenthesized identifier list', startIndex - 1);
|
|
176
|
+
}
|
|
177
|
+
if (tokens[index].type === this.R_PAREN) {
|
|
178
|
+
children.push(tokens[index]);
|
|
179
|
+
return { children, identifiers, next: index + 1 };
|
|
180
|
+
}
|
|
181
|
+
while (index < tokens.length) {
|
|
182
|
+
const token = tokens[index];
|
|
183
|
+
let identifierNode;
|
|
184
|
+
if (token.type === lexer_1.TokenType.LET || token.type === lexer_1.TokenType.CONST) {
|
|
185
|
+
const identifierToken = tokens[index + 1];
|
|
186
|
+
if (!identifierToken || identifierToken.type !== lexer_1.TokenType.IDENTIFIER) {
|
|
187
|
+
throw error('Expected identifier after "let" or "const" in macro output arguments', index + 1);
|
|
188
|
+
}
|
|
189
|
+
identifierNode = this.buildDefinedIdentifier(token, identifierToken);
|
|
190
|
+
index += 2;
|
|
191
|
+
}
|
|
192
|
+
else if (token.type === lexer_1.TokenType.IDENTIFIER || token.type === lexer_1.TokenType.MINDUSTRY_IDENTIFIER) {
|
|
193
|
+
identifierNode = this.buildIdentifier(token);
|
|
194
|
+
index += 1;
|
|
195
|
+
}
|
|
196
|
+
else {
|
|
197
|
+
throw error('Expected identifier or "let"/"const" definition in macro output arguments', index);
|
|
198
|
+
}
|
|
199
|
+
identifiers.push(identifierNode);
|
|
200
|
+
children.push(identifierNode);
|
|
201
|
+
if (index >= tokens.length) {
|
|
202
|
+
throw error('Unclosed parenthesized identifier list', index - 1);
|
|
203
|
+
}
|
|
204
|
+
const separator = tokens[index];
|
|
205
|
+
if (separator.type === this.COMMA) {
|
|
206
|
+
children.push(separator);
|
|
207
|
+
index += 1;
|
|
208
|
+
continue;
|
|
209
|
+
}
|
|
210
|
+
if (separator.type === this.R_PAREN) {
|
|
211
|
+
children.push(separator);
|
|
212
|
+
return { children, identifiers, next: index + 1 };
|
|
213
|
+
}
|
|
214
|
+
throw error('Expected "," or ")" in macro output arguments', index);
|
|
215
|
+
}
|
|
216
|
+
throw error('Unclosed parenthesized identifier list', index - 1);
|
|
217
|
+
};
|
|
218
|
+
const parseExpressionList = (startIndex, lParenToken) => {
|
|
219
|
+
const children = [lParenToken];
|
|
220
|
+
const args = [];
|
|
221
|
+
let index = startIndex;
|
|
222
|
+
if (index >= tokens.length) {
|
|
223
|
+
throw error('Unclosed parenthesized expression list', startIndex - 1);
|
|
224
|
+
}
|
|
225
|
+
if (tokens[index].type === this.R_PAREN) {
|
|
226
|
+
children.push(tokens[index]);
|
|
227
|
+
return { children, args, next: index + 1 };
|
|
228
|
+
}
|
|
229
|
+
while (index < tokens.length) {
|
|
230
|
+
const parsed = parseExpression(index, 0);
|
|
231
|
+
args.push(parsed.node);
|
|
232
|
+
children.push(parsed.node);
|
|
233
|
+
index = parsed.next;
|
|
234
|
+
if (index >= tokens.length) {
|
|
235
|
+
throw error('Unclosed parenthesized expression list', index - 1);
|
|
236
|
+
}
|
|
237
|
+
const separator = tokens[index];
|
|
238
|
+
if (separator.type === this.COMMA) {
|
|
239
|
+
children.push(separator);
|
|
240
|
+
index += 1;
|
|
241
|
+
continue;
|
|
242
|
+
}
|
|
243
|
+
if (separator.type === this.R_PAREN) {
|
|
244
|
+
children.push(separator);
|
|
245
|
+
return { children, args, next: index + 1 };
|
|
246
|
+
}
|
|
247
|
+
throw error('Expected "," or ")" in argument list', index);
|
|
248
|
+
}
|
|
249
|
+
throw error('Unclosed parenthesized expression list', index - 1);
|
|
250
|
+
};
|
|
251
|
+
const parsePrimary = (startIndex) => {
|
|
252
|
+
const token = tokens[startIndex];
|
|
253
|
+
if (!token) {
|
|
254
|
+
throw error('Expected expression', startIndex);
|
|
255
|
+
}
|
|
256
|
+
const literalFactory = this.literals[token.type];
|
|
257
|
+
if (literalFactory) {
|
|
258
|
+
return { node: this.buildLiteral(token), next: startIndex + 1 };
|
|
259
|
+
}
|
|
260
|
+
const functionArgCount = this.functions[token.type];
|
|
261
|
+
if (functionArgCount !== undefined) {
|
|
262
|
+
const lParenIndex = startIndex + 1;
|
|
263
|
+
const lParen = tokens[lParenIndex];
|
|
264
|
+
if (!lParen || lParen.type !== this.L_PAREN) {
|
|
265
|
+
throw error('Expected "(" after function name', lParenIndex);
|
|
266
|
+
}
|
|
267
|
+
const parsedArgs = parseExpressionList(lParenIndex + 1, lParen);
|
|
268
|
+
if (parsedArgs.args.length !== functionArgCount) {
|
|
269
|
+
throw error(`Function \"${token.raw}\" expects ${functionArgCount} argument(s), but got ${parsedArgs.args.length}`, lParenIndex);
|
|
270
|
+
}
|
|
271
|
+
return {
|
|
272
|
+
node: this.buildFunction(token, [token, ...parsedArgs.children], parsedArgs.args),
|
|
273
|
+
next: parsedArgs.next,
|
|
274
|
+
};
|
|
275
|
+
}
|
|
276
|
+
const identifierFactory = this.identifiers[token.type];
|
|
277
|
+
if (identifierFactory) {
|
|
278
|
+
if (token.type === lexer_1.TokenType.IDENTIFIER) {
|
|
279
|
+
const lParen1 = tokens[startIndex + 1];
|
|
280
|
+
if (lParen1 && lParen1.type === this.L_PAREN) {
|
|
281
|
+
const parsedInputArgs = parseExpressionList(startIndex + 2, lParen1);
|
|
282
|
+
const lParen2 = tokens[parsedInputArgs.next];
|
|
283
|
+
if (!lParen2 || lParen2.type !== this.L_PAREN) {
|
|
284
|
+
throw error('Expected second "(" for macro output arguments', parsedInputArgs.next);
|
|
285
|
+
}
|
|
286
|
+
const parsedOutputArgs = parseIdentifierList(parsedInputArgs.next + 1, lParen2);
|
|
287
|
+
return {
|
|
288
|
+
node: this.buildMacro(token, [token, ...parsedInputArgs.children, ...parsedOutputArgs.children], parsedInputArgs.args, parsedOutputArgs.identifiers),
|
|
289
|
+
next: parsedOutputArgs.next,
|
|
290
|
+
};
|
|
291
|
+
}
|
|
292
|
+
}
|
|
293
|
+
return { node: this.buildIdentifier(token), next: startIndex + 1 };
|
|
294
|
+
}
|
|
295
|
+
if (token.type === this.L_PAREN) {
|
|
296
|
+
const parsed = parseExpression(startIndex + 1, 0);
|
|
297
|
+
const rParen = tokens[parsed.next];
|
|
298
|
+
if (!rParen || rParen.type !== this.R_PAREN) {
|
|
299
|
+
throw error('Expected ")" to close parenthesized expression', parsed.next);
|
|
300
|
+
}
|
|
301
|
+
return {
|
|
302
|
+
node: {
|
|
303
|
+
...parsed.node,
|
|
304
|
+
tokens: [token, ...parsed.node.tokens, rParen],
|
|
305
|
+
},
|
|
306
|
+
next: parsed.next + 1,
|
|
307
|
+
};
|
|
308
|
+
}
|
|
309
|
+
throw error('Expected expression', startIndex);
|
|
310
|
+
};
|
|
311
|
+
const parseExpression = (startIndex, minPriority) => {
|
|
312
|
+
let index = startIndex;
|
|
313
|
+
const unaryToken = tokens[index];
|
|
314
|
+
const unaryPriority = unaryToken ? this.unaryOperators[unaryToken.type] : undefined;
|
|
315
|
+
let left;
|
|
316
|
+
if (unaryToken && unaryPriority !== undefined) {
|
|
317
|
+
const parsedChild = parseExpression(index + 1, unaryPriority);
|
|
318
|
+
left = this.buildUnaryOp(unaryToken, parsedChild.node);
|
|
319
|
+
index = parsedChild.next;
|
|
320
|
+
}
|
|
321
|
+
else {
|
|
322
|
+
const primary = parsePrimary(index);
|
|
323
|
+
left = primary.node;
|
|
324
|
+
index = primary.next;
|
|
325
|
+
}
|
|
326
|
+
while (index < tokens.length) {
|
|
327
|
+
const operatorToken = tokens[index];
|
|
328
|
+
if (isTerminator(operatorToken.type)) {
|
|
329
|
+
break;
|
|
330
|
+
}
|
|
331
|
+
const operator = this.binaryOperators[operatorToken.type];
|
|
332
|
+
if (!operator || operator.priority < minPriority) {
|
|
333
|
+
break;
|
|
334
|
+
}
|
|
335
|
+
const nextMinPriority = operator.associativity === Associativity.LEFT ? operator.priority + 1 : operator.priority;
|
|
336
|
+
const right = parseExpression(index + 1, nextMinPriority);
|
|
337
|
+
left = this.buildBinaryOp(operatorToken, left, right.node);
|
|
338
|
+
index = right.next;
|
|
339
|
+
}
|
|
340
|
+
return { node: left, next: index };
|
|
341
|
+
};
|
|
342
|
+
const parsed = parseExpression(from, 0);
|
|
343
|
+
return { node: { child: parsed.node, tokens: parsed.node.tokens }, next: parsed.next };
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
const parserInstance = new ExpressionParser();
|
|
347
|
+
const expressionParser = (tokens, from) => parserInstance.parse(tokens, from);
|
|
348
|
+
exports.expressionParser = expressionParser;
|
|
349
|
+
//# sourceMappingURL=expression-parser.js.map
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
// Public type surface of @minduscript/parser: re-exports the parser, the
// runtime enums, and the AST node types from ./nodes.
export { parser } from './parser';
export { NodeType, AssignType, ControlType, RadarCondition, RadarSortConfig, UnitLocateCategory, UnitLocateBuildingGroup, LoopControlType, IdentifierType, StatementType, } from './nodes';
export type { ASTNode, ASTNodeTypes, DocumentNode, ImportStatementNode, StatementNode, ExpressionNode, UnaryOpType, UnaryOpExpressionNode, BinaryOpType, BinaryOpExpressionNode, Functions, FunctionCallExpressionNode, MacroCallExpressionNode, LiteralExpressionNode, MindustryIdentifierExpressionNode, MinduscriptIdentifierExpressionNode, IdentifierExpressionNode, ExpressionChildNode, } from './nodes';
import type { NodeType } from './nodes';
import type * as Nodes from './nodes';
/**
 * Re-maps an internal statement node type for external consumers: internally a
 * statement is tagged SINGLE_STATEMENT or BLOCK_STATEMENT, but the public API
 * exposes both under the unified NodeType.STATEMENT tag. The `T extends any`
 * conditional distributes over unions so each member is remapped individually.
 */
type StatementNodeReExport<T extends {
    type: NodeType.SINGLE_STATEMENT | NodeType.BLOCK_STATEMENT;
}> = T extends any ? Omit<T, 'type'> & {
    type: NodeType.STATEMENT;
} : never;
export type EmptyStatementNode = StatementNodeReExport<Nodes.EmptyStatementNode>;
export type VariableDefineStatementNode = StatementNodeReExport<Nodes.VariableDefineStatementNode>;
export type AssignStatementNode = StatementNodeReExport<Nodes.AssignStatementNode>;
export type StatementNodeRead = StatementNodeReExport<Nodes.StatementNodeRead>;
export type StatementNodeWrite = StatementNodeReExport<Nodes.StatementNodeWrite>;
export type StatementNodeDrawClear = StatementNodeReExport<Nodes.StatementNodeDrawClear>;
export type StatementNodeDrawColor = StatementNodeReExport<Nodes.StatementNodeDrawColor>;
export type StatementNodeDrawCol = StatementNodeReExport<Nodes.StatementNodeDrawCol>;
export type StatementNodeDrawStroke = StatementNodeReExport<Nodes.StatementNodeDrawStroke>;
export type StatementNodeDrawLine = StatementNodeReExport<Nodes.StatementNodeDrawLine>;
export type StatementNodeDrawRect = StatementNodeReExport<Nodes.StatementNodeDrawRect>;
export type StatementNodeDrawLineRect = StatementNodeReExport<Nodes.StatementNodeDrawLineRect>;
export type StatementNodeDrawPoly = StatementNodeReExport<Nodes.StatementNodeDrawPoly>;
export type StatementNodeDrawLinePoly = StatementNodeReExport<Nodes.StatementNodeDrawLinePoly>;
export type StatementNodeDrawTriangle = StatementNodeReExport<Nodes.StatementNodeDrawTriangle>;
export type StatementNodeDrawImage = StatementNodeReExport<Nodes.StatementNodeDrawImage>;
export type StatementNodePrint = StatementNodeReExport<Nodes.StatementNodePrint>;
export type StatementNodeDrawFlush = StatementNodeReExport<Nodes.StatementNodeDrawFlush>;
export type StatementNodePrintFlush = StatementNodeReExport<Nodes.StatementNodePrintFlush>;
export type StatementNodeGetLink = StatementNodeReExport<Nodes.StatementNodeGetLink>;
export type StatementNodeSetEnabled = StatementNodeReExport<Nodes.StatementNodeSetEnabled>;
export type StatementNodeSetShoot = StatementNodeReExport<Nodes.StatementNodeSetShoot>;
export type StatementNodeSetShootP = StatementNodeReExport<Nodes.StatementNodeSetShootP>;
export type StatementNodeSetConfig = StatementNodeReExport<Nodes.StatementNodeSetConfig>;
export type StatementNodeSetColor = StatementNodeReExport<Nodes.StatementNodeSetColor>;
export type StatementNodeRadar = StatementNodeReExport<Nodes.StatementNodeRadar>;
export type StatementNodeSensor = StatementNodeReExport<Nodes.StatementNodeSensor>;
export type StatementNodePackColor = StatementNodeReExport<Nodes.StatementNodePackColor>;
export type StatementNodeWait = StatementNodeReExport<Nodes.StatementNodeWait>;
export type StatementNodeCpuStop = StatementNodeReExport<Nodes.StatementNodeCpuStop>;
export type StatementNodeUnitBind = StatementNodeReExport<Nodes.StatementNodeUnitBind>;
export type StatementNodeUnitRadar = StatementNodeReExport<Nodes.StatementNodeUnitRadar>;
export type StatementNodeUnitLocateOre = StatementNodeReExport<Nodes.StatementNodeUnitLocateOre>;
export type StatementNodeUnitLocateBuilding = StatementNodeReExport<Nodes.StatementNodeUnitLocateBuilding>;
export type StatementNodeUnitLocateSpawn = StatementNodeReExport<Nodes.StatementNodeUnitLocateSpawn>;
export type StatementNodeUnitLocateDamaged = StatementNodeReExport<Nodes.StatementNodeUnitLocateDamaged>;
export type StatementNodeIdle = StatementNodeReExport<Nodes.StatementNodeIdle>;
export type StatementNodeStop = StatementNodeReExport<Nodes.StatementNodeStop>;
export type StatementNodeMove = StatementNodeReExport<Nodes.StatementNodeMove>;
export type StatementNodeApproach = StatementNodeReExport<Nodes.StatementNodeApproach>;
export type StatementNodePathFind = StatementNodeReExport<Nodes.StatementNodePathFind>;
export type StatementNodeAutoPathFind = StatementNodeReExport<Nodes.StatementNodeAutoPathFind>;
export type StatementNodeBoost = StatementNodeReExport<Nodes.StatementNodeBoost>;
export type StatementNodeTarget = StatementNodeReExport<Nodes.StatementNodeTarget>;
export type StatementNodeTargetP = StatementNodeReExport<Nodes.StatementNodeTargetP>;
export type StatementNodeItemDrop = StatementNodeReExport<Nodes.StatementNodeItemDrop>;
export type StatementNodeItemTake = StatementNodeReExport<Nodes.StatementNodeItemTake>;
export type StatementNodePayDrop = StatementNodeReExport<Nodes.StatementNodePayDrop>;
export type StatementNodePayTake = StatementNodeReExport<Nodes.StatementNodePayTake>;
export type StatementNodePayEnter = StatementNodeReExport<Nodes.StatementNodePayEnter>;
export type StatementNodeMine = StatementNodeReExport<Nodes.StatementNodeMine>;
export type StatementNodeFlag = StatementNodeReExport<Nodes.StatementNodeFlag>;
export type StatementNodeBuild = StatementNodeReExport<Nodes.StatementNodeBuild>;
export type StatementNodeGetBlock = StatementNodeReExport<Nodes.StatementNodeGetBlock>;
export type StatementNodeWithin = StatementNodeReExport<Nodes.StatementNodeWithin>;
export type StatementNodeUnbind = StatementNodeReExport<Nodes.StatementNodeUnbind>;
export type ControlStatementNode = StatementNodeReExport<Nodes.ControlStatementNode>;
export type MacroDefineStatementNode = StatementNodeReExport<Nodes.MacroDefineStatementNode>;
export type MacroCallStatementNode = StatementNodeReExport<Nodes.MacroCallStatementNode>;
export type IfElseStatementNode = StatementNodeReExport<Nodes.IfElseStatementNode>;
export type IfStatementNode = StatementNodeReExport<Nodes.IfStatementNode>;
export type ForStatementNode = StatementNodeReExport<Nodes.ForStatementNode>;
export type WhileStatementNode = StatementNodeReExport<Nodes.WhileStatementNode>;
export type LoopControlStatementNode = StatementNodeReExport<Nodes.LoopControlStatementNode>;
export type ReturnStatementNode = StatementNodeReExport<Nodes.ReturnStatementNode>;
export type BindStatementNode = StatementNodeReExport<Nodes.BindStatementNode>;
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.StatementType = exports.IdentifierType = exports.LoopControlType = exports.UnitLocateBuildingGroup = exports.UnitLocateCategory = exports.RadarSortConfig = exports.RadarCondition = exports.ControlType = exports.AssignType = exports.NodeType = exports.parser = void 0;
|
|
4
|
+
var parser_1 = require("./parser");
|
|
5
|
+
Object.defineProperty(exports, "parser", { enumerable: true, get: function () { return parser_1.parser; } });
|
|
6
|
+
var nodes_1 = require("./nodes");
|
|
7
|
+
Object.defineProperty(exports, "NodeType", { enumerable: true, get: function () { return nodes_1.NodeType; } });
|
|
8
|
+
Object.defineProperty(exports, "AssignType", { enumerable: true, get: function () { return nodes_1.AssignType; } });
|
|
9
|
+
Object.defineProperty(exports, "ControlType", { enumerable: true, get: function () { return nodes_1.ControlType; } });
|
|
10
|
+
Object.defineProperty(exports, "RadarCondition", { enumerable: true, get: function () { return nodes_1.RadarCondition; } });
|
|
11
|
+
Object.defineProperty(exports, "RadarSortConfig", { enumerable: true, get: function () { return nodes_1.RadarSortConfig; } });
|
|
12
|
+
Object.defineProperty(exports, "UnitLocateCategory", { enumerable: true, get: function () { return nodes_1.UnitLocateCategory; } });
|
|
13
|
+
Object.defineProperty(exports, "UnitLocateBuildingGroup", { enumerable: true, get: function () { return nodes_1.UnitLocateBuildingGroup; } });
|
|
14
|
+
Object.defineProperty(exports, "LoopControlType", { enumerable: true, get: function () { return nodes_1.LoopControlType; } });
|
|
15
|
+
Object.defineProperty(exports, "IdentifierType", { enumerable: true, get: function () { return nodes_1.IdentifierType; } });
|
|
16
|
+
Object.defineProperty(exports, "StatementType", { enumerable: true, get: function () { return nodes_1.StatementType; } });
|
|
17
|
+
//# sourceMappingURL=index.js.map
|