drift-parser 0.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/ast.json +72 -0
  2. package/dist/index.d.ts +2 -0
  3. package/dist/index.js +8 -0
  4. package/dist/src/ast/ast.d.ts +0 -0
  5. package/dist/src/ast/exports.d.ts +1 -0
  6. package/dist/src/ast/expr.d.ts +0 -0
  7. package/dist/src/ast/stmt.d.ts +0 -0
  8. package/dist/src/ast/type.d.ts +31 -0
  9. package/dist/src/lexer/exports.d.ts +2 -0
  10. package/dist/src/lexer/tokenizer.d.ts +36 -0
  11. package/dist/src/lexer/tokens.d.ts +174 -0
  12. package/dist/src/parser/exports.d.ts +1 -0
  13. package/dist/src/parser/expr.d.ts +5 -0
  14. package/dist/src/parser/lookup.d.ts +28 -0
  15. package/dist/src/parser/parser.d.ts +23 -0
  16. package/dist/src/parser/stmt.d.ts +3 -0
  17. package/dist/src/parser/type.d.ts +0 -0
  18. package/dist/src/utils/combineLocation.d.ts +2 -0
  19. package/dist/src/utils/genexpr.d.ts +16 -0
  20. package/dist/src/utils/mapAll.d.ts +0 -0
  21. package/dist/src/utils/registerParse.d.ts +7 -0
  22. package/index.d.ts +1 -0
  23. package/index.ts +2 -0
  24. package/package.json +22 -0
  25. package/scripts/build.js +50 -0
  26. package/src/ast/ast.ts +0 -0
  27. package/src/ast/exports.ts +3 -0
  28. package/src/ast/expr.ts +122 -0
  29. package/src/ast/stmt.ts +126 -0
  30. package/src/ast/type.ts +46 -0
  31. package/src/lexer/exports.ts +2 -0
  32. package/src/lexer/tokenizer.ts +395 -0
  33. package/src/lexer/tokens.ts +241 -0
  34. package/src/parser/exports.ts +1 -0
  35. package/src/parser/expr.ts +82 -0
  36. package/src/parser/lookup.ts +69 -0
  37. package/src/parser/parser.ts +166 -0
  38. package/src/parser/stmt.ts +151 -0
  39. package/src/parser/type.ts +89 -0
  40. package/src/utils/combineLocation.ts +7 -0
  41. package/src/utils/mapAll.ts +43 -0
  42. package/src/utils/registerParse.ts +117 -0
  43. package/tests/astTest.js +44 -0
  44. package/tests/printTest.mjs +7 -0
  45. package/tests/tokenize.js +92 -0
  46. package/tests/typenames.js +15 -0
  47. package/tsconfig.json +15 -0

package/src/lexer/tokens.ts
@@ -0,0 +1,241 @@
+ export enum TokenType {
+   EOF,
+   /** SPACES ONLY */
+   WHITESPACE,
+   /** \n character */
+   NEWLINE,
+   LIFETIME_EXTEND, // x!
+   NULL,
+   TRUE,
+   FALSE,
+   NUMBER,
+   STRING,
+   IDENTIFIER,
+   ARRAYTYPE_IDENTIFIER,
+
+   // Grouping
+   LEFT_BRACKET,
+   RIGHT_BRACKET,
+   LEFT_BRACE,
+   RIGHT_BRACE,
+   LEFT_PAREN,
+   RIGHT_PAREN,
+
+   // EQUALITY
+   ASSINGMENT_EQUALS,
+   /** The equality check operator == */
+   EQUALS,
+   /**
+    * b==
+    * Performs a byte-by-byte comparison, e.g. for strings: "a" b== "a"
+    */
+   BYTE_EQUALS,
+   NOT_EQUALS,
+
+   // COMPARISON
+   LESS,
+   LESS_EQUALS,
+   GREATER,
+   GREATER_EQUALS,
+
+   // LOGICAL
+
+   AND,
+   OR,
+   NOT,
+
+   DOT,
+   /** range as in: [0..10] */
+   RANGE_OPERATOR,
+   SPREAD_OPERATOR,
+   SEMI_COLON,
+   COLON, // for ternary + a lot more
+   QUESTION_OPERATOR, // for ternary, e.g. condition ? result : elseresult
+   COMMA,
+
+   // shorthand operators
+   PLUS_PLUS,
+   MINUS_MINUS,
+   PLUS_EQUALS,
+   MINUS_EQUALS,
+   MUL_EQUALS, // *=
+   DIV_EQUALS, // /=
+   MOD_EQUALS, // %=
+   EXPONENTIATION_EQUALS, // **=
+
+   // Math operators
+   PLUS,
+   MINUS,
+   DIVIDE,
+   MUL,
+   MODULO,
+   EXPONENTATION,
+
+   // Bitwise operators
+   BW_NOT,
+   BW_AND,
+   BW_OR,
+   BW_XOR,
+   BW_LEFTSHIFT,
+   BW_RIGHTSHIFT,
+   BW_UNSIGNED_RIGHTSHIFT,
+   // maybe
+   BW_ROL, // r<<
+   BW_ROR, // r>> - rotate left and right; macros of the other bitwise shift ops that rotate instead of shift
+
+   BW_AND_EQUALS, // &=
+   BW_OR_EQUALS, // |=
+   BW_XOR_EQUALS, // ^=
+   BW_LEFTSHIFT_EQUALS, // <<=
+   BW_RIGHTSHIFT_EQUALS, // >>=
+   // maybe
+   BW_ROL_EQUALS, // ridiculous but r<<=
+   BW_ROR_EQUALS, // r>>=
+
+   // SEMANTIC
+   SINGLE_COMMENT, //
+   MULTI_COMMENT, // ### ... ###
+
+   // Reserved keywords + primitives
+
+   LET, // maybe change to set?
+   CONST,
+   PRIMITIVE_TYPE, // u8/byte, string, array(slice), u16 u32 u64 u128 + signed, float<-->f32, double<-->f64, quad<-->quadruple<-->f128
+   MYRESP, // my or myresp, can be a prefix to an expr
+   TOFREE, // tf or tofree
+
+
+   /**
+    * import std = "std";
+    * import { add, mul } = "math.dr";
+    * potentially import module = c "module.h";
+    */
+   IMPORT,
+   AS, // import { a as b } -- also single type casts outside an import, like 5::<i32> as const
+   TURBOFISH, // :: - sorry original goals, it's just nice to do 5::i32 or 5::my instead of (5) as <i32>; you can also do 5::<i32,const> for multiple
+
+   CLASS,
+   NEW,
+   FN, // func def
+   RETURN,
+   IF,
+   ELSE,
+   ELSEIF,
+   FOR,
+   FOR_EACH, // idk yet
+   WHILE,
+   EXPORT,
+   /**
+    * The operator for an `@` decorator above a function.
+    */
+   TYPENAME, // typename "hi" ==
+   STRUCT,
+
+   // COMPILE OPTIONS
+   /**
+    * Sets compile options, e.g. USE unsafe_features;
+    * and parser options like USE c_logicals; which makes it || and && instead of or/and, and ! instead of not.
+    */
+   USE,
+
+   // Memory management specific
+
+   // MISC
+   /**
+    * Example function `a` demonstrating an unwrapped return from block `b`.
+    *
+    * ```rust
+    * fn a() {
+    *   const b = unwrap {
+    *     let abc = 5; // local abc
+    *     return "top level return" + abc; // only returns from the block
+    *   };
+    *   return b;
+    * }
+    * ```
+    */
+   UNRWAP,
+   AT_COMPILE, // ATCOMPILE { } -> the same as UNWRAP but comptime
+ };
+
+ export const ReservedTokens = {
+   "let": TokenType.LET,
+   "const": TokenType.CONST,
+   "import": TokenType.IMPORT,
+   "as": TokenType.AS,
+   "class": TokenType.CLASS,
+   "new": TokenType.NEW,
+   "fn": TokenType.FN,
+   "return": TokenType.RETURN,
+   "if": TokenType.IF,
+   "else": TokenType.ELSE,
+   "elseif": TokenType.ELSEIF,
+   "for": TokenType.FOR,
+   "foreach": TokenType.FOR_EACH,
+   "while": TokenType.WHILE,
+   "export": TokenType.EXPORT,
+   "typename": TokenType.TYPENAME,
+   "struct": TokenType.STRUCT,
+   "use": TokenType.USE,
+   "unwrap": TokenType.UNRWAP,
+ };
+
+ export function TypeName(type: TokenType): string {
+   return TokenType[type];
+ }
+
+ export interface Position {
+   line: number;
+   col: number;
+ }
+
+ export interface LocationInterface {
+   start: Position;
+   end: Position;
+ }
+
+ export class Token {
+   public type: TokenType;
+   public name: string;
+   public value: string;
+   public loc: LocationInterface;
+   public range: [number, number];
+
+   constructor(type: TokenType, value: string, location: LocationInterface, range: [number, number]) {
+     this.type = type;
+     this.name = TypeName(type);
+     this.value = value;
+     this.loc = location;
+     this.range = range;
+   }
+
+   /**
+    * Checks if the token's type matches any of the provided token types.
+    *
+    * @param {...TokenType[]} checkTypes The token types to compare to.
+    * @returns {boolean} True if this token's type is in the list, otherwise false.
+    */
+   IsA(...checkTypes: TokenType[]): boolean {
+     for (let i = 0; i < checkTypes.length; i++) {
+       if (this.type == checkTypes[i]) {
+         return true;
+       }
+     }
+
+     return false;
+   }
+
+   Print(): void {
+     const { IDENTIFIER, NUMBER, STRING, SINGLE_COMMENT, MULTI_COMMENT } = TokenType;
+     if (this.IsA(IDENTIFIER, NUMBER, STRING, SINGLE_COMMENT, MULTI_COMMENT)) {
+       console.log(`${TypeName(this.type)}(${this.value})`)
+     } else {
+       console.log(`${TypeName(this.type)}()`)
+     }
+   }
+
+   toString(): string {
+     return this.value;
+   }
+ }
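
For orientation, a minimal sketch of how the Token API above can be exercised. The tokenizer (src/lexer/tokenizer.ts) is not part of this excerpt, so the token below is built by hand; the import path and literal values are illustrative assumptions only.

import { Token, TokenType, TypeName } from "./src/lexer/tokens"; // path assumed

// Hand-built token for the identifier `foo` spanning columns 0-3 of line 1.
const loc = { start: { line: 1, col: 0 }, end: { line: 1, col: 3 } };
const tok = new Token(TokenType.IDENTIFIER, "foo", loc, [0, 3]);

console.log(TypeName(tok.type));                              // "IDENTIFIER"
console.log(tok.IsA(TokenType.NUMBER, TokenType.IDENTIFIER)); // true
tok.Print();                                                  // "IDENTIFIER(foo)"
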

package/src/parser/exports.ts
@@ -0,0 +1 @@
+ export { Parser } from './parser'

package/src/parser/expr.ts
@@ -0,0 +1,82 @@
+ import { TokenType } from "../lexer/tokens";
+ import { Parser } from "./parser";
+ // import { Expression, NumberExpression, PrefixExpression, StringExpression, SymbolExpression } from "../ast/exports";
+ import { bp_lu, nud_lu, led_lu, BP } from "./lookup"; // wherever you put those
+ import { Expr, mkexpr, Node, NodeKind } from "../ast/type";
+
+ function tokenLocationInfo(token: { loc?: any; range?: [number, number] }): string {
+   const locStr = token.loc ? ` at ${JSON.stringify(token.loc)}` : "";
+   const rangeStr = token.range ? ` [${token.range[0]}, ${token.range[1]}]` : "";
+   return locStr + rangeStr;
+ }
+
+ export function parseExpr(p: Parser, bp: BP = BP.Default): Expr {
+   const tokenKind = p.currentTokenKind();
+   const nud = nud_lu[tokenKind];
+   if (!nud) {
+     const token = p.currentToken();
+     throw new Error(`nud handler expected for token ${TokenType[tokenKind]}${tokenLocationInfo(token)}`);
+   }
+
+   let left = nud(p);
+
+   while ((bp_lu[p.currentTokenKind()] ?? BP.Default) > bp) {
+     const nextTokenKind = p.currentTokenKind();
+     const led = led_lu[nextTokenKind];
+     if (!led) {
+       const token = p.currentToken();
+       throw new Error(`led handler expected for token ${TokenType[nextTokenKind]}${tokenLocationInfo(token)}`);
+     }
+     left = led(p, left, bp);
+   }
+
+   return left;
+ }
+
+
+ export function parsePrimaryExpr(p: Parser): Expr {
+   switch (p.currentTokenKind()) {
+     case TokenType.NUMBER: {
+       const token = p.advance();
+       return mkexpr({
+         value: token.value,
+         type: 'NumberExpression',
+         loc: token.loc,
+         range: token.range,
+       })
+     }
+     case TokenType.STRING: {
+       const token = p.advance();
+       return mkexpr({
+         value: token.value,
+         type: 'StringExpression',
+         loc: token.loc,
+         range: token.range,
+       })
+     }
+     case TokenType.IDENTIFIER: {
+       const token = p.advance();
+       return mkexpr({
+         value: token.value,
+         type: 'SymbolExpression',
+         loc: token.loc,
+         range: token.range,
+       })
+     }
+     default: {
+       const token = p.currentToken();
+       throw new Error(`Can't create primary expression from token type ${TokenType[token.type]}${tokenLocationInfo(token)}`);
+     }
+   }
+ }
+
+ // type PrefixExpr = InstanceType<typeof PrefixExpression>;
+ // export function parsePrefixExpression(p: Parser): PrefixExpr {
+ //   const opToken = p.advance()
+ //   const rhs = parseExpr(p, BP.Unary)
+ //   return new PrefixExpression(opToken, rhs);
+ // }
+
+ // export function parseBinaryExpression(p: Parser): BinaryExpr {
+
+ // }
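
parseExpr above is a Pratt-parser loop: the current token's nud (prefix) handler produces the left-hand expression, then led (infix) handlers keep extending it while the next token's binding power in bp_lu exceeds the caller's. The following standalone sketch shows the same control flow, simplified to numbers, + and *; it is independent of the package's Parser and Token types and is not part of the release.

// Hypothetical, self-contained illustration of the nud/led binding-power loop.
type Tok = { kind: "num" | "+" | "*" | "eof"; text: string };
type Ast = number | { op: string; left: Ast; right: Ast };

const bp: Record<string, number> = { "+": 1, "*": 2 };

function parse(toks: Tok[]): Ast {
  let i = 0;
  const peek = () => toks[i];
  const next = () => toks[i++];

  const nud = (): Ast => Number(next().text);      // prefix position: number literals only
  const led = (left: Ast, op: Tok): Ast =>         // infix position: build a binary node
    ({ op: op.kind, left, right: expr(bp[op.kind]) });

  function expr(minBp = 0): Ast {
    let left = nud();                              // 1. prefix handler for the current token
    while ((bp[peek().kind] ?? 0) > minBp) {       // 2. continue while the next operator binds tighter
      left = led(left, next());                    // 3. infix handler extends the left-hand side
    }
    return left;
  }

  return expr();
}

// 1 + 2 * 3 parses as 1 + (2 * 3) because "*" has the higher binding power.
console.log(JSON.stringify(parse([
  { kind: "num", text: "1" }, { kind: "+", text: "+" },
  { kind: "num", text: "2" }, { kind: "*", text: "*" },
  { kind: "num", text: "3" }, { kind: "eof", text: "" },
])));
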

package/src/parser/lookup.ts
@@ -0,0 +1,69 @@
+ import { Token, TokenType } from "../lexer/tokens";
+ import { Expr, Stmt } from "../ast/exports";
+ import { Parser } from "./parser";
+ import { parsePrimaryExpr } from "./expr";
+ // import { parseUsingStatement, parseVariableDeclaration } from "./stmt";
+
+ export enum BP {
+   Default,
+   Comma,
+   Assignment,
+   Logical,
+   Relational,
+   Additive,
+   Multiplicative,
+   Unary,
+   Call,
+   Member,
+   Primary,
+ }
+
+ // Change StmtHandler to return Statement[]
+ export type StmtHandler = (p: Parser) => Stmt[];
+ export type NudHandler = (p: Parser) => Expr;
+ export type LedHandler = (p: Parser, left: Expr, bp: BP) => Expr;
+
+ const _bp_lu: Partial<Record<TokenType, BP>> = {};
+ const _nud_lu: Partial<Record<TokenType, NudHandler>> = {};
+ const _led_lu: Partial<Record<TokenType, LedHandler>> = {}
+ const _stmt_lu: Partial<Record<TokenType, StmtHandler>> = {}
+ export function led(kind: TokenType, bp: BP, ledFn: LedHandler): void {
+   _bp_lu[kind] = bp;
+   _led_lu[kind] = ledFn;
+ }
+
+ export function nud(kind: TokenType, nudFn: NudHandler): void {
+   _nud_lu[kind] = nudFn;
+ }
+
+ export function stmt(kind: TokenType, stmtFn: StmtHandler): void {
+   _bp_lu[kind] = BP.Default;
+   _stmt_lu[kind] = stmtFn;
+ }
+
+ export function createTokenLookups(): void {
+   // prefix expression nud handlers
+
+   nud(TokenType.NUMBER, parsePrimaryExpr);
+   nud(TokenType.IDENTIFIER, parsePrimaryExpr);
+   nud(TokenType.STRING, parsePrimaryExpr);
+   // nud(TokenType.MINUS, parsePrefixExpression);
+
+
+   led(TokenType.MINUS, BP.Additive, parsePrimaryExpr);
+
+   // statement handlers
+
+   // stmt(TokenType.USE, parseUsingStatement);
+   // stmt(TokenType.LET, parseVariableDeclaration);
+   // stmt(TokenType.CONST, parseVariableDeclaration)
+ }
+
+ createTokenLookups();
+
+ export {
+   _bp_lu as bp_lu,
+   _nud_lu as nud_lu,
+   _led_lu as led_lu,
+   _stmt_lu as stmt_lu,
+ };
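
The lookup module populates its tables at import time via createTokenLookups(); in 0.0.11 only NUMBER, IDENTIFIER and STRING have nud handlers, and MINUS is the sole led entry. A hypothetical extension could register an infix handler as sketched below. parseBinaryExpression does not exist in this release, the import paths are assumed, and the node fields passed to mkexpr are assumptions, since src/ast/type.ts is not included in this excerpt.

// Hypothetical extension, not part of drift-parser 0.0.11.
import { TokenType } from "./src/lexer/tokens";            // paths assumed
import { led, BP, LedHandler } from "./src/parser/lookup";
import { parseExpr } from "./src/parser/expr";
import { mkexpr } from "./src/ast/type";

const parseBinaryExpression: LedHandler = (p, left, _bp) => {
  const opToken = p.advance();                 // consume the operator token
  const right = parseExpr(p, BP.Additive);     // parse the right-hand side at additive power
  return mkexpr({
    type: "BinaryExpression",                  // node shape assumed; mkexpr's accepted fields
    operator: opToken.value,                   // live in src/ast/type.ts, which this diff omits
    left,
    right,
    loc: { start: left.loc.start, end: right.loc.end },
    range: [left.range[0], right.range[1]],
  });
};

led(TokenType.PLUS, BP.Additive, parseBinaryExpression);
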

package/src/parser/parser.ts
@@ -0,0 +1,166 @@
+ import { mkstmt, Stmt } from "../ast/exports";
+ import { Token, TokenType } from "../lexer/tokens";
+ // import { _toJSON } from "../utils/mapAll";
+ import { stmt } from "./lookup";
+ import { parseStmt } from "./stmt";
+
+ export class Parser {
+   private tokens: Token[];
+   private pos: number = 0;
+   public allowNewline = false;
+
+   constructor(tokens: Token[]) {
+     this.tokens = tokens;
+   }
+
+   static parse(tokens: Token[]): Stmt {
+     const parser = new Parser(tokens);
+     const body: Stmt[] = [];
+
+     while (parser.hasTokens()) {
+       body.push(...parseStmt(parser));
+     }
+
+     // Filter out the EOF token (if present) for accurate location data
+     const actualTokens = tokens.filter((t) => t.type !== TokenType.EOF);
+
+     if (actualTokens.length === 0) {
+       throw new Error("Cannot parse an empty token stream.");
+     }
+
+     const start = actualTokens[0];
+     const end = actualTokens[actualTokens.length - 1];
+
+     const loc = {
+       start: start.loc.start,
+       end: end.loc.end,
+     };
+
+     const range: [number, number] = [start.range[0], end.range[1]];
+
+
+     // export class BlockStatement extends Statement {
+     //   public body: Statement[];
+
+     //   constructor(body: Statement[], loc: LocationInterface, range: [number, number]) {
+     //     super(loc, range);
+     //     this.body = body;
+     //   }
+
+     //   toJSON() {
+     //     return {
+     //       ...super.toJSON(),
+     //       body: this.body.map((s) => s.toJSON()),
+     //     };
+     //   }
+     // }
+     return mkstmt({
+       type: 'BlockStatement',
+       body: body,
+       loc,
+       range,
+     })
+     // return new BlockStatement(body, loc, range);
+   }
+
+   // static parseJSON(tokens: Token[]): object {
+   //   return _toJSON(this.parse(tokens))
+   // }
+
+   mark(): number {
+     return this.pos;
+   }
+
+   reset(pos: number): void {
+     if (pos < 0 || pos > this.tokens.length) {
+       throw new Error(`Invalid reset position: ${pos}`);
+     }
+     this.pos = pos;
+   }
+
+   currentToken(): Token {
+     return this.tokens[this.pos];
+   }
+
+   nextToken(): Token {
+     return this.tokens[this.pos + 1];
+   }
+
+   previousToken(): Token {
+     return this.tokens[this.pos - 1];
+   }
+
+   nthToken(n: number): Token {
+     return this.tokens[this.pos + n];
+   }
+
+   advance(): Token {
+     const token = this.currentToken();
+     do {
+       this.pos++;
+     } while (this.currentToken()?.type === TokenType.WHITESPACE);
+     return token;
+   }
+
+   hasTokens(): boolean {
+     return (
+       this.pos < this.tokens.length && this.currentTokenKind() !== TokenType.EOF
+     );
+   }
+
+   currentTokenKind(): TokenType {
+     return this.currentToken().type;
+   }
+
+   expect(expectedKind: TokenType): Token {
+     return this.expectError(expectedKind);
+   }
+
+   expectOne(...expectedKind: TokenType[]): Token {
+     return this.expectOneError(undefined, ...expectedKind);
+   }
+
+   parseTerminator(): Token {
+     const expectedKinds: TokenType[] = this.allowNewline
+       ? [TokenType.SEMI_COLON, TokenType.EOF]
+       : [TokenType.SEMI_COLON, TokenType.NEWLINE, TokenType.EOF];
+
+     const current = this.currentToken();
+
+     if (current.type === TokenType.EOF) {
+       return current;
+     }
+
+     return this.expectOne(...expectedKinds);
+   }
+
+   expectError(expectedKind: TokenType, err?: string): Token {
+     const token = this.currentToken();
+     const kind = token.type;
+
+     if (kind !== expectedKind) {
+       if (!err) {
+         err = `Expected token: ${TokenType[expectedKind]} but got ${TokenType[kind]}`;
+       }
+       throw new Error(err);
+     }
+
+     return this.advance();
+   }
+
+   expectOneError(err?: string, ...expectedKind: TokenType[]): Token {
+     const token = this.currentToken();
+     const kind = token.type;
+
+     if (!expectedKind.includes(kind)) {
+       if (!err) {
+         err = `Expected token: ${expectedKind.map(k => TokenType[k]).join(' OR ')} but got ${TokenType[kind]}`;
+       }
+       throw new Error(err);
+     }
+
+     return this.advance();
+   }
+ }
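
Putting the pieces together, Parser.parse consumes a Token[] and returns a single BlockStatement whose loc and range span the first to last non-EOF token. A minimal end-to-end sketch follows, with hand-built tokens (tokenizer.ts is not shown in this excerpt) and assumed relative import paths; the published entry point in index.ts is also not shown here.

import { Token, TokenType } from "./src/lexer/tokens";  // paths assumed
import { Parser } from "./src/parser/parser";

const num = new Token(
  TokenType.NUMBER, "5",
  { start: { line: 1, col: 0 }, end: { line: 1, col: 1 } }, [0, 1],
);
const eof = new Token(
  TokenType.EOF, "",
  { start: { line: 1, col: 1 }, end: { line: 1, col: 1 } }, [1, 1],
);

// Yields a BlockStatement whose body holds an ExpressionStatement (a NumberExpression "5")
// followed by a TerminatorStatement with kind "EOF".
const program = Parser.parse([num, eof]);
console.log(JSON.stringify(program, null, 2));
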

package/src/parser/stmt.ts
@@ -0,0 +1,151 @@
+ // ./parser/parse-stmt.ts
+ import { Parser } from "./parser";
+ import { parseExpr } from "./expr";
+ import { stmt_lu } from "./lookup";
+ // import {
+ //   Statement,
+ //   ExpressionStatement,
+ //   TerminatorStatement,
+ //   UsingStatement,
+ //   VariableDeclarationStatement,
+ // } from "../ast/stmt";
+ import { TokenType } from "../lexer/tokens";
+ import { BP } from "./lookup";
+ // import { parse_type } from "./type";
+ import { mkexpr, mkstmt, Stmt } from "../ast/type";
+
+ export function parseStmt(p: Parser): Stmt[] {
+   const kind = p.currentTokenKind();
+
+   // if (kind === TokenType.SEMI_COLON || kind === TokenType.NEWLINE) {
+   if (kind === TokenType.SEMI_COLON) {
+     const tok = p.advance();
+     // export class TerminatorStatement extends Statement {
+     //   public kind: "semicolon" | "newline" | "eof";
+
+     //   constructor(token: Token) {
+     //     super(token.loc, token.range);
+
+     //     if (token.type === TokenType.EOF) {
+     //       this.kind = "eof";
+     //     } else {
+     //       this.kind = token.value === ";" ? "semicolon" : "newline";
+     //     }
+     //   }
+
+     //   toJSON() {
+     //     return {
+     //       ...super.toJSON(),
+     //       kind: this.kind,
+     //     };
+     //   }
+     // }
+     return [mkstmt({
+       type: "TerminatorStatement",
+       loc: tok.loc,
+       range: tok.range,
+       kind: tok.type == TokenType.EOF ? "EOF" : tok.value == ";" ? "semicolon" : "newline",
+     })];
+   }
+
+   const fn = stmt_lu[kind];
+   if (fn) {
+     const result = fn(p);
+     return Array.isArray(result) ? result : [result];
+   }
+
+   const expr = parseExpr(p, BP.Default);
+   const terminator = p.parseTerminator();
+
+   const loc = {
+     start: expr.loc.start,
+     end: terminator.loc.end,
+   };
+   const range: [number, number] = [expr.range[0], terminator.range[1]];
+
+
+   // export class ExpressionStatement extends Statement {
+   //   public expression: Expression;
+
+   //   constructor(expression: Expression, loc: LocationInterface, range: [number, number]) {
+   //     super(loc, range);
+   //     this.expression = expression;
+   //   }
+
+   //   toJSON() {
+   //     return {
+   //       ...super.toJSON(),
+   //       expression: this.expression.toJSON(),
+   //     };
+   //   }
+   // }
+
+
+   return [
+     mkstmt({
+       type: "ExpressionStatement",
+       expression: expr,
+       loc: loc,
+       range: range,
+     }),
+     mkstmt({
+       type: "TerminatorStatement",
+       loc: terminator.loc,
+       range: terminator.range,
+       kind: terminator.type == TokenType.EOF ? "EOF" : terminator.value == ";" ? "semicolon" : "newline",
+     })
+   ];
+ }
+
+
+ // export function parseUsingStatement(p: Parser): Stmt[] {
+ //   const useTok = p.expect(TokenType.USE);
+ //   const idTok = p.expect(TokenType.IDENTIFIER);
+
+ //   let asExpr: Expr | undefined;
+ //   if (p.currentTokenKind() === TokenType.AS) {
+ //     p.advance();
+ //     asExpr = parseExpr(p, BP.Default);
+ //   }
+
+ //   const terminator = p.parseTerminator();
+ //   const loc = {
+ //     start: useTok.loc.start,
+ //     end: terminator.loc.end,
+ //   };
+ //   const range: [number, number] = [useTok.range[0], terminator.range[1]];
+
+ //   return [
+ //     new UsingStatement(idTok, asExpr, terminator, loc, range),
+ //     new TerminatorStatement(terminator),
+ //   ];
+ // }
+
+ // export function parseVariableDeclaration(p: Parser): Statement[] {
+ //   const letTok = p.expectOne(TokenType.LET, TokenType.CONST);
+ //   const nameTok = p.expect(TokenType.IDENTIFIER);
+
+ //   let varType: Type | undefined;
+ //   if (p.currentTokenKind() === TokenType.COLON) {
+ //     p.advance();
+ //     varType = parse_type(p, 0);
+ //   }
+
+ //   let initExpr: Expression | undefined;
+ //   if (p.currentTokenKind() === TokenType.ASSINGMENT_EQUALS) {
+ //     p.advance();
+ //     initExpr = parseExpr(p, BP.Default);
+ //   }
+
+ //   const terminator = p.parseTerminator();
+ //   const loc = {
+ //     start: letTok.loc.start,
+ //     end: terminator.loc.end,
+ //   };
+ //   const range: [number, number] = [letTok.range[0], terminator.range[1]];
+
+ //   return [
+ //     new VariableDeclarationStatement(nameTok, varType, initExpr, terminator, letTok.IsA(TokenType.CONST), loc, range),
+ //     new TerminatorStatement(terminator),
+ //   ];
+ // }
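
For comparison with the commented-out class-based version above, a hypothetical adaptation of parseUsingStatement to the mkstmt node style used elsewhere in this release might look as follows. The UsingStatement node shape and its field names are assumptions, since src/ast/type.ts is not included in this diff, and the import paths are assumed as well.

// Hypothetical StmtHandler, not part of drift-parser 0.0.11.
import { Parser } from "./src/parser/parser";
import { parseExpr } from "./src/parser/expr";
import { BP } from "./src/parser/lookup";
import { TokenType } from "./src/lexer/tokens";
import { mkstmt, Stmt, Expr } from "./src/ast/type";

export function parseUsingStatement(p: Parser): Stmt[] {
  const useTok = p.expect(TokenType.USE);
  const idTok = p.expect(TokenType.IDENTIFIER);

  // Optional alias: `use foo as bar;`
  let asExpr: Expr | undefined;
  if (p.currentTokenKind() === TokenType.AS) {
    p.advance();
    asExpr = parseExpr(p, BP.Default);
  }

  const terminator = p.parseTerminator();

  return [
    mkstmt({
      type: "UsingStatement",                 // node shape assumed
      name: idTok.value,                      // field names assumed
      alias: asExpr,
      loc: { start: useTok.loc.start, end: terminator.loc.end },
      range: [useTok.range[0], terminator.range[1]],
    }),
    mkstmt({
      type: "TerminatorStatement",
      loc: terminator.loc,
      range: terminator.range,
      kind: terminator.type == TokenType.EOF ? "EOF" : terminator.value == ";" ? "semicolon" : "newline",
    }),
  ];
}
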