@borgar/fx 4.12.0 → 5.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index-BMr6cTgc.d.cts +1444 -0
- package/dist/index-BMr6cTgc.d.ts +1444 -0
- package/dist/index.cjs +3054 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +2984 -0
- package/dist/index.js.map +1 -0
- package/dist/xlsx/index.cjs +3120 -0
- package/dist/xlsx/index.cjs.map +1 -0
- package/dist/xlsx/index.d.cts +55 -0
- package/dist/xlsx/index.d.ts +55 -0
- package/dist/xlsx/index.js +3049 -0
- package/dist/xlsx/index.js.map +1 -0
- package/docs/API.md +2959 -718
- package/docs/AST_format.md +2 -2
- package/eslint.config.mjs +40 -0
- package/lib/a1.spec.ts +32 -0
- package/lib/a1.ts +26 -0
- package/lib/addA1RangeBounds.ts +50 -0
- package/lib/addTokenMeta.spec.ts +166 -0
- package/lib/{addTokenMeta.js → addTokenMeta.ts} +53 -33
- package/lib/astTypes.ts +211 -0
- package/lib/cloneToken.ts +29 -0
- package/lib/{constants.js → constants.ts} +6 -3
- package/lib/fixRanges.spec.ts +220 -0
- package/lib/fixRanges.ts +260 -0
- package/lib/fromCol.spec.ts +15 -0
- package/lib/{fromCol.js → fromCol.ts} +1 -1
- package/lib/index.spec.ts +119 -0
- package/lib/index.ts +76 -0
- package/lib/isNodeType.ts +151 -0
- package/lib/isType.spec.ts +208 -0
- package/lib/{isType.js → isType.ts} +26 -25
- package/lib/lexers/advRangeOp.ts +18 -0
- package/lib/lexers/canEndRange.ts +25 -0
- package/lib/lexers/lexBoolean.ts +55 -0
- package/lib/lexers/lexContext.ts +104 -0
- package/lib/lexers/lexError.ts +15 -0
- package/lib/lexers/lexFunction.ts +37 -0
- package/lib/lexers/lexNameFuncCntx.ts +112 -0
- package/lib/lexers/lexNamed.ts +60 -0
- package/lib/lexers/lexNewLine.ts +12 -0
- package/lib/lexers/lexNumber.ts +48 -0
- package/lib/lexers/lexOperator.ts +26 -0
- package/lib/lexers/lexRange.ts +15 -0
- package/lib/lexers/lexRangeA1.ts +134 -0
- package/lib/lexers/lexRangeR1C1.ts +146 -0
- package/lib/lexers/lexRangeTrim.ts +26 -0
- package/lib/lexers/lexRefOp.ts +19 -0
- package/lib/lexers/lexString.ts +22 -0
- package/lib/lexers/lexStructured.ts +25 -0
- package/lib/lexers/lexWhitespace.ts +31 -0
- package/lib/lexers/sets.ts +51 -0
- package/lib/mergeRefTokens.spec.ts +141 -0
- package/lib/{mergeRefTokens.js → mergeRefTokens.ts} +47 -32
- package/lib/nodeTypes.ts +54 -0
- package/lib/parse.spec.ts +1410 -0
- package/lib/{parser.js → parse.ts} +81 -63
- package/lib/parseA1Range.spec.ts +233 -0
- package/lib/parseA1Range.ts +206 -0
- package/lib/parseA1Ref.spec.ts +337 -0
- package/lib/parseA1Ref.ts +115 -0
- package/lib/parseR1C1Range.ts +191 -0
- package/lib/parseR1C1Ref.spec.ts +323 -0
- package/lib/parseR1C1Ref.ts +127 -0
- package/lib/parseRef.spec.ts +90 -0
- package/lib/parseRef.ts +240 -0
- package/lib/parseSRange.ts +240 -0
- package/lib/parseStructRef.spec.ts +168 -0
- package/lib/parseStructRef.ts +76 -0
- package/lib/stringifyA1Range.spec.ts +72 -0
- package/lib/stringifyA1Range.ts +72 -0
- package/lib/stringifyA1Ref.spec.ts +64 -0
- package/lib/stringifyA1Ref.ts +59 -0
- package/lib/{stringifyPrefix.js → stringifyPrefix.ts} +17 -2
- package/lib/stringifyR1C1Range.spec.ts +92 -0
- package/lib/stringifyR1C1Range.ts +73 -0
- package/lib/stringifyR1C1Ref.spec.ts +63 -0
- package/lib/stringifyR1C1Ref.ts +67 -0
- package/lib/stringifyStructRef.spec.ts +124 -0
- package/lib/stringifyStructRef.ts +113 -0
- package/lib/stringifyTokens.ts +15 -0
- package/lib/toCol.spec.ts +11 -0
- package/lib/{toCol.js → toCol.ts} +4 -4
- package/lib/tokenTypes.ts +76 -0
- package/lib/tokenize-srefs.spec.ts +429 -0
- package/lib/tokenize.spec.ts +2103 -0
- package/lib/tokenize.ts +346 -0
- package/lib/translate.spec.ts +35 -0
- package/lib/translateToA1.spec.ts +247 -0
- package/lib/translateToA1.ts +231 -0
- package/lib/translateToR1C1.spec.ts +227 -0
- package/lib/translateToR1C1.ts +145 -0
- package/lib/types.ts +179 -0
- package/lib/xlsx/index.spec.ts +27 -0
- package/lib/xlsx/index.ts +32 -0
- package/package.json +46 -30
- package/tsconfig.json +28 -0
- package/typedoc-ignore-links.ts +17 -0
- package/typedoc.json +41 -0
- package/.eslintrc +0 -22
- package/dist/fx.d.ts +0 -823
- package/dist/fx.js +0 -2
- package/dist/package.json +0 -1
- package/lib/a1.js +0 -348
- package/lib/a1.spec.js +0 -458
- package/lib/addTokenMeta.spec.js +0 -153
- package/lib/astTypes.js +0 -96
- package/lib/extraTypes.js +0 -74
- package/lib/fixRanges.js +0 -104
- package/lib/fixRanges.spec.js +0 -170
- package/lib/fromCol.spec.js +0 -11
- package/lib/index.js +0 -134
- package/lib/index.spec.js +0 -67
- package/lib/isType.spec.js +0 -168
- package/lib/lexer-srefs.spec.js +0 -324
- package/lib/lexer.js +0 -283
- package/lib/lexer.spec.js +0 -1953
- package/lib/lexerParts.js +0 -228
- package/lib/mergeRefTokens.spec.js +0 -121
- package/lib/package.json +0 -1
- package/lib/parseRef.js +0 -157
- package/lib/parseRef.spec.js +0 -71
- package/lib/parseSRange.js +0 -167
- package/lib/parseStructRef.js +0 -48
- package/lib/parseStructRef.spec.js +0 -164
- package/lib/parser.spec.js +0 -1208
- package/lib/rc.js +0 -341
- package/lib/rc.spec.js +0 -403
- package/lib/stringifyStructRef.js +0 -80
- package/lib/stringifyStructRef.spec.js +0 -182
- package/lib/toCol.spec.js +0 -11
- package/lib/translate-toA1.spec.js +0 -214
- package/lib/translate-toRC.spec.js +0 -197
- package/lib/translate.js +0 -239
- package/lib/translate.spec.js +0 -21
- package/rollup.config.mjs +0 -22
- package/tsd.json +0 -12
|
import { lexError } from './lexError.ts';
import { lexRangeTrim } from './lexRangeTrim.ts';
import { lexOperator } from './lexOperator.ts';
import { lexBoolean } from './lexBoolean.ts';
import { lexNewLine } from './lexNewLine.ts';
import { lexWhitespace } from './lexWhitespace.ts';
import { lexString } from './lexString.ts';
import { lexContextQuoted, lexContextUnquoted } from './lexContext.ts';
import { lexRange } from './lexRange.ts';
import { lexStructured } from './lexStructured.ts';
import { lexNumber } from './lexNumber.ts';
import { lexNamed } from './lexNamed.ts';
import { lexRefOp } from './lexRefOp.ts';
import { lexNameFuncCntx } from './lexNameFuncCntx.ts';
import type { Token } from '../types.ts';

/**
 * The signature shared by every sub-lexer: attempt to read one token
 * from `str` starting at `pos`, returning the token on success or
 * `undefined` when this lexer does not match at that position.
 */
export type PartLexer = (
  str: string,
  pos: number,
  options?: Partial<{
    xlsx: boolean,
    // NOTE(review): "allowTerniary" looks like a misspelled twin of
    // "allowTernary" below — presumably kept as a back-compat alias;
    // confirm against the options handling in tokenize.ts
    allowTerniary: boolean,
    allowTernary: boolean,
    mergeRefs: boolean,
    r1c1: boolean
  }>
) => Token | undefined;

// Sub-lexers used for general formula tokenization.
// NOTE(review): ordering appears significant (e.g. lexString before
// lexRange) — confirm how tokenize.ts consumes this list before reordering.
export const lexers: PartLexer[] = [
  lexError,
  lexRangeTrim,
  lexOperator,
  lexNewLine,
  lexWhitespace,
  lexString,
  lexRange,
  lexNumber,
  lexBoolean,
  lexContextQuoted,
  lexNameFuncCntx,
  lexStructured
];

// Sub-lexers used when scanning the parts of a reference
// (workbook/sheet prefixes, ranges, structured refs, names).
export const lexersRefs = [
  lexRefOp,
  lexContextQuoted,
  lexContextUnquoted,
  lexRange,
  lexStructured,
  lexNamed
];
import { describe, test, expect } from 'vitest';
import { CONTEXT, FUNCTION, FX_PREFIX, OPERATOR, REF_RANGE, REF_BEAM, REF_NAMED, REF_TERNARY, UNKNOWN } from './constants.ts';
import { mergeRefTokens } from './mergeRefTokens.ts';
import { tokenize } from './tokenize.ts';

describe('mergeRefTokens', () => {
  test('merges reference tokens and preserves metadata', () => {
    // tokenize without merging so the individual reference parts are visible
    const list = tokenize('=SUM([Wb1]Sheet1!A1:B2)', { mergeRefs: false, withLocation: true });

    expect(list).toEqual([
      { type: FX_PREFIX, value: '=', loc: [ 0, 1 ] },
      { type: FUNCTION, value: 'SUM', loc: [ 1, 4 ] },
      { type: OPERATOR, value: '(', loc: [ 4, 5 ] },

      { type: CONTEXT, value: '[Wb1]Sheet1', loc: [ 5, 16 ] },
      { type: OPERATOR, value: '!', loc: [ 16, 17 ] },
      { type: REF_RANGE, value: 'A1', loc: [ 17, 19 ] },
      { type: OPERATOR, value: ':', loc: [ 19, 20 ] },
      { type: REF_RANGE, value: 'B2', loc: [ 20, 22 ] },

      { type: OPERATOR, value: ')', loc: [ 22, 23 ] }
    ]);

    // set IDs on all tokens about to be joined
    list[3].id = 'id1';
    list[4].id = 'id2';
    list[5].id = 'id3';
    list[6].id = 'id4';
    list[7].id = 'id5';

    const mergedList = mergeRefTokens(list);
    expect(mergedList).toEqual([
      { type: FX_PREFIX, value: '=', loc: [ 0, 1 ] },
      { type: FUNCTION, value: 'SUM', loc: [ 1, 4 ] },
      { type: OPERATOR, value: '(', loc: [ 4, 5 ] },
      { type: REF_RANGE,
        // the merged token keeps the id of the anchor (range) token, which
        // is the first token encountered in mergeRefTokens' backward scan
        id: 'id5', // token has the id of the first one
        value: '[Wb1]Sheet1!A1:B2',
        loc: [ 5, 22 ] },
      { type: OPERATOR, value: ')', loc: [ 22, 23 ] }
    ]);
  });

  describe('tokenize with mergeRefs enabled', () => {
    const opts = { mergeRefs: true, allowTernary: true };

    test('basic cell references', () => {
      expect(tokenize('A1', opts)).toEqual([
        { type: REF_RANGE, value: 'A1' }
      ]);

      expect(tokenize('A1:A1', opts)).toEqual([
        { type: REF_RANGE, value: 'A1:A1' }
      ]);
    });

    test('beam references', () => {
      expect(tokenize('A:A', opts)).toEqual([
        { type: REF_BEAM, value: 'A:A' }
      ]);
    });

    test('ternary references', () => {
      expect(tokenize('A1:A', opts)).toEqual([
        { type: REF_TERNARY, value: 'A1:A' }
      ]);
    });

    test('quoted sheet references', () => {
      expect(tokenize('\'Sheet1\'!A1', opts)).toEqual([
        { type: REF_RANGE, value: '\'Sheet1\'!A1' }
      ]);

      expect(tokenize('\'Sheet1\'!A:A', opts)).toEqual([
        { type: REF_BEAM, value: '\'Sheet1\'!A:A' }
      ]);

      expect(tokenize('\'Sheet1\'!A1:A', opts)).toEqual([
        { type: REF_TERNARY, value: '\'Sheet1\'!A1:A' }
      ]);
    });

    test('unquoted sheet references', () => {
      expect(tokenize('Sheet1!A1', opts)).toEqual([
        { type: REF_RANGE, value: 'Sheet1!A1' }
      ]);

      expect(tokenize('Sheet1!A:A', opts)).toEqual([
        { type: REF_BEAM, value: 'Sheet1!A:A' }
      ]);

      expect(tokenize('Sheet1!A1:A', opts)).toEqual([
        { type: REF_TERNARY, value: 'Sheet1!A1:A' }
      ]);
    });

    test('workbook references', () => {
      expect(tokenize('[WB]Sheet1!A1', opts)).toEqual([
        { type: REF_RANGE, value: '[WB]Sheet1!A1' }
      ]);

      expect(tokenize('[WB]Sheet1!A:A', opts)).toEqual([
        { type: REF_BEAM, value: '[WB]Sheet1!A:A' }
      ]);

      expect(tokenize('[WB]Sheet1!A1:A', opts)).toEqual([
        { type: REF_TERNARY, value: '[WB]Sheet1!A1:A' }
      ]);

      // trimmed-range operator (`.:.`) merges like a plain `:` range
      expect(tokenize('[WB]Sheet1!A1.:.C3', opts)).toEqual([
        { type: REF_RANGE, value: '[WB]Sheet1!A1.:.C3' }
      ]);
    });

    test('named references', () => {
      expect(tokenize('foo', opts)).toEqual([
        { type: REF_NAMED, value: 'foo' }
      ]);

      expect(tokenize('\'quoted\'!foo', opts)).toEqual([
        { type: REF_NAMED, value: '\'quoted\'!foo' }
      ]);

      expect(tokenize('Sheet1!foo', opts)).toEqual([
        { type: REF_NAMED, value: 'Sheet1!foo' }
      ]);
    });

    test('path references with different formats', () => {
      // a bare bracketed path with no prefix text before `!` does not
      // form a valid reference prefix, so the parts stay separate
      expect(tokenize('[path]!foo', opts)).toEqual([
        { type: UNKNOWN, value: '[path]' },
        { type: OPERATOR, value: '!' },
        { type: REF_NAMED, value: 'foo' }
      ]);

      expect(tokenize('[path]prefix!foo', opts)).toEqual([
        { type: REF_NAMED, value: '[path]prefix!foo' }
      ]);
    });
  });
});
|
@@ -1,4 +1,5 @@
|
|
|
1
|
-
import { CONTEXT, CONTEXT_QUOTE, REF_RANGE, REF_NAMED, REF_BEAM, REF_TERNARY, OPERATOR, REF_STRUCT } from './constants.
|
|
1
|
+
import { CONTEXT, CONTEXT_QUOTE, REF_RANGE, REF_NAMED, REF_BEAM, REF_TERNARY, OPERATOR, REF_STRUCT } from './constants.ts';
|
|
2
|
+
import type { Token } from './types.ts';
|
|
2
3
|
|
|
3
4
|
const END = '$';
|
|
4
5
|
|
|
@@ -33,13 +34,17 @@ const validRunsMerge = [
|
|
|
33
34
|
[ CONTEXT_QUOTE, '!', REF_NAMED, REF_STRUCT ]
|
|
34
35
|
];
|
|
35
36
|
|
|
37
|
+
type TypeNode = {
|
|
38
|
+
[key: string]: TypeNode | boolean;
|
|
39
|
+
};
|
|
40
|
+
|
|
36
41
|
// valid token runs are converted to a tree structure
|
|
37
|
-
const refPartsTree = {};
|
|
38
|
-
function packList (f, node) {
|
|
42
|
+
const refPartsTree: TypeNode = {};
|
|
43
|
+
function packList (f: string[], node: TypeNode) {
|
|
39
44
|
if (f.length) {
|
|
40
45
|
const key = f[0];
|
|
41
|
-
node[key]
|
|
42
|
-
packList(f.slice(1), node[key]);
|
|
46
|
+
if (!node[key]) { node[key] = {}; }
|
|
47
|
+
packList(f.slice(1), node[key] as TypeNode);
|
|
43
48
|
}
|
|
44
49
|
else {
|
|
45
50
|
node[END] = true;
|
|
@@ -49,20 +54,24 @@ validRunsMerge.forEach(run => packList(run.concat().reverse(), refPartsTree));
|
|
|
49
54
|
|
|
50
55
|
// attempt to match a backwards run of tokens from a given point
|
|
51
56
|
// to a path in the tree
|
|
52
|
-
const matcher = (tokens, currNode, anchorIndex, index = 0) => {
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
57
|
+
const matcher = (tokens: Token[], currNode, anchorIndex, index = 0) => {
|
|
58
|
+
let i = index;
|
|
59
|
+
let node = currNode;
|
|
60
|
+
const max = tokens.length - index;
|
|
61
|
+
// keep walking as long as the next backward token matches a child key
|
|
62
|
+
while (i <= max) {
|
|
63
|
+
const token = tokens[anchorIndex - i];
|
|
64
|
+
if (token) {
|
|
65
|
+
const key = (token.type === OPERATOR) ? token.value : token.type;
|
|
66
|
+
if (key in node) {
|
|
67
|
+
node = node[key];
|
|
68
|
+
i += 1;
|
|
69
|
+
continue;
|
|
70
|
+
}
|
|
58
71
|
}
|
|
72
|
+
// can't advance further; accept only if current node is a terminal
|
|
73
|
+
return node[END] ? i : 0;
|
|
59
74
|
}
|
|
60
|
-
if (currNode[END]) {
|
|
61
|
-
// we may end here so this is a match
|
|
62
|
-
return index;
|
|
63
|
-
}
|
|
64
|
-
// no match
|
|
65
|
-
return 0;
|
|
66
75
|
};
|
|
67
76
|
|
|
68
77
|
/**
|
|
@@ -72,28 +81,34 @@ const matcher = (tokens, currNode, anchorIndex, index = 0) => {
|
|
|
72
81
|
* as whole references (`Sheet1!A1:B2`) rather than separate tokens for each
|
|
73
82
|
* part: (`Sheet1`,`!`,`A1`,`:`,`B2`).
|
|
74
83
|
*
|
|
75
|
-
* @param
|
|
76
|
-
* @returns
|
|
84
|
+
* @param tokenlist An array of tokens.
|
|
85
|
+
* @returns A new list of tokens with range parts merged.
|
|
77
86
|
*/
|
|
78
|
-
export function mergeRefTokens (tokenlist) {
|
|
87
|
+
export function mergeRefTokens (tokenlist: Token[]): Token[] {
|
|
79
88
|
const finalTokens = [];
|
|
80
89
|
// this seeks backwards because it's really the range part
|
|
81
90
|
// that controls what can be joined.
|
|
82
91
|
for (let i = tokenlist.length - 1; i >= 0; i--) {
|
|
83
92
|
let token = tokenlist[i];
|
|
84
|
-
const
|
|
85
|
-
if
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
+
const type = token.type;
|
|
94
|
+
// Quick check if token type could even start a valid run
|
|
95
|
+
if (type === REF_RANGE || type === REF_BEAM || type === REF_TERNARY ||
|
|
96
|
+
type === REF_NAMED || type === REF_STRUCT) {
|
|
97
|
+
const valid = matcher(tokenlist, refPartsTree, i);
|
|
98
|
+
if (valid > 1) {
|
|
99
|
+
token = { ...token, value: '' };
|
|
100
|
+
const start = i - valid + 1;
|
|
101
|
+
for (let j = start; j <= i; j++) {
|
|
102
|
+
token.value += tokenlist[j].value;
|
|
103
|
+
}
|
|
104
|
+
// adjust the offsets to include all the text
|
|
105
|
+
if (token.loc && tokenlist[start].loc) {
|
|
106
|
+
token.loc[0] = tokenlist[start].loc[0];
|
|
107
|
+
}
|
|
108
|
+
i -= valid - 1;
|
|
93
109
|
}
|
|
94
|
-
i -= valid - 1;
|
|
95
110
|
}
|
|
96
|
-
finalTokens.
|
|
111
|
+
finalTokens[finalTokens.length] = token;
|
|
97
112
|
}
|
|
98
|
-
return finalTokens;
|
|
113
|
+
return finalTokens.reverse();
|
|
99
114
|
}
|
package/lib/nodeTypes.ts
ADDED
|
import {
  // AST types
  UNARY,
  BINARY,
  REFERENCE,
  LITERAL,
  ERROR_LITERAL,
  CALL,
  ARRAY,
  IDENTIFIER,
  LAMBDA,
  LET,
  LET_DECL
} from './constants.ts';

/**
 * A dictionary of the types used to identify AST node variants.
 *
 * @prop UNARY - A unary operation (`10%`)
 * @prop BINARY - A binary operation (`10+10`)
 * @prop REFERENCE - A range identifier (`A1`)
 * @prop LITERAL - A literal (number, string, or boolean) (`123`, `"foo"`, `false`)
 * @prop ERROR - An error literal (`#VALUE!`)
 * @prop CALL - A function call expression (`SUM(1,2)`)
 * @prop ARRAY - An array expression (`{1,2;3,4}`)
 * @prop IDENTIFIER - A function name identifier (`SUM`)
 * @prop LAMBDA - A LAMBDA expression (`LAMBDA(x,y,x*y)`)
 * @prop LET - A LET expression (`LET(a,A1*10,b,SUM(F:F),a*b)`)
 * @prop LET_DECL - A LET declaration (`LET(a,A1*10,...)`)
 */
export const nodeTypes = Object.freeze({
  /** A unary operation (`10%`) */
  UNARY,
  /** A binary operation (`10+10`) */
  BINARY,
  /** A range identifier (`A1`) */
  REFERENCE,
  /** A literal (number, string, or boolean) (`123`, `"foo"`, `false`) */
  LITERAL,
  /** An error literal (`#VALUE!`) */
  ERROR: ERROR_LITERAL,
  /** A function call expression (`SUM(1,2)`) */
  CALL,
  /** An array expression (`{1,2;3,4}`) */
  ARRAY,
  /** A function name identifier (`SUM`) */
  IDENTIFIER,
  /** A LAMBDA expression (`LAMBDA(x,y,x*y)`) */
  LAMBDA,
  /** A LET expression (`LET(a,A1*10,b,SUM(F:F),a*b)`) */
  LET,
  /** A LET declaration (`LET(a,A1*10,...)`) */
  LET_DECL
});