grammar-well 2.0.7 → 2.2.0

This diff reflects the changes between publicly released versions of this package as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (121)
  1. package/build/generator/artifacts/basic.d.ts +1 -1
  2. package/build/generator/artifacts/basic.js.map +1 -1
  3. package/build/generator/artifacts/lexer.d.ts +2 -2
  4. package/build/generator/artifacts/lexer.js +1 -1
  5. package/build/generator/artifacts/lexer.js.map +1 -1
  6. package/build/generator/artifacts/lr.d.ts +5 -5
  7. package/build/generator/artifacts/lr.js +2 -2
  8. package/build/generator/artifacts/lr.js.map +1 -1
  9. package/build/generator/generator.d.ts +3 -3
  10. package/build/generator/generator.js +3 -3
  11. package/build/generator/generator.js.map +1 -1
  12. package/build/generator/grammars/index.d.ts +2 -2
  13. package/build/generator/grammars/index.js +2 -2
  14. package/build/generator/import-resolvers/auto.d.ts +1 -1
  15. package/build/generator/import-resolvers/browser.d.ts +1 -1
  16. package/build/generator/import-resolvers/browser.js.map +1 -1
  17. package/build/generator/import-resolvers/dictionary.d.ts +1 -1
  18. package/build/generator/import-resolvers/dictionary.js.map +1 -1
  19. package/build/generator/import-resolvers/filesystem.d.ts +1 -1
  20. package/build/generator/index.d.ts +3 -3
  21. package/build/generator/index.js +3 -3
  22. package/build/generator/state.d.ts +1 -1
  23. package/build/generator/stringify/common.d.ts +2 -2
  24. package/build/generator/stringify/common.js.map +1 -1
  25. package/build/generator/stringify/exports/javascript.d.ts +1 -1
  26. package/build/generator/stringify/exports/json.d.ts +1 -1
  27. package/build/generator/stringify/exports/registry.d.ts +6 -6
  28. package/build/generator/stringify/exports/typescript.d.ts +1 -1
  29. package/build/generator/stringify/grammar/v2.d.ts +1 -1
  30. package/build/generator/stringify/javascript.d.ts +2 -2
  31. package/build/generator/stringify/javascript.js.map +1 -1
  32. package/build/index.d.ts +7 -7
  33. package/build/index.js +7 -7
  34. package/build/lexers/character-lexer.d.ts +1 -1
  35. package/build/lexers/stateful-lexer.d.ts +1 -1
  36. package/build/lexers/token-buffer.d.ts +3 -3
  37. package/build/lexers/token-buffer.js +2 -2
  38. package/build/lexers/token-buffer.js.map +1 -1
  39. package/build/parser/algorithms/cyk.d.ts +2 -2
  40. package/build/parser/algorithms/earley.d.ts +15 -12
  41. package/build/parser/algorithms/earley.js +39 -14
  42. package/build/parser/algorithms/earley.js.map +1 -1
  43. package/build/parser/algorithms/lrk/algorithm.d.ts +2 -2
  44. package/build/parser/algorithms/lrk/algorithm.js +31 -21
  45. package/build/parser/algorithms/lrk/algorithm.js.map +1 -1
  46. package/build/parser/algorithms/lrk/canonical-collection.d.ts +10 -10
  47. package/build/parser/algorithms/lrk/canonical-collection.js +73 -49
  48. package/build/parser/algorithms/lrk/canonical-collection.js.map +1 -1
  49. package/build/parser/algorithms/lrk/closure.d.ts +1 -1
  50. package/build/parser/algorithms/lrk/closure.js.map +1 -1
  51. package/build/parser/algorithms/lrk/stack.d.ts +8 -18
  52. package/build/parser/algorithms/lrk/stack.js +11 -24
  53. package/build/parser/algorithms/lrk/stack.js.map +1 -1
  54. package/build/parser/algorithms/lrk/state.d.ts +1 -1
  55. package/build/parser/algorithms/lrk/typings.d.ts +16 -0
  56. package/build/parser/algorithms/lrk/typings.js +2 -0
  57. package/build/parser/algorithms/lrk/typings.js.map +1 -0
  58. package/build/parser/parse.d.ts +1 -1
  59. package/build/typings/ast.d.ts +1 -1
  60. package/build/typings/generator.d.ts +4 -4
  61. package/build/typings/index.d.ts +8 -8
  62. package/build/typings/index.js +4 -4
  63. package/build/typings/runtime.d.ts +3 -3
  64. package/build/utility/format.js +3 -3
  65. package/build/utility/general.d.ts +2 -2
  66. package/build/utility/general.js +1 -1
  67. package/build/utility/general.js.map +1 -1
  68. package/build/utility/index.d.ts +4 -4
  69. package/build/utility/index.js +4 -4
  70. package/build/utility/lint.d.ts +1 -1
  71. package/build/utility/monarch.d.ts +1 -1
  72. package/build/utility/parsing.d.ts +2 -2
  73. package/build/utility/text-format.d.ts +2 -2
  74. package/build/version.json +1 -1
  75. package/package.json +8 -9
  76. package/src/generator/artifacts/basic.ts +6 -4
  77. package/src/generator/artifacts/lexer.ts +6 -4
  78. package/src/generator/artifacts/lr.ts +10 -7
  79. package/src/generator/generator.ts +17 -11
  80. package/src/generator/grammars/index.ts +2 -2
  81. package/src/generator/grammars/v1.ts +1 -1
  82. package/src/generator/grammars/v2.ts +1 -1
  83. package/src/generator/import-resolvers/auto.ts +3 -3
  84. package/src/generator/import-resolvers/browser.ts +5 -2
  85. package/src/generator/import-resolvers/dictionary.ts +5 -2
  86. package/src/generator/import-resolvers/filesystem.ts +1 -1
  87. package/src/generator/index.ts +3 -3
  88. package/src/generator/state.ts +1 -1
  89. package/src/generator/stringify/common.ts +6 -3
  90. package/src/generator/stringify/exports/javascript.ts +1 -1
  91. package/src/generator/stringify/exports/json.ts +1 -1
  92. package/src/generator/stringify/exports/registry.ts +4 -4
  93. package/src/generator/stringify/exports/typescript.ts +1 -1
  94. package/src/generator/stringify/grammar/v2.ts +1 -1
  95. package/src/generator/stringify/javascript.ts +12 -8
  96. package/src/index.ts +7 -7
  97. package/src/lexers/character-lexer.ts +1 -1
  98. package/src/lexers/stateful-lexer.ts +1 -1
  99. package/src/lexers/token-buffer.ts +16 -3
  100. package/src/parser/algorithms/cyk.ts +4 -4
  101. package/src/parser/algorithms/earley.ts +68 -20
  102. package/src/parser/algorithms/lrk/algorithm.ts +40 -25
  103. package/src/parser/algorithms/lrk/canonical-collection.ts +84 -55
  104. package/src/parser/algorithms/lrk/stack.ts +12 -37
  105. package/src/parser/algorithms/lrk/typings.ts +13 -0
  106. package/src/parser/parse.ts +8 -8
  107. package/src/typings/ast.ts +1 -1
  108. package/src/typings/generator.ts +4 -4
  109. package/src/typings/index.ts +8 -8
  110. package/src/typings/runtime.ts +3 -3
  111. package/src/utility/format.ts +4 -4
  112. package/src/utility/general.ts +4 -3
  113. package/src/utility/index.ts +4 -4
  114. package/src/utility/lint.ts +1 -1
  115. package/src/utility/monarch.ts +1 -1
  116. package/src/utility/parsing.ts +1 -1
  117. package/src/utility/text-format.ts +2 -2
  118. package/src/version.json +1 -1
  119. package/tsconfig.tsbuildinfo +1 -1
  120. package/src/parser/algorithms/lrk/closure.ts +0 -37
  121. package/src/parser/algorithms/lrk/state.ts +0 -10
package/src/lexers/token-buffer.ts
@@ -1,4 +1,4 @@
-import { RuntimeLexer, RuntimeLexerToken, TQRestorePoint } from '../typings/index.js';
+import type { RuntimeLexer, RuntimeLexerToken, TQRestorePoint } from '../typings/index.ts';
 
 export class TokenBuffer {
     private history: RuntimeLexerToken[] = [];
@@ -6,6 +6,9 @@ export class TokenBuffer {
 
     private $historyIndex = -1;
 
+    public lexer: RuntimeLexer;
+    private tokenProcessor?: (token: RuntimeLexerToken) => RuntimeLexerToken;
+
     get offset() { return this.active?.offset || 0 }
     get line() { return this.active?.line || 0 }
     get column() { return this.active?.column || 0; }
@@ -15,7 +18,13 @@ export class TokenBuffer {
         return { historyIndex: this.$historyIndex, offset: this.offset };
     }
 
-    constructor(public lexer: RuntimeLexer, private tokenProcessor?: (token: RuntimeLexerToken) => RuntimeLexerToken) { }
+    constructor(
+        lexer: RuntimeLexer,
+        tokenProcessor?: (token: RuntimeLexerToken) => RuntimeLexerToken
+    ) {
+        this.lexer = lexer;
+        this.tokenProcessor = tokenProcessor;
+    }
 
     reset(buffer: string) {
         this.lexer.feed(buffer);
@@ -92,7 +101,11 @@ export class TokenBuffer {
 }
 
 class TokenIterator {
-    constructor(private buffer: TokenBuffer) { }
+    private buffer: TokenBuffer;
+
+    constructor(buffer: TokenBuffer) {
+        this.buffer = buffer;
+    }
 
     next() {
         const token = this.buffer.next()
package/src/parser/algorithms/cyk.ts
@@ -1,7 +1,7 @@
-import { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol, RuntimeParserClass, RuntimeLexerToken } from "../../typings/index.js";
-import { TokenBuffer } from "../../lexers/token-buffer.js";
-import { Matrix } from "../../utility/general.js";
-import { ParserUtility } from "../../utility/parsing.js";
+import type { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol, RuntimeParserClass, RuntimeLexerToken } from "../../typings/index.ts";
+import { TokenBuffer } from "../../lexers/token-buffer.ts";
+import { Matrix } from "../../utility/general.ts";
+import { ParserUtility } from "../../utility/parsing.ts";
 
 export function CYK(language: RuntimeParserClass & { tokens: TokenBuffer }, _options = {}) {
     const { grammar } = language.artifacts;
package/src/parser/algorithms/earley.ts
@@ -1,16 +1,18 @@
-import { Dictionary, RuntimeGrammarProductionRule, RuntimeParserClass } from "../../typings/index.js";
-import { TokenBuffer } from "../../lexers/token-buffer.js";
-import { TextFormatter } from "../../utility/text-format.js";
-import { ParserUtility } from "../../utility/parsing.js";
+import type { Dictionary, RuntimeGrammarProductionRule, RuntimeLexerToken, RuntimeParserClass } from "../../typings/index.ts";
+import { TokenBuffer } from "../../lexers/token-buffer.ts";
+import { TextFormatter } from "../../utility/text-format.ts";
+import { ParserUtility } from "../../utility/parsing.ts";
 
 export interface EarleyParserOptions {
     keepHistory?: boolean;
+    postProcessing?: 'eager' | 'lazy';
 }
 
 export function Earley(language: RuntimeParserClass & { tokens: TokenBuffer }, options: EarleyParserOptions = {}) {
     const { tokens } = language;
     const { rules, start } = language.artifacts.grammar;
-    const column = new Column(rules, 0);
+    const StateClass = options.postProcessing === 'eager' ? EagerState : LazyState;
+    const column = new Column(rules, 0, StateClass);
     const table: Column[] = [column];
     column.wants[start] = [];
     column.predict(start);
@@ -27,7 +29,7 @@ export function Earley(language: RuntimeParserClass & { tokens: TokenBuffer }, o
 
     current++;
 
-    const nextColumn = new Column(rules, current);
+    const nextColumn = new Column(rules, current, StateClass);
     table.push(nextColumn);
 
     const literal = token.value;
@@ -58,9 +60,25 @@ export function Earley(language: RuntimeParserClass & { tokens: TokenBuffer }, o
             results.push(data);
         }
     }
+
+    if (StateClass == LazyState) {
+        const clone = results.length > 1;
+        for (let i = 0; i < results.length; i++) {
+            results[i] = PostProcess(results[i], clone);
+        }
+    }
     return { results, info: { table } };
 }
 
+function PostProcess(ast: PreAST | RuntimeLexerToken, clone?: boolean) {
+    if (!Array.isArray(ast))
+        return clone ? { ...ast } : ast;
+    const data = [];
+    for (let i = 0; i < ast[1].length; i++) {
+        data[i] = PostProcess(ast[1][i], clone);
+    }
+    return ParserUtility.PostProcess(ast[0], data, ast[2]);
+}
 
 class Column {
     data: any;
@@ -68,11 +86,19 @@ class Column {
     wants: Dictionary<State[]> = Object.create(null);// states indexed by the non-terminal they expect
     scannable: State[] = [];// list of states that expect a token
     completed: Dictionary<State[]> = Object.create(null); // states that are nullable
+    private rules: Dictionary<RuntimeGrammarProductionRule[]>;
+    public index: number;
+    private StateClass: Concrete<typeof State>;
 
     constructor(
-        private rules: Dictionary<RuntimeGrammarProductionRule[]>,
-        public index: number
-    ) { }
+        rules: Dictionary<RuntimeGrammarProductionRule[]>,
+        index: number,
+        StateClass: Concrete<typeof State>
+    ) {
+        this.rules = rules;
+        this.index = index;
+        this.StateClass = StateClass;
+    }
 
 
     process() {
@@ -123,7 +149,7 @@ class Column {
             return;
 
         for (const rule of this.rules[exp]) {
-            this.states.push(new State(rule, 0, this.index, this.wants[exp]));
+            this.states.push(new this.StateClass(rule, 0, this.index, this.wants[exp]));
         }
     }
 
@@ -143,22 +169,30 @@ class Column {
     }
 }
 
-class State {
+abstract class State {
     isComplete: boolean;
     data: any = [];
    left: State;
    right: State | StateToken;
+    public rule: RuntimeGrammarProductionRule;
+    public dot: number;
+    public reference: number;
+    public wantedBy: State[];
    constructor(
-        public rule: RuntimeGrammarProductionRule,
-        public dot: number,
-        public reference: number,
-        public wantedBy: State[]
+        rule: RuntimeGrammarProductionRule,
+        dot: number,
+        reference: number,
+        wantedBy: State[]
    ) {
+        this.rule = rule;
+        this.dot = dot;
+        this.reference = reference;
+        this.wantedBy = wantedBy;
        this.isComplete = this.dot === rule.symbols.length;
    }
 
    nextState(child: State | StateToken) {
-        const state = new State(this.rule, this.dot + 1, this.reference, this.wantedBy);
+        const state = new (this.constructor as any)(this.rule, this.dot + 1, this.reference, this.wantedBy);
        state.left = this;
        state.right = child;
        if (state.isComplete) {
@@ -169,9 +203,7 @@ class State {
    }
 
 
-    finish() {
-        this.data = ParserUtility.PostProcess(this.rule, this.data, { reference: this.reference, dot: this.dot });
-    }
+    abstract finish(): void;
 
    protected build() {
        const children = [];
@@ -185,9 +217,25 @@ class State {
    }
 }
 
+class EagerState extends State {
+    finish() {
+        this.data = ParserUtility.PostProcess(this.rule, this.data, { reference: this.reference, dot: this.dot });
+    }
+}
+
+class LazyState extends State {
+    finish() {
+        this.data = [this.rule, this.data, { reference: this.reference, dot: this.dot }];
+    }
+}
+
 interface StateToken {
    data: any,
    token: any,
    isToken: boolean,
    reference: number
-}
+}
+
+type PreAST = [RuntimeGrammarProductionRule, (RuntimeLexerToken | PreAST)[], { reference: number, dot: number }];
+type Concrete<T extends abstract new (...args: any) => any> =
+    new (...args: ConstructorParameters<T>) => InstanceType<T>;
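
The earley.ts hunks above add a postProcessing option: 'eager' keeps the previous behaviour of post-processing each state as it completes, while 'lazy' records raw [rule, children, meta] tuples (the new PreAST type) and resolves them in a single pass once parsing has finished, cloning leaf tokens only when the parse is ambiguous. A minimal sketch of that lazy pass, using stand-in types rather than the package's real grammar and token types:

    // Stand-in types only; the real code above operates on RuntimeGrammarProductionRule
    // and RuntimeLexerToken and delegates to ParserUtility.PostProcess.
    type Rule = { name: string; postprocess?: (children: any[]) => any };
    type PreAST = [Rule, (string | PreAST)[], { reference: number; dot: number }];

    // In lazy mode, each finished state stores a raw tuple instead of a processed value...
    const sum: Rule = { name: 'sum', postprocess: ([a, , b]) => Number(a) + Number(b) };
    const lazyResult: PreAST = [sum, ['1', '+', '2'], { reference: 0, dot: 3 }];

    // ...and one recursive pass applies the post-processors after the parse completes.
    function postProcess(ast: string | PreAST): any {
        if (!Array.isArray(ast)) return ast;
        const children = ast[1].map(postProcess);
        return ast[0].postprocess ? ast[0].postprocess(children) : children;
    }

    console.log(postProcess(lazyResult)); // 3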
package/src/parser/algorithms/lrk/algorithm.ts
@@ -1,37 +1,52 @@
-import { RuntimeParserClass } from "../../../typings/index.js";
-import { TokenBuffer } from "../../../lexers/token-buffer.js";
-import { ParserUtility } from "../../../utility/parsing.js";
-import { CanonicalCollection } from "./canonical-collection.js";
-import { LRStack } from "./stack.js";
+import type { RuntimeParserClass } from "../../../typings/index.ts";
+import { TokenBuffer } from "../../../lexers/token-buffer.ts";
+import { ParserUtility } from "../../../utility/parsing.ts";
+import { CanonicalCollection } from "./canonical-collection.ts";
+import { Stack } from "./stack.ts";
+import type { State } from "./typings.ts";
 
 export function LRK(language: RuntimeParserClass & { tokens: TokenBuffer }, options = {}) {
     const { grammar } = language.artifacts;
     const { tokens } = language;
-    const { states, rules: rules } = new CanonicalCollection(grammar);
-    const stack = new LRStack();
-    const s = states.get('0.0');
-    stack.append(s.rule.name);
-    stack.shift(s);
+    const { start } = new CanonicalCollection(grammar);
+    const stateStack = new Stack<State>();
+    const inputStack = new Stack<any>();
+    stateStack.push(start);
+
     let token;
 
-    while (token = tokens.next()) {
-        for (const [symbol, state] of stack.current.state.actions) {
-            if (ParserUtility.SymbolMatchesToken(symbol, token)) {
-                stack.append(symbol);
-                stack.shift(states.get(state));
-                stack.current.value = token;
-                break;
-            }
+    tokenloop: while (token = tokens.next()) {
+        const match = stateStack.current.actions.find(a => ParserUtility.SymbolMatchesToken(a.symbol, token));
+
+        if (match) {
+            inputStack.push(token);
+            stateStack.push(match.state);
+        } else {
+            throw new Error("Syntax Error: Unexpected Token");
        }
-        while (stack.current.state?.isFinal) {
-            const rule = rules.fetch(stack.current.state.reduce);
-            stack.reduce(rule);
-            stack.current.value = ParserUtility.PostProcess(rule, stack.current.children.map(v => v.value));
-            const s = stack.previous.state.goto.get(rule.name);
-            stack.shift(states.get(s));
 
+        while (stateStack.current.reduce) {
+            const rule = stateStack.current.reduce;
+            const value = inputStack.pop(rule.symbols.length);
+            stateStack.pop(rule.symbols.length);
+            inputStack.push(ParserUtility.PostProcess(rule, value));
+            const nextState = stateStack.current.goto[rule.name];
+
+            if (!nextState)
+                break tokenloop;
+
+            stateStack.push(nextState);
        }
    }
 
-    return { results: [stack.current.value] }
+    if (stateStack.size > 1) {
+        throw new Error("Syntax Error: Unexpected End of Input");
+    }
+    const peek = tokens.next();
+    if (peek) {
+        console.log(peek)
+        throw new Error("Syntax Error: Expected End of Input");
+    }
+
+    return { results: [inputStack.current[0]] }
 }
package/src/parser/algorithms/lrk/canonical-collection.ts
@@ -1,79 +1,108 @@
-import { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol, RuntimeParserClass } from "../../../typings/index.js";
-import { ParserUtility } from "../../../utility/parsing.js";
-import { BiMap } from "./bimap.js";
-import { ClosureBuilder } from "./closure.js";
-import { State } from "./state.js";
+import type { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol, RuntimeParserClass } from "../../../typings/index.ts";
+import { ParserUtility } from "../../../utility/parsing.ts";
+import { TextFormatter } from "../../../utility/text-format.ts";
+import { BiMap } from "./bimap.ts";
+import type { LRItem, State } from "./typings.ts";
 
 export class CanonicalCollection {
-    states: Map<string, State> = new Map();
-    rules: BiMap<RuntimeGrammarProductionRule> = new BiMap();
-    terminals: BiMap<RuntimeGrammarRuleSymbol> = new BiMap();
+    public start: State;
+    public rules: BiMap<RuntimeGrammarProductionRule> = new BiMap();
+    public grammar: RuntimeParserClass['artifacts']['grammar'];
+    private cache: { [key: string]: State } = {};
 
-    private closure: ClosureBuilder;
    constructor(
-        public grammar: RuntimeParserClass['artifacts']['grammar']
+        grammar: RuntimeParserClass['artifacts']['grammar']
    ) {
+        this.grammar = grammar;
        const augmented = {
            name: Symbol() as unknown as string,
            symbols: [this.grammar.start]
        }
-        this.grammar['rules'][augmented.name] = [augmented];
-        this.closure = new ClosureBuilder(this.grammar);
+        this.grammar.rules[augmented.name] = [augmented];
        this.rules.id(augmented);
-        this.addState(this.grammar['rules'][augmented.name][0], 0);
-        this.linkStates('0.0');
+        this.start = this.generateState([{ rule: augmented, dot: 0 }]);
    }
 
-    private addState(rule: RuntimeGrammarProductionRule, dot: number) {
-        const id = this.getStateId(rule, dot);
-        if (this.states.has(id))
-            return;
+    private generateState(kernel: LRItem[]): State {
+        const id = this.canonicalStateId(kernel);
+        if (this.cache[id])
+            return this.cache[id];
 
-        const state: State = {
-            items: [],
-            isFinal: false,
-            actions: new Map(),
-            goto: new Map(),
-            reduce: null,
-            rule: rule
+        this.cache[id] = { id };
+        if (kernel.length == 1 && kernel[0].rule.symbols.length == kernel[0].dot) {
+            this.cache[id].reduce = kernel[0].rule;
+            return this.cache[id];
        }
 
-        state.items.push({ rule, dot });
-        if (rule.symbols.length == dot)
-            state.isFinal = true;
+        const items = [...kernel];
+        const visited = new Set<string>();
+        const refs: { [key: string]: RuntimeGrammarRuleSymbol } = {}
+        const actions: { [key: string]: LRItem[] } = {};
+        const goto: { [key: string]: LRItem[] } = {};
+        for (let i = 0; i < items.length; i++) {
+            const { rule, dot } = items[i];
+            const id = this.canonicalLRItemId(items[i]);
+            if (dot == rule.symbols.length)
+                throw new Error('Reduce Conflict on state: ' + id + `\n${items.map(v => TextFormatter.GrammarRule(v.rule, v.dot)).join('\n')}`);
 
-        this.states.set(id, state);
+            if (visited.has(id))
+                continue;
+            visited.add(id);
+            const symbol = rule.symbols[dot];
+            const name = this.canonicalSymbolId(symbol);
+            refs[name] = symbol;
 
-        state.items.push(...this.closure.get(rule.symbols[dot] as string))
-
-        if (!state.isFinal)
-            for (const { rule, dot } of state.items) {
-                this.addState(rule, dot + 1);
+            if (symbol && !ParserUtility.SymbolIsTerminal(symbol)) {
+                const prods = this.grammar.rules[symbol] || [];
+                for (const rule of prods) {
+                    items.push({ rule, dot: 0 });
+                }
+                goto[name] = goto[name] || [];
+                goto[name].push({ rule, dot: dot + 1 });
+            } else {
+                actions[name] = actions[name] || [];
+                actions[name].push({ rule, dot: dot + 1 });
            }
-    }
+        }
+        this.cache[id].actions = [];
+        this.cache[id].goto = {};
 
-    private linkStates(id: string, completed: Set<string> = new Set()) {
-        completed.add(id);
-        const state = this.states.get(id);
-        if (!state.isFinal) {
-            for (const { rule, dot } of state.items) {
-                const symbol = rule.symbols[dot];
-                const itemStateId = this.getStateId(rule, dot + 1);
-                if (ParserUtility.SymbolIsTerminal(symbol) && typeof symbol != 'symbol') {
-                    state.actions.set(symbol, itemStateId);
-                } else {
-                    state.goto.set(symbol, itemStateId);
-                }
+        for (const key in actions) {
+            this.cache[id].actions.push({ symbol: refs[key], state: this.generateState(actions[key]) })
+        }
 
-                if (!completed.has(itemStateId))
-                    this.linkStates(itemStateId, completed);
-            }
-        } else {
-            state.reduce = this.rules.id(state.rule);
+        for (const key in goto) {
+            this.cache[id].goto[refs[key] as string] = this.generateState(goto[key]);
        }
+
+        return this.cache[id];
+    }
+
+    private canonicalStateId(items: LRItem[]): string {
+        return items
+            .map(item => this.canonicalLRItemId(item))
+            .sort()
+            .join('|');
+    }
+
+    private canonicalLRItemId(item: LRItem) {
+        return `${this.rules.id(item.rule)}:${item.dot}`;
    }
 
-    private getStateId(rule: RuntimeGrammarProductionRule, dot: number) {
-        return this.rules.id(rule) + '.' + dot;
+    private canonicalSymbolId(symbol: RuntimeGrammarRuleSymbol) {
+        if (typeof symbol === 'symbol')
+            return `SY:START`;
+        if (typeof symbol === 'string')
+            return `NT:${symbol}`;
+        if (typeof symbol == 'function')
+            return `FN:${symbol.toString()}`;
+        if (!symbol)
+            return
+        if (symbol instanceof RegExp)
+            return `RG:${symbol.source}`;
+        if ("token" in symbol)
+            return `TK:${symbol.token}`;
+        if ("literal" in symbol)
+            return `LT:${symbol.literal}`;
    }
-}
+}
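
In the rewritten collection, states are generated from kernels of LR items and memoised under a canonical id, so kernels containing the same items in a different order resolve to one cached state. A small sketch of that identity scheme with stand-in types (the real canonicalLRItemId above keys items by BiMap rule ids):

    interface Item { ruleId: number; dot: number }

    // Mirrors canonicalStateId/canonicalLRItemId: sort the "ruleId:dot" ids and join them.
    function stateId(kernel: Item[]): string {
        return kernel.map(i => `${i.ruleId}:${i.dot}`).sort().join('|');
    }

    const a = stateId([{ ruleId: 2, dot: 1 }, { ruleId: 5, dot: 0 }]);
    const b = stateId([{ ruleId: 5, dot: 0 }, { ruleId: 2, dot: 1 }]);
    console.log(a === b); // true: both kernels map to "2:1|5:0" and reuse one cached state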
package/src/parser/algorithms/lrk/stack.ts
@@ -1,53 +1,28 @@
-import { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol } from "../../../typings/index.js";
-import { State } from "./state.js";
+export class Stack<T> {
 
-export class LRStack {
+    private stack: T[] = [];
 
-    stack: LRStackItem[] = [];
-
-    get current() {
-        return this.stack[this.stack.length - 1];
+    get size() {
+        return this.stack.length;
    }
 
    get previous() {
        return this.stack[this.stack.length - 2];
    }
 
-
-    shift(state: State) {
-        this.current.state = state;
+    get current() {
+        return this.stack[this.stack.length - 1];
    }
 
-    reduce(rule: RuntimeGrammarProductionRule) {
-        const n = LRStack.NewItem();
-        const l = rule.symbols.length;
-        n.children = this.stack.splice(l * -1, l);
-        n.children.forEach(v => delete v.state);
-        n.rule = rule;
-        n.symbol = rule.name;
-        this.stack.push(n);
+    set current(item: T) {
+        this.stack[this.stack.length - 1] = item;
    }
 
-    append(symbol: RuntimeGrammarRuleSymbol) {
-        this.stack.push(LRStack.NewItem())
-        this.current.symbol = symbol;
+    push(...items: T[]) {
+        return this.stack.push(...items);
    }
 
-    static NewItem(): LRStackItem {
-        return {
-            children: [],
-            state: null,
-            symbol: null,
-            rule: null,
-            value: null
-        }
+    pop(n: number = 1): T[] {
+        return this.stack.splice(n * -1, n);
    }
-}
-
-interface LRStackItem {
-    children: LRStackItem[];
-    state: State;
-    symbol: RuntimeGrammarRuleSymbol;
-    rule: RuntimeGrammarProductionRule;
-    value: any;
 }
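
The LR-specific LRStack and its LRStackItem records are replaced by a plain generic stack, and the parser now keeps separate state and input stacks (see algorithm.ts above). A usage sketch of the new class, assuming the Stack defined in the hunk above is in scope; note that pop(n) removes and returns the top n items rather than a single value:

    const stack = new Stack<number>();
    stack.push(1, 2, 3, 4);
    console.log(stack.size);     // 4
    console.log(stack.current);  // 4
    console.log(stack.previous); // 3
    console.log(stack.pop(2));   // [3, 4]: splices the top two items off the stack
    stack.current = 99;          // the setter overwrites the new top (previously 2)
    console.log(stack.pop());    // [99]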
package/src/parser/algorithms/lrk/typings.ts
@@ -0,0 +1,13 @@
+import type { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol } from "../../../typings/index.ts";
+
+export interface LRItem {
+    rule: RuntimeGrammarProductionRule;
+    dot: number;
+}
+
+export interface State {
+    id: string;
+    actions?: { symbol: RuntimeGrammarRuleSymbol, state: State }[];
+    goto?: { [key: string]: State };
+    reduce?: RuntimeGrammarProductionRule;
+}
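
The new State interface replaces the old Map-based actions and goto tables with a plain array and object. A hand-built example of the shape for a one-rule grammar S -> "a", written with local stand-in types rather than the package's real imports:

    interface Rule { name: string; symbols: unknown[] }
    interface State {
        id: string;
        actions?: { symbol: unknown; state: State }[];
        goto?: { [key: string]: State };
        reduce?: Rule;
    }

    // Shifting "a" from the start state lands in a state whose only job is to reduce by S.
    const rule: Rule = { name: 'S', symbols: [{ literal: 'a' }] };
    const afterA: State = { id: '0:1', reduce: rule };
    const start: State = { id: '0:0', actions: [{ symbol: { literal: 'a' }, state: afterA }], goto: {} };
    console.log(start.actions?.[0]?.state.reduce === rule); // true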
package/src/parser/parse.ts
@@ -1,11 +1,11 @@
-import { ParserAlgorithm, RuntimeParserClass } from "../typings/index.js";
-import { CharacterLexer } from "../lexers/character-lexer.js";
-import { StatefulLexer } from "../lexers/stateful-lexer.js";
-import { TokenBuffer } from "../lexers/token-buffer.js";
-import { CYK } from "./algorithms/cyk.js";
-import { Earley } from "./algorithms/earley.js";
-import { LRK } from "./algorithms/lrk/algorithm.js";
-import { ParserUtility } from "../utility/parsing.js";
+import type { ParserAlgorithm, RuntimeParserClass } from "../typings/index.ts";
+import { CharacterLexer } from "../lexers/character-lexer.ts";
+import { StatefulLexer } from "../lexers/stateful-lexer.ts";
+import { TokenBuffer } from "../lexers/token-buffer.ts";
+import { CYK } from "./algorithms/cyk.ts";
+import { Earley } from "./algorithms/earley.ts";
+import { LRK } from "./algorithms/lrk/algorithm.ts";
+import { ParserUtility } from "../utility/parsing.ts";
 
 const ParserRegistry: { [key: string]: ParserAlgorithm } = {
     earley: Earley,
package/src/typings/ast.ts
@@ -1,4 +1,4 @@
-import { Dictionary } from "./common.js";
+import type { Dictionary } from "./common.ts";
 
 export type AST = ASTDirectives[];
 
package/src/typings/generator.ts
@@ -1,7 +1,7 @@
-import { GeneratorState } from "../generator/state.js";
-import { ASTGrammarSymbolLiteral, ASTGrammarSymbolNonTerminal, ASTGrammarSymbolRegex, ASTGrammarSymbolToken, ASTJavaScriptBuiltin, ASTJavaScriptLiteral, ASTJavaScriptTemplate, ASTLexerStateImportRule, ASTLexerStateMatchRule, ASTLexerStateNonMatchRule } from "./ast.js";
-import { Dictionary } from "./common.js";
-import { ImportResolver, ImportResolverConstructor } from "./index.js";
+import { GeneratorState } from "../generator/state.ts";
+import type { ASTGrammarSymbolLiteral, ASTGrammarSymbolNonTerminal, ASTGrammarSymbolRegex, ASTGrammarSymbolToken, ASTJavaScriptBuiltin, ASTJavaScriptLiteral, ASTJavaScriptTemplate, ASTLexerStateImportRule, ASTLexerStateMatchRule, ASTLexerStateNonMatchRule } from "./ast.ts";
+import type { Dictionary } from "./common.ts";
+import type { ImportResolver, ImportResolverConstructor } from "./index.ts";
 
 
 export type GenerateOptions = GeneratorOptions & { output?: GeneratorOutputOptions };
package/src/typings/index.ts
@@ -1,11 +1,11 @@
-import { TokenBuffer } from "../lexers/token-buffer.js";
-import { ParserUtility } from "../utility/parsing.js";
-import { ASTLexerStateNonMatchRule } from "./ast.js";
-import { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol, RuntimeParserClass, RuntimeLexerStateMatchRule } from "./runtime.js";
-export * from './ast.js';
-export * from './common.js';
-export * from './generator.js';
-export * from './runtime.js';
+import type { TokenBuffer } from "../lexers/token-buffer.ts";
+import type { ParserUtility } from "../utility/parsing.ts";
+import type { ASTLexerStateNonMatchRule } from "./ast.ts";
+import type { RuntimeGrammarProductionRule, RuntimeGrammarRuleSymbol, RuntimeParserClass, RuntimeLexerStateMatchRule } from "./runtime.ts";
+export * from './ast.ts';
+export * from './common.ts';
+export * from './generator.ts';
+export * from './runtime.ts';
 
 export interface ImportResolver {
     path(path: string): string;
package/src/typings/runtime.ts
@@ -1,6 +1,6 @@
-import { LRState } from "./index.js";
-import { ASTGrammarSymbolLiteral, ASTGrammarSymbolToken, ASTLexerStateNonMatchRule } from "./ast.js";
-import { Dictionary } from "./common.js";
+import type { LRState } from "./index.ts";
+import type { ASTGrammarSymbolLiteral, ASTGrammarSymbolToken, ASTLexerStateNonMatchRule } from "./ast.ts";
+import type { Dictionary } from "./common.ts";
 
 export interface RuntimeParserClass {
     artifacts: {
package/src/utility/format.ts
@@ -1,7 +1,7 @@
-import V1Grammar from '../generator/grammars/v1.js';
-import V2Grammar from '../generator/grammars/v2.js';
-import { V2GrammarString } from "../generator/stringify/grammar/v2.js";
-import { Parse } from '../parser/parse.js';
+import V1Grammar from '../generator/grammars/v1.ts';
+import V2Grammar from '../generator/grammars/v2.ts';
+import { V2GrammarString } from "../generator/stringify/grammar/v2.ts";
+import { Parse } from '../parser/parse.ts';
 
 export function Format(source: string, sourceVersion: '1' | '2' = '2') {
     const grammar = sourceVersion.toString() == '1' ? V1Grammar : V2Grammar;
package/src/utility/general.ts
@@ -1,4 +1,4 @@
-import { Dictionary, GeneratorGrammarSymbol } from "../typings/index.js";
+import type { Dictionary, GeneratorGrammarSymbol } from "../typings/index.ts";
 
 export class Collection<T> {
     categorized: Dictionary<Dictionary<number>> = {};
@@ -85,10 +85,11 @@ export class Matrix<T> {
     set x(x: number) { x != this.$x && this.resize(x, this.y); }
     get y() { return this.$y }
     set y(y: number) { y != this.$y && this.resize(this.x, y); }
-
+    private initial?: T | ((...args: any) => T);
     matrix: GetCallbackOrValue<T>[][] = [];
 
-    constructor(x: number, y: number, private initial?: T | ((...args: any) => T)) {
+    constructor(x: number, y: number, initial?: T | ((...args: any) => T)) {
+        this.initial = initial;
         this.resize(x, y);
     }