@bikky/replication 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140) hide show
  1. package/Constants/Errors.d.ts +27 -0
  2. package/Constants/Errors.js +75 -0
  3. package/Constants/Logging.d.ts +17 -0
  4. package/Constants/Logging.js +97 -0
  5. package/Constants/ReplicableRegistry.d.ts +37 -0
  6. package/Constants/ReplicableRegistry.js +234 -0
  7. package/Constants/SerialisationTypes.d.ts +82 -0
  8. package/Constants/SerialisationTypes.js +160 -0
  9. package/Constants/SourceMaps.d.ts +10 -0
  10. package/Constants/SourceMaps.js +12 -0
  11. package/Constants/TraversalStep.d.ts +5 -0
  12. package/Constants/TraversalStep.js +2 -0
  13. package/Constants/Versions.d.ts +15 -0
  14. package/Constants/Versions.js +63 -0
  15. package/Expressions/Compiler/BuiltinGrammar.d.ts +234 -0
  16. package/Expressions/Compiler/BuiltinGrammar.js +446 -0
  17. package/Expressions/Compiler/ExpressionGrammar.d.ts +89 -0
  18. package/Expressions/Compiler/ExpressionGrammar.js +70 -0
  19. package/Expressions/Compiler/Parser.d.ts +56 -0
  20. package/Expressions/Compiler/Parser.js +314 -0
  21. package/Expressions/Compiler/Tokenizer.d.ts +52 -0
  22. package/Expressions/Compiler/Tokenizer.js +222 -0
  23. package/Expressions/Compiler/__tests__/Replicable.Expressions.Parser.test.d.ts +1 -0
  24. package/Expressions/Compiler/__tests__/Replicable.Expressions.Parser.test.js +516 -0
  25. package/Expressions/Compiler/__tests__/Replicable.Expressions.Tokenizer.test.d.ts +1 -0
  26. package/Expressions/Compiler/__tests__/Replicable.Expressions.Tokenizer.test.js +68 -0
  27. package/Expressions/CreateEvaluator.d.ts +4 -0
  28. package/Expressions/CreateEvaluator.js +85 -0
  29. package/Expressions/EvaluatorChain.d.ts +19 -0
  30. package/Expressions/EvaluatorChain.js +137 -0
  31. package/Expressions/EvaluatorSteps.d.ts +19 -0
  32. package/Expressions/EvaluatorSteps.js +12 -0
  33. package/Expressions/EvaluatorString.d.ts +21 -0
  34. package/Expressions/EvaluatorString.js +26 -0
  35. package/Expressions/Expression.d.ts +36 -0
  36. package/Expressions/Expression.js +147 -0
  37. package/Expressions/Traverser.d.ts +28 -0
  38. package/Expressions/Traverser.js +348 -0
  39. package/Expressions/TypeRegistry/Accessors.d.ts +26 -0
  40. package/Expressions/TypeRegistry/Accessors.js +58 -0
  41. package/Expressions/TypeRegistry/ChainCollections.d.ts +51 -0
  42. package/Expressions/TypeRegistry/ChainCollections.js +134 -0
  43. package/Expressions/TypeRegistry/ChainTypes.d.ts +23 -0
  44. package/Expressions/TypeRegistry/ChainTypes.js +46 -0
  45. package/Expressions/TypeRegistry/CustomAPI.d.ts +36 -0
  46. package/Expressions/TypeRegistry/CustomAPI.js +181 -0
  47. package/Expressions/TypeRegistry/Primitive.d.ts +19 -0
  48. package/Expressions/TypeRegistry/Primitive.js +47 -0
  49. package/Expressions/TypeRegistry/Registry.d.ts +27 -0
  50. package/Expressions/TypeRegistry/Registry.js +270 -0
  51. package/Expressions/TypeRegistry/ReplAPI.d.ts +41 -0
  52. package/Expressions/TypeRegistry/ReplAPI.js +220 -0
  53. package/Expressions/TypeRegistry/Scope.d.ts +24 -0
  54. package/Expressions/TypeRegistry/Scope.js +44 -0
  55. package/Expressions/TypeRegistry/Types.d.ts +23 -0
  56. package/Expressions/TypeRegistry/Types.js +1 -0
  57. package/Expressions/TypeRegistry/__tests__/Replicable.Expressions.Accessors.test.d.ts +1 -0
  58. package/Expressions/TypeRegistry/__tests__/Replicable.Expressions.Accessors.test.js +31 -0
  59. package/Expressions/__tests__/ExpressionExamples.d.ts +28 -0
  60. package/Expressions/__tests__/ExpressionExamples.js +50 -0
  61. package/Expressions/__tests__/Replicable.Expressions.Expressions.test.d.ts +1 -0
  62. package/Expressions/__tests__/Replicable.Expressions.Expressions.test.js +166 -0
  63. package/IDPool.d.ts +18 -0
  64. package/IDPool.data.d.ts +17 -0
  65. package/IDPool.js +139 -0
  66. package/License.txt +1 -0
  67. package/Main.d.ts +13 -0
  68. package/Main.js +13 -0
  69. package/Networking.d.ts +60 -0
  70. package/Networking.js +626 -0
  71. package/Replicatable.d.ts +66 -0
  72. package/Replicatable.js +123 -0
  73. package/Tracking/Buffable.d.ts +68 -0
  74. package/Tracking/Buffable.js +194 -0
  75. package/Tracking/Class.d.ts +97 -0
  76. package/Tracking/Class.js +221 -0
  77. package/Tracking/Functions.d.ts +14 -0
  78. package/Tracking/Functions.js +27 -0
  79. package/Tracking/GlobalGroup.d.ts +5 -0
  80. package/Tracking/GlobalGroup.js +39 -0
  81. package/Tracking/Property.d.ts +95 -0
  82. package/Tracking/Property.js +125 -0
  83. package/Tracking/Types.d.ts +33 -0
  84. package/Tracking/Types.js +1 -0
  85. package/Tracking/__tests__/Replicable.Tracking.Decorator.test.d.ts +1 -0
  86. package/Tracking/__tests__/Replicable.Tracking.Decorator.test.js +151 -0
  87. package/Tracking/__tests__/Replicable.Tracking.Deserialisation.test.d.ts +1 -0
  88. package/Tracking/__tests__/Replicable.Tracking.Deserialisation.test.js +253 -0
  89. package/Tracking/__tests__/Replicable.Tracking.MixinSchemaGeneration.test.d.ts +1 -0
  90. package/Tracking/__tests__/Replicable.Tracking.MixinSchemaGeneration.test.js +135 -0
  91. package/Tracking/__tests__/Replicable.Tracking.Struct.test.d.ts +1 -0
  92. package/Tracking/__tests__/Replicable.Tracking.Struct.test.js +66 -0
  93. package/Tracking/__tests__/Replicable.Tracking.Type.test.d.ts +1 -0
  94. package/Tracking/__tests__/Replicable.Tracking.Type.test.js +67 -0
  95. package/Transformers/Configurer.d.ts +39 -0
  96. package/Transformers/Configurer.js +415 -0
  97. package/Transformers/Constructor.d.ts +12 -0
  98. package/Transformers/Constructor.js +44 -0
  99. package/Transformers/Definitions.d.ts +102 -0
  100. package/Transformers/Definitions.js +626 -0
  101. package/Transformers/Loader.d.ts +45 -0
  102. package/Transformers/Loader.js +350 -0
  103. package/Transformers/Progress.d.ts +32 -0
  104. package/Transformers/Progress.js +429 -0
  105. package/Transformers/Reference.d.ts +37 -0
  106. package/Transformers/Reference.js +212 -0
  107. package/Transformers/SchemaGenerator.d.ts +102 -0
  108. package/Transformers/SchemaGenerator.js +564 -0
  109. package/Transformers/Serialiser.d.ts +31 -0
  110. package/Transformers/Serialiser.js +366 -0
  111. package/Transformers/Utils.d.ts +33 -0
  112. package/Transformers/Utils.js +287 -0
  113. package/Transformers/__tests__/Examples.d.ts +168 -0
  114. package/Transformers/__tests__/Examples.js +263 -0
  115. package/Transformers/__tests__/Replicable.Transformers.Definitions.test.d.ts +1 -0
  116. package/Transformers/__tests__/Replicable.Transformers.Definitions.test.js +457 -0
  117. package/Transformers/__tests__/Replicable.Transformers.Loader.test.d.ts +1 -0
  118. package/Transformers/__tests__/Replicable.Transformers.Loader.test.js +339 -0
  119. package/Transformers/__tests__/Replicable.Transformers.Progress.test.d.ts +1 -0
  120. package/Transformers/__tests__/Replicable.Transformers.Progress.test.js +256 -0
  121. package/Transformers/__tests__/Replicable.Transformers.Reference.test.d.ts +1 -0
  122. package/Transformers/__tests__/Replicable.Transformers.Reference.test.js +167 -0
  123. package/Transformers/__tests__/Replicable.Transformers.SchemaGenerator.test.d.ts +1 -0
  124. package/Transformers/__tests__/Replicable.Transformers.SchemaGenerator.test.js +400 -0
  125. package/Transformers/__tests__/Replicable.Transformers.SchemaGeneratorOutput.test.d.ts +1 -0
  126. package/Transformers/__tests__/Replicable.Transformers.SchemaGeneratorOutput.test.js +441 -0
  127. package/Transformers/__tests__/Replicable.Transformers.Serialiser.test.d.ts +1 -0
  128. package/Transformers/__tests__/Replicable.Transformers.Serialiser.test.js +320 -0
  129. package/Transformers/__tests__/Replicable.Transformers.Utils.test.d.ts +1 -0
  130. package/Transformers/__tests__/Replicable.Transformers.Utils.test.js +534 -0
  131. package/__tests__/Replicable.Expressions.test.d.ts +1 -0
  132. package/__tests__/Replicable.Expressions.test.js +166 -0
  133. package/__tests__/Replicable.IDPool.test.d.ts +1 -0
  134. package/__tests__/Replicable.IDPool.test.js +11 -0
  135. package/__tests__/Replicable.ReplicableRegistry.test.d.ts +1 -0
  136. package/__tests__/Replicable.ReplicableRegistry.test.js +154 -0
  137. package/__tests__/Replicable.Serialisation.test.d.ts +1 -0
  138. package/__tests__/Replicable.Serialisation.test.js +283 -0
  139. package/package.json +14 -0
  140. package/tsconfig.json +19 -0
@@ -0,0 +1,314 @@
1
+ import { ExpressionGrammar } from "./ExpressionGrammar.js";
2
+ import { getEASTTokenValues, isToken, TokenizeExpression, TokenType } from "./Tokenizer.js";
3
+ import { GetVersionIndex } from "../../Constants/Versions.js";
4
+ let debug = false;
5
//Called when the token stream ran out part-way through a grammar: the
//partially-built node is still acceptable iff every rule we have not yet
//matched is optional AND permits zero repetitions.
export function CheckRuleDone(grammar, currentRuleIndex) {
    for (let i = currentRuleIndex; i < grammar.rules.length; i++) {
        const remaining = grammar.rules[i];
        //A missing repeat spec means the rule must occur exactly once.
        const minRepeats = remaining.repeat && remaining.repeat[0] != null ? remaining.repeat[0] : 1;
        //NOTE(review): a rule flagged optional but without a zero-minimum
        //repeat still blocks completion here — confirm that is intended.
        if (!remaining.optional || minRepeats > 0) {
            return false;
        }
    }
    //Ran out of input but finished matching or everything else is skippable.
    return true;
}
16
//Decides whether a grammar rule may be skipped when it fails to match.
export function IsRuleSkippable(rule, previousRuleMatched) {
    //"previous"-style optionality: the rule may be skipped only when the
    //previous rule did not match; if it did match, this rule is mandatory.
    if (rule.optional == "previous") {
        return !previousRuleMatched;
    }
    //Otherwise a rule is skippable when flagged optional or when its repeat
    //range allows zero occurrences.
    const zeroRepeatsAllowed = !!(rule.repeat && rule.repeat[0] == 0);
    return !!rule.optional || zeroRepeatsAllowed;
}
29
//Normalises the rule's accepted type(s) into an array we own, expanding the
//INPUT_TYPES placeholder into the caller-provided types.
export function ComputeValidTypes(rule, extraTypes) {
    const validTypes = Array.isArray(rule.type) ? [...rule.type] : [rule.type];
    const placeholderIndex = validTypes.indexOf(ExpressionGrammar.SpecialType.INPUT_TYPES);
    if (placeholderIndex !== -1) {
        console.debug(`* adding ${extraTypes.map((e) => ExpressionGrammar.NodeType[e])} to ${ExpressionGrammar.NodeType[rule.type]}`);
        validTypes.splice(placeholderIndex, 1, ...extraTypes);
    }
    return validTypes;
}
37
//Computes the set of node types a child rule accepts as input, expanding the
//INPUT_TYPES placeholder into the types the parent passed down.
export function ComputeChildInputs(rule, extraTypes) {
    var _a;
    //Work on a copy: the previous code aliased rule.input and spliced it in
    //place, which permanently rewrote the shared grammar definition and leaked
    //one parse's input types into every later parse. ComputeValidTypes already
    //copies its array; mirror that here.
    let childInputs = [...((_a = rule.input) !== null && _a !== void 0 ? _a : [])];
    if (childInputs.includes(ExpressionGrammar.SpecialType.INPUT_TYPES)) {
        console.debug(`** sending ${extraTypes.map((e) => ExpressionGrammar.NodeType[e])} to child`);
        childInputs.splice(childInputs.indexOf(ExpressionGrammar.SpecialType.INPUT_TYPES), 1, ...extraTypes);
    }
    return childInputs;
}
46
//Checks the token's text against a rule's value constraints (exact value,
//allowed-value set, or exclusion list). With no constraints, any text matches.
export function TokenDataMatchesRule(version, gram, token) {
    //A single required value: the token text must equal it exactly.
    if (gram.value) {
        return gram.value == token.data;
    }
    //A set of allowed values: either listed inline, or resolved from a token
    //category for the file's language version.
    if (gram.values) {
        const allowed = Array.isArray(gram.values)
            ? gram.values
            : [...getEASTTokenValues(version, gram.values)];
        return allowed.includes(token.data);
    }
    //No positive constraint: anything goes unless explicitly excluded.
    const excluded = gram.excludeValues;
    return !(excluded && excluded.includes(token.data));
}
62
//Human-readable name for a token or EAST node, for debug output: tokens print
//their token-type name, nodes print their node-type name.
export function PrintNodeName(value) {
    return isToken(value)
        ? TokenType[value.type]
        : ExpressionGrammar.NodeType[value.type];
}
68
//Shortened token text for debug output: long token data is truncated with an
//ellipsis so log lines stay readable.
export function PrintTokenText(token) {
    const MAX_SHOWN = 20;
    return token.data.length > MAX_SHOWN
        ? token.data.slice(0, MAX_SHOWN) + "..."
        : token.data;
}
74
//Prepares a matched value for storage on a node under construction.
//Tokens become nodes: a shallow copy with the owning file reference attached.
//Anything that is already a node is stored as-is.
export function MakeNewNodeFrom(details, value) {
    if (!isToken(value)) {
        return value;
    }
    return Object.assign({}, value, { file: details.parser.file });
}
84
//Sets the node's values object to equal the values specified by the rule.
//Records a matched token or child node onto the in-progress node `progress`:
// - named rules store under progress.values[rule.name] (an array when repeated),
// - unnamed `unroll` rules merge the child node's values into progress.values,
// - unnamed, non-unroll rules are matched but deliberately not stored.
//Also consumes the current token when `value` is a raw token.
export function AddToPartialEASTNode(details, rule, progress, value) {
    //Set the values to a particular name.
    if (rule.name) {
        console.debug(`${details.debugTabs}Adding ${PrintNodeName(value)} ${rule.repeat ? "to" : "as"} ${rule.name}`);
        //Repeated rules are stored as arrays
        if (rule.repeat) {
            //name + unroll + repeat is contradictory: unrolling merges values,
            //arrays accumulate them.
            if (rule.unroll) {
                throw new Error(`Can't unroll repeated grammar rule.`);
            }
            //Create the array if it doesn't exist.
            if (!progress.values[rule.name]) {
                progress.values[rule.name] = [];
            }
            //Ensure the array is an array (a non-repeat rule may have already
            //stored a single value under the same name).
            if (!Array.isArray(progress.values[rule.name])) {
                throw new Error(`Node ${PrintNodeName(progress)}'s ${rule.name} should be an array but isn't.`);
            }
            progress.values[rule.name]
                .push(MakeNewNodeFrom(details, value));
        }
        else {
            progress.values[rule.name] = MakeNewNodeFrom(details, value);
        }
    }
    else {
        if (rule.unroll) {
            //Only nodes carry a `values` object; raw tokens cannot be unrolled.
            if (!("values" in value)) {
                throw new Error("Can't unroll tokens.");
            }
            console.debug(`${details.debugTabs} - Unrolling ${Object.keys(value.values)} to `
                + `${PrintNodeName(progress)}'s ${Object.keys(progress.values)}`);
            //Existing values win over unrolled ones: progress.values is merged
            //last, so it overwrites same-named keys from the child.
            progress.values = Object.assign(Object.assign({}, value.values), progress.values);
        }
        // Skip any rule that isn't given a name or unroll, since it's not meant to be stored.
    }
    //Only increment token position if token (child nodes already consumed
    //their tokens while being built).
    if (isToken(value)) {
        details.parser.consumeToken();
    }
}
125
//Attempts to match a single grammar rule at the parser's current token.
//Each candidate type is tried in order: plain token types are compared against
//the current token, compound node types recurse into MatchGrammar. On success
//the match is recorded onto `progress` (the node under construction) and
//`progress` is returned; on failure returns null (MatchGrammar handles any
//token-stream rewind).
export function MatchRule(details, rule, progress, historicalMatches) {
    var _a, _b;
    //Expand any INPUT_TYPES placeholders using the types the parent passed down.
    let validTypes = ComputeValidTypes(rule, details.extraTypes);
    let childInputs = ComputeChildInputs(rule, details.extraTypes);
    for (let type of validTypes) {
        //Check if type is the current valid type.
        if (type == details.parser.token.type) {
            console.debug(`${details.debugTabs} - Matched a ${TokenType[type]} ${PrintTokenText(details.parser.token)}`);
            //Token type is correct but value is incorrect, so abort.
            if (!TokenDataMatchesRule(details.parser.version, rule, details.parser.token)) {
                console.debug(`${details.debugTabs} - Token data ${PrintTokenText(details.parser.token)} is undesirable, skipping type.`);
                continue;
            }
            AddToPartialEASTNode(details, rule, progress, details.parser.token);
            return progress;
        }
        //Or if we're currently looking at a compound node type, we'll see if we can create it!
        else if (type in ExpressionGrammar.NodeType) {
            console.debug(`${details.debugTabs} - Trying to make a ${ExpressionGrammar.NodeType[type]}`);
            //Try make a node.
            //Run through all of the rules of a possible compound rule.
            //If the rules match, then make a new node.
            let newNode = MatchGrammar(Object.assign(Object.assign({}, details), { extraTypes: childInputs }), type, historicalMatches);
            //Node succeeded, continue.
            if (newNode) {
                console.debug(`${details.debugTabs}Made a ${ExpressionGrammar.NodeType[type]}`);
                AddToPartialEASTNode(details, rule, progress, newNode);
                return progress;
            }
            //Node failed, try the next type.
            else {
                continue;
            }
        }
    }
    //No candidate type matched the current token.
    console.debug(`${details.debugTabs}Failed to create a ${PrintNodeName(progress)}, '${details.parser.token.data}' != '${(_b = (_a = rule.debug_name) !== null && _a !== void 0 ? _a : rule.value) !== null && _b !== void 0 ? _b : rule.values}'`);
    return null;
}
163
//Matches a rule that may occur multiple times (rule.repeat = [min, max]),
//optionally separated by rule.repeatSeparator tokens. Returns `progress` when
//the repetition count is acceptable, or null when fewer than the minimum
//repeats matched.
//NOTE(review): rule.repeat[1] is read without a guard; an absent upper bound
//only works because `n >= undefined` is false — confirm a [min]-only form is
//intended. Also the max check runs before the min check, so e.g. repeat [1,1]
//with zero matches still returns progress — confirm that ordering.
export function MatchRepeatingRule(details, rule, progress, historicalMatches) {
    var _a, _b, _c, _d;
    let ruleCurrentRepeatNumber = 0;
    let continueRepeating = true;
    do {
        //For each type that may be next, there is an early exit if repeating but no more tokens.
        let valid = details.parser.done ? null : MatchRule(details, rule, progress, historicalMatches);
        if (valid == null) {
            //Cannot repeat any further (hit the maximum), so accept what we have.
            if (ruleCurrentRepeatNumber + 1 >= rule.repeat[1]) {
                return progress;
            }
            //Didn't repeat enough, so die in a fire!
            else if (ruleCurrentRepeatNumber < rule.repeat[0]) {
                console.debug(`${details.debugTabs}Repeated ${ruleCurrentRepeatNumber} times, which is less than ${rule.repeat[0]}!`);
                if (!details.parser.done) {
                    console.debug(`${details.debugTabs}Failed to create a repeating`
                        + ` ${PrintNodeName(progress)}, '${details.parser.token.data}' != '${(_b = (_a = rule.debug_name) !== null && _a !== void 0 ? _a : rule.value) !== null && _b !== void 0 ? _b : rule.values}'`);
                }
                else {
                    console.debug(`${details.debugTabs}Ran out of tokens to repeat ${(_d = (_c = rule.debug_name) !== null && _c !== void 0 ? _c : rule.value) !== null && _d !== void 0 ? _d : rule.values}`);
                }
                return null;
            }
            //Repeated enough, but didn't match.
            else {
                return progress;
            }
        }
        //Consume repeat token if present!
        if (rule.repeatSeparator) {
            //A separator has no "previous rule" to key off, so that mode is invalid.
            if (rule.repeatSeparator.optional == "previous") {
                throw new Error(`Repeat separator's optionality can't be based on previous rule.`);
            }
            if (details.parser.token.type == rule.repeatSeparator.type
                && TokenDataMatchesRule(details.parser.version, rule.repeatSeparator, details.parser.token)) {
                console.debug(`${details.debugTabs}Skipping ${PrintNodeName(details.parser.token)} ${details.parser.token.data}`);
                details.parser.consumeToken();
            }
            //Missing separator: keep repeating only if the separator is optional.
            else {
                continueRepeating = !!rule.repeatSeparator.optional;
            }
        }
        ruleCurrentRepeatNumber++;
    } while (continueRepeating);
    return progress;
}
210
//Attempts to build a complete node of `grammarType` starting at the parser's
//current token, by matching each of the grammar's rules in order. Returns the
//finished node annotated with its source span, or null — in which case the
//parser has been rewound to where this node attempt started.
export function MatchGrammar(details, grammarType, historicalMatches) {
    let grammar = ExpressionGrammar.getRule(details.parser.versionIndex, grammarType);
    //Remember where we started so a failed match can rewind the token stream.
    let tokenIndexAtStartOfNode = details.parser.currentNodeIndex;
    //NOTE(review): previousRuleMatched is never reassigned below, so
    //optional:"previous" rules always see true — confirm whether it should be
    //updated from `result` on each iteration.
    let previousRuleMatched = true;
    let start = details.parser.token.position;
    let column = details.parser.token.column;
    let line = details.parser.token.line;
    // We've hit a loop - we've already tried to evaluate this grammar type for this node.
    // We should only need to do this for the first rule in a grammar, because it's fine for
    // rules to be recursive after the first entry (e.g. having multiple parenthesised expressions
    // starting with multiple '('s ).
    if (historicalMatches.includes(grammar.ID)) {
        return null;
    }
    let node = { type: grammarType, values: {}, debugName: PrintNodeName({ type: grammarType }) };
    //Children log one tab deeper so the debug output shows nesting.
    let childDetails = Object.assign(Object.assign({}, details), { debugTabs: details.debugTabs + "\t" });
    for (let grammarRuleIndex = 0; grammarRuleIndex < grammar.rules.length; grammarRuleIndex++) {
        if (details.parser.done) {
            //Out of tokens: succeed only if every remaining rule is skippable.
            if (CheckRuleDone(grammar, grammarRuleIndex)) {
                break;
            }
            else {
                details.parser.resetToken(tokenIndexAtStartOfNode);
                return null;
            }
        }
        let rule = grammar.rules[grammarRuleIndex];
        let ruleSkippable = IsRuleSkippable(rule, previousRuleMatched);
        let ruleRepeat = !!rule.repeat;
        //The left-recursion guard above only applies to the first rule; later
        //rules get a fresh history (see the comment above).
        let history = grammarRuleIndex == 0 ? [...historicalMatches, grammar.ID] : [];
        let result = ruleRepeat ? MatchRepeatingRule(childDetails, rule, node, history) : MatchRule(childDetails, rule, node, history);
        if (!result) {
            if (ruleSkippable) {
                continue;
            }
            else {
                details.parser.resetToken(tokenIndexAtStartOfNode);
                return null;
            }
        }
    }
    //NOTE(review): `node` is always an object at this point, so this branch is
    //dead code; left in place as it is harmless.
    if (!node) {
        details.parser.resetToken(tokenIndexAtStartOfNode);
        return null;
    }
    //Annotate the node with its source span. When the parser has consumed the
    //last token, fall back to the final token for the end position.
    let lastToken = details.parser.token ? details.parser.token : details.parser.tokens[details.parser.tokens.length - 1];
    return Object.assign(Object.assign({}, node), { file: details.parser.file, position: start, version: details.parser.version, column: column, line: line, length: lastToken.position + lastToken.length - start });
}
258
//Streams tokens for a single expression source and drives MatchGrammar over
//them, producing a FILE node (or null when the input doesn't parse).
export class ExpressionParser {
    /**
     * @param version Expression-language version string; selects tokenizer and
     *     grammar rules.
     * @param lines The full expression source text.
     * @param sourceLocation Opaque description of where this text came from;
     *     stored on the resulting file record for diagnostics.
     */
    constructor(version, lines, sourceLocation) {
        this.tokens = TokenizeExpression(lines, version);
        this.versionIndex = GetVersionIndex(version);
        this.version = version;
        this.sourceLocation = sourceLocation;
        //Index of the token currently being examined.
        this.currentNodeIndex = 0;
        this.done = false;
        //NOTE(review): assumes tokenization produced at least one token; empty
        //input would leave this.token undefined and the check below would
        //throw — confirm callers never pass empty expressions.
        this.token = this.tokens[0];
        //Leading whitespace is never meaningful to the grammar.
        if (this.token.type == TokenType.WHITESPACE)
            this.consumeToken();
        this.file = {
            text: lines,
            sourceLocation: sourceLocation
        };
    }
    //Advances past the current token and any run of whitespace after it,
    //updating `token` and `done`.
    consumeToken() {
        var _a;
        this.currentNodeIndex++;
        while (((_a = this.tokens[this.currentNodeIndex]) === null || _a === void 0 ? void 0 : _a.type) == TokenType.WHITESPACE) {
            this.currentNodeIndex++;
        }
        this.token = this.tokens[this.currentNodeIndex];
        if (this.token) {
            debug &&
                console.log(`[New token is ${this.currentNodeIndex}: ${TokenType[this.token.type]} '${this.token.data.slice(0, Math.min(this.token.data.length, 20))}']`);
        }
        this.done = this.currentNodeIndex >= this.tokens.length;
    }
    //We reset if we fail to generate a node.
    //Rewinds to a previously recorded token index so an alternative grammar
    //rule can be tried against the same tokens.
    resetToken(index) {
        this.currentNodeIndex = index;
        this.token = this.tokens[this.currentNodeIndex];
        console.debug(`[Reset token to ${this.currentNodeIndex}: ${TokenType[this.token.type]} '${this.token.data.slice(0, Math.min(this.token.data.length, 20))}']`);
        this.done = this.currentNodeIndex >= this.tokens.length;
    }
    //Parses the whole token stream as a FILE node and copies its fields onto
    //the file record. Returns the file record, or null on parse failure.
    run() {
        //Defensive re-skip of leading whitespace (the constructor already did
        //this once).
        if (this.token.type == TokenType.WHITESPACE)
            this.consumeToken();
        //Start with a file :]
        let node = MatchGrammar({
            parser: this,
            extraTypes: [],
            debugTabs: ""
        }, ExpressionGrammar.NodeType.FILE, []);
        if (node == null)
            return null;
        let prop;
        for (prop in node) {
            this.file[prop] = node[prop];
        }
        return this.file;
    }
}
312
//Convenience wrapper: tokenize and parse in one call, returning the parsed
//file node or null on parse failure.
export function CreateASTFromString(version, lines, sourceLocation) {
    const parser = new ExpressionParser(version, lines, sourceLocation);
    return parser.run();
}
@@ -0,0 +1,52 @@
1
/**
 * Lexical token categories produced by TokenizeExpression. Numeric values feed
 * reverse lookups at runtime, so existing entries should not be renumbered.
 */
export declare enum TokenType {
    BLOCK_COMMENT = 0,
    LINE_COMMENT = 1,
    PREPROCESSOR = 2,
    OPERATOR = 3,
    INTEGER = 4,
    FLOAT = 5,
    VAR_TYPE = 6,
    VAR_QUAL = 7,
    IDENTIFIER = 8,
    BUILTIN = 9,
    KEYWORD = 10,
    WHITESPACE = 11,
    HEX = 12
}
/** Static definition of one token category. */
export interface TokenCategory {
    /**
     * Per-language-version syntax: either an explicit value set or a regular
     * expression. NOTE(review): declared as a one-element tuple, but the
     * implementation indexes versions[i] by version index — presumably this
     * should be (Set<string> | RegExp)[]; confirm against Tokenizer.js.
     */
    versions: [Set<string> | RegExp];
    /** The TokenType this category produces. */
    type: number;
    /** Human-readable name used in logs and error messages. */
    debug_name: string;
    /** Tie-break priority for matches starting at the same position; lower wins. */
    precedence: number;
}
/** Tie-break priorities for overlapping token matches; lower values win. */
export declare enum Precedence {
    Important = 0,
    Block = 1,
    Line = 2,
    Intricate_Builtins = 4,
    Standard_Builtins = 6,
    Operators = 8,
    User_Identifiers = 10,
    LeastImportant = 11
}
/** Registry of every built-in token category, keyed by a descriptive name. */
export declare const EAST_Tokens: {
    [category: string]: TokenCategory;
};
/** One lexed token with its position (offset, line, column) in the source. */
export interface Token {
    type: TokenType;
    data: string;
    position: number;
    length: number;
    line: number;
    column: number;
}
/** Type guard: true when the object carries a known TokenType. */
export declare function isToken(object: any): object is Token;
/** Resolves the set of literal values a category accepts for a file version. */
export declare function getEASTTokenValues(fileVersion: string, category: TokenCategory): Set<string>;
/**
 *
 * @param expression The whole string
 * @param version The version of the expression language this was compiled against
 * @param offset The offset from the start of the file that this expression occurred within.
 * @constructor
 */
export declare function TokenizeExpression(expression: string, version: string, offset?: number): Token[];
@@ -0,0 +1,222 @@
1
+ import { GetVersionIndex } from "../../Constants/Versions.js";
2
+ let debug = false;
3
/**
 * Runtime mirror of the TokenType declaration: a TypeScript numeric enum
 * compiled to a two-way map (name -> value and value -> name, used by the
 * debug logging). Existing values must not be renumbered.
 */
export var TokenType;
(function (TokenType) {
    TokenType[TokenType["BLOCK_COMMENT"] = 0] = "BLOCK_COMMENT";
    TokenType[TokenType["LINE_COMMENT"] = 1] = "LINE_COMMENT";
    TokenType[TokenType["PREPROCESSOR"] = 2] = "PREPROCESSOR";
    TokenType[TokenType["OPERATOR"] = 3] = "OPERATOR";
    TokenType[TokenType["INTEGER"] = 4] = "INTEGER";
    TokenType[TokenType["FLOAT"] = 5] = "FLOAT";
    TokenType[TokenType["VAR_TYPE"] = 6] = "VAR_TYPE";
    TokenType[TokenType["VAR_QUAL"] = 7] = "VAR_QUAL";
    TokenType[TokenType["IDENTIFIER"] = 8] = "IDENTIFIER";
    TokenType[TokenType["BUILTIN"] = 9] = "BUILTIN";
    TokenType[TokenType["KEYWORD"] = 10] = "KEYWORD";
    TokenType[TokenType["WHITESPACE"] = 11] = "WHITESPACE";
    TokenType[TokenType["HEX"] = 12] = "HEX";
})(TokenType || (TokenType = {}));
19
/**
 * Runtime mirror of the Precedence declaration. Lower values win when two
 * token matches begin at the same source position (see TokenizeExpression's
 * sort). Gaps in the numbering leave room for future tiers.
 */
export var Precedence;
(function (Precedence) {
    Precedence[Precedence["Important"] = 0] = "Important";
    Precedence[Precedence["Block"] = 1] = "Block";
    Precedence[Precedence["Line"] = 2] = "Line";
    Precedence[Precedence["Intricate_Builtins"] = 4] = "Intricate_Builtins";
    Precedence[Precedence["Standard_Builtins"] = 6] = "Standard_Builtins";
    Precedence[Precedence["Operators"] = 8] = "Operators";
    Precedence[Precedence["User_Identifiers"] = 10] = "User_Identifiers";
    Precedence[Precedence["LeastImportant"] = 11] = "LeastImportant";
})(Precedence || (Precedence = {}));
30
/**
 * Built-in token categories. Each entry's `versions` array holds, per language
 * version, either a RegExp (which replaces all older definitions) or a Set of
 * literal values (which accumulate across versions) — see getEASTTokenValues.
 */
export const EAST_Tokens = {
    Builtin_BlockComment: {
        type: TokenType.BLOCK_COMMENT,
        versions: [/\/\*(.|\n|\r)*?\*\//g],
        debug_name: "Block Comment",
        precedence: Precedence.Block
    },
    Builtin_LineComment: {
        type: TokenType.LINE_COMMENT,
        versions: [/\/\/.*/g],
        debug_name: "Line Comment",
        precedence: Precedence.Line
    },
    Builtin_Preprocessor: {
        type: TokenType.PREPROCESSOR,
        versions: [/#.*/gm],
        debug_name: "Preprocessor Directive",
        precedence: Precedence.Line
    },
    //Higher priority than Integer so '0x…' lexes as one hex token rather than
    //an integer (because of the shared leading digits).
    //NOTE(review): matches lowercase hex digits only ('0xFF' would not match)
    //— confirm whether uppercase hex should be accepted.
    Builtin_Hex: {
        type: TokenType.HEX,
        versions: [/\b0x[0-9abcdef]+\b/g],
        debug_name: "Hex Number",
        precedence: Precedence.Intricate_Builtins
    },
    //Lower priority than Float and Hex so their longer forms win when both
    //match at the same start position.
    Builtin_Integer: {
        type: TokenType.INTEGER,
        versions: [/\b[0-9]+\b/g],
        debug_name: "Integer",
        precedence: Precedence.Standard_Builtins
    },
    Builtin_Float: {
        type: TokenType.FLOAT,
        versions: [/\b([0-9]+[.][0-9]+([eE][0-9]+)?)|([0-9]+[fF])\b/g],
        debug_name: "Float",
        precedence: Precedence.Intricate_Builtins
    },
    //Lower priority than builtin, keyword and hex.
    //NOTE(review): this pattern requires at least two characters, so a
    //single-letter identifier like 'x' never tokenizes — confirm intent
    //(a trailing '*' instead of '+' would allow one-character names).
    Builtin_Identifier: {
        type: TokenType.IDENTIFIER,
        versions: [/\b[a-zA-Z_][a-zA-Z0-9_]+\b/g],
        debug_name: "Identifier",
        precedence: Precedence.User_Identifiers
    },
    //The regex for this might be a bit limited, it should probably be revisited.
    Builtin_Whitespace: {
        type: TokenType.WHITESPACE,
        versions: [/[ \t\r\n]+/g],
        debug_name: "Whitespace",
        precedence: Precedence.Intricate_Builtins
    },
    //Literal single-character operators; escaped and OR-ed into one regex by
    //getEASTTokenRegex.
    Operator: {
        versions: [new Set([".", "(", ")", ",", "=", ":"])],
        type: TokenType.OPERATOR,
        debug_name: "Operator",
        precedence: Precedence.Operators
    }
};
90
//Type guard: a token is any object whose `type` is a known TokenType value.
export function isToken(object) {
    if (!("type" in object)) {
        return false;
    }
    return typeof TokenType[object.type] !== "undefined";
}
93
//Resolves the set of literal values a token category accepts for a file
//version. Walks from the file's version back to the oldest, accumulating
//values; a RegExp entry marks the point where older definitions were replaced
//wholesale, so accumulation stops there.
export function getEASTTokenValues(fileVersion, category) {
    let index = GetVersionIndex(fileVersion);
    if (index < 0) {
        throw new Error(`Can't parse file using version that isn't supported: ${fileVersion}`);
    }
    const vals = new Set();
    while (index >= 0) {
        const versionValues = category.versions[index];
        if (versionValues instanceof RegExp) {
            //A regex at the file's own version means the category has no
            //enumerable values at all.
            if (vals.size == 0) {
                throw new Error(`Token is regexp not values.`);
            }
            break;
        }
        for (const value of versionValues) {
            vals.add(value);
        }
        index--;
    }
    return vals;
}
114
//Builds the regular expression that matches this category for the given file
//version. Walks from the file's version back to the oldest: a RegExp entry
//replaces all older syntax outright, while Set entries accumulate literal
//values which are escaped and OR-ed into one alternation.
function getEASTTokenRegex(fileVersion, category) {
    let index = GetVersionIndex(fileVersion);
    if (index < 0)
        throw new Error(`Can't parse file using version that isn't supported: ${fileVersion}`);
    let vals = new Set();
    for (; index >= 0; index--) {
        let thisVersionVals = category.versions[index];
        //Regular expression overrides all previously defined syntax.
        if (thisVersionVals instanceof RegExp) {
            if (vals.size == 0) {
                return thisVersionVals;
            }
            else {
                break;
            }
        }
        else if (thisVersionVals) {
            for (let val of thisVersionVals) {
                vals.add(val);
            }
        }
    }
    //Escape regex metacharacters in each literal value before joining.
    //NOTE(review): the membership test only escapes single-character values; a
    //multi-character value containing a metacharacter would pass through
    //unescaped — confirm all Set values are single characters.
    return new RegExp([...vals].map((e) => {
        return "[.*+?^${}()|[]\\]".includes(e) ? "\\" + e : e;
    }).join("|"), "g");
}
140
//Builds the per-version token matcher list from the static category table:
//one matcher per category, with its version-resolved regex.
function makeVersionParser(version) {
    return Object.values(EAST_Tokens).map((category) => ({
        type: category.type,
        regex: getEASTTokenRegex(version, category),
        precedence: category.precedence,
        name: category.debug_name
    }));
}
152
//Per-version matcher cache, built lazily on first use.
let Parsers = new Map();
/**
 *
 * @param expression The whole string
 * @param version The version of the expression language this was compiled against
 * @param offset The offset from the start of the file that this expression occurred within.
 * @constructor
 */
export function TokenizeExpression(expression, version, offset = 0) {
    let rawTokens = [];
    if (!Parsers.has(version))
        Parsers.set(version, makeVersionParser(version));
    let parser = Parsers.get(version);
    //Collect every candidate match from every category; overlaps are resolved
    //by the sort + skip pass below.
    for (let category of parser) {
        let matches = expression.matchAll(category.regex);
        debug && console.log(TokenType[category.type], category.precedence);
        for (let match of matches) {
            rawTokens.push({
                data: match[0],
                position: match.index,
                length: match[0].length,
                precedence: category.precedence,
                type: category.type
            });
        }
    }
    //Newlines are stored based on the last character in that line, once a line's position exceeds the node's position we know we're
    // at a line that contains this given node.
    let newlines = [...expression.matchAll(/\r\n|\n|\r/g)].map((e) => e.index);
    newlines.unshift(0);
    //Add one more line for anything left-over.
    newlines.push(expression.length);
    /**
     * The first token takes precedence, but if multiple start at the same time then we sort
     * by precedence.
     */
    rawTokens = rawTokens.sort((a, b) => {
        let offset = a.position - b.position;
        if (offset == 0) {
            return a.precedence - b.precedence;
        }
        return offset;
    });
    let lastToken;
    let tokens = [];
    for (let rtoken of rawTokens) {
        if (lastToken && lastToken.position + lastToken.length > rtoken.position) {
            //We are in the middle of a different kind of identifier, so skip
            // this token.
            continue;
        }
        //Find the FIRST line whose end lies past this token: that is the
        //token's line. (The previous code kept overwriting `line` without
        //breaking, so every token was assigned the final line number and
        //columns were computed against the wrong line start.)
        //NOTE(review): `newlines` is computed from `expression` but compared
        //against offset-shifted positions; for offset != 0 this assumes the
        //caller's offsets line up with the expression's own newlines — confirm.
        let line;
        for (let i = 0; i < newlines.length; i++) {
            if (newlines[i] > rtoken.position + offset) {
                line = i;
                break;
            }
        }
        tokens.push({
            data: rtoken.data,
            type: rtoken.type,
            //Add an offset, since multiple expressions can exist in the same string.
            position: rtoken.position + offset,
            length: rtoken.length,
            line: line,
            column: rtoken.position + offset - newlines[line - 1]
        });
        lastToken = rtoken;
    }
    return tokens;
}
@@ -0,0 +1 @@
1
+ import "../BuiltinGrammar.js";