@bablr/boot 0.8.1 → 0.10.0

This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/lib/index.js CHANGED
@@ -6,12 +6,22 @@ import * as instruction from './languages/instruction.js';
  import { TemplateParser } from './miniparser.js';
  import { buildEmbeddedMatcher, buildEmbeddedRegex } from '@bablr/agast-vm-helpers/builders';

+ const trees = new WeakMap();
+
  export const buildTag = (language, defaultType) => {
    const defaultTag = (quasis, ...exprs) => {
-     return new TemplateParser(language, quasis.raw, exprs).eval({
-       language: language.name,
-       type: defaultType,
-     });
+     let tree;
+     if (trees.has(quasis) && !exprs.length) {
+       tree = trees.get(quasis);
+     } else {
+       tree = new TemplateParser(language, quasis.raw, exprs).eval({
+         language: language.name,
+         type: defaultType,
+       });
+
+       trees.set(quasis, tree);
+     }
+     return tree;
    };

    return new Proxy(defaultTag, {
@@ -20,18 +30,29 @@ export const buildTag = (language, defaultType) => {
    },

    get(_, type) {
+     const trees = new WeakMap();
+
      return (quasis, ...exprs) => {
-       return new TemplateParser(language, quasis.raw, exprs).eval({
-         language: language.name,
-         type,
-       });
+       let tree;
+       if (trees.has(quasis) && !exprs.length) {
+         tree = trees.get(quasis);
+       } else {
+         tree = new TemplateParser(language, quasis.raw, exprs).eval({
+           language: language.name,
+           type,
+         });
+
+         trees.set(quasis, tree);
+       }
+       return tree;
      };
    },
  });
};

- export const parse = (language, type, sourceText) => {
-   return new TemplateParser(language, [sourceText], []).eval({
+ export const parse = (language, type, sourceText, expressions = []) => {
+   let source = Array.isArray(sourceText) ? sourceText : [sourceText];
+   return new TemplateParser(language, source, expressions).eval({
      language: language.name,
      type,
    });
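
The `trees` WeakMaps above memoize template evaluation per call site, and the reworked `parse` now accepts either a plain string or an already-split array of source segments plus an `expressions` array. A minimal, self-contained sketch of the JavaScript behavior the cache relies on (plain objects stand in for `TemplateParser` and its output; nothing here is BABLR API): a tagged template receives the same frozen strings array every time the same call site runs, so keying on `quasis` is stable, and the `!exprs.length` guard keeps templates with interpolations from reusing a cached result.

const cache = new WeakMap();

const tag = (quasis, ...exprs) => {
  let tree;
  if (cache.has(quasis) && !exprs.length) {
    tree = cache.get(quasis); // same call site, no interpolations: reuse
  } else {
    tree = { raw: quasis.raw, exprs }; // stand-in for TemplateParser(...).eval(...)
    cache.set(quasis, tree);
  }
  return tree;
};

const make = () => tag`any source text`;

make() === make(); // true — the second call is a cache hit
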
@@ -1,5 +1,6 @@
  import * as sym from '@bablr/agast-vm-helpers/symbols';
  import * as JSON from './json.js';
+ import { get, sourceTextFor } from '@bablr/agast-helpers/tree';

  const _ = /\s+/y;
  const PN = 'Punctuator';
@@ -33,16 +34,15 @@ export const covers = new Map([
        'OpenNodeTag',
        'CloseNodeTag',
        'LiteralTag',
-       'Trivia',
+       'BindingTag',
+       'InitializerTag',
+       'AttributeDefinition',
        'Number',
        'Digit',
        'Content',
        'NodeFlags',
      ]),
    ],
-   ['TagType', new Set(['Identifier', 'GlobalIdentifier'])],
-   ['Tag', new Set(['LiteralTag', 'Trivia'])],
-   ['PropertyValue', new Set(['GapTag', 'Node', 'NullTag'])],
  ]);

  export const grammar = class CSTMLMiniparserGrammar {
@@ -50,7 +50,7 @@ export const grammar = class CSTMLMiniparserGrammar {
    Document(p) {
      p.eatProduction('DoctypeTag', { path: 'doctype' });
      p.eatMatchTrivia(_);
-     p.eatProduction('Node', { path: 'tree' }, { fragment: true });
+     p.eatProduction('Node', { path: 'tree' }, { forceFragment: true });
    }

    // @Node
@@ -80,61 +80,104 @@ export const grammar = class CSTMLMiniparserGrammar {
      p.eat('<//>', PN, { path: 'sigilToken' });
    }

+   // @Node
+   LiteralTag(p) {
+     p.eatProduction('JSON:String', { path: 'value' });
+   }
+
+   //@Node
+   InitializerTag(p) {
+     let isArray;
+     if ((isArray = p.match('['))) {
+       p.eat('[]', PN, { path: 'sigilToken' });
+     } else {
+       p.eat('undefined', PN, { path: 'sigilToken' });
+     }
+     return { attrs: { isArray } };
+   }
+
    // @Node
    Node(p, props) {
-     let open = p.eatProduction('OpenNodeTag', { path: 'open' }, props);
+     let open = p.eatProduction('OpenNodeTag', { path: 'open', noInterpolate: true }, props);

      p.eatMatchTrivia(_);

-     if (open.properties.flags?.token) {
-       p.eatProduction('NodeChild', { path: 'children[]' }, { token: true });
-       p.eatMatchTrivia(_);
-     } else if (!open.properties.selfClosingTagToken) {
-       while (!(p.match('</') || p.done)) {
-         p.eatProduction('NodeChild', { path: 'children[]' });
+     if (open.attributes.balanced) {
+       let token = !!get(['flags', 'token'], open);
+
+       while (p.atExpression || !(p.match(/<\/[^/]/y) || p.done)) {
+         p.eatProduction('NodeChild', { path: 'children[]' }, { token });
          p.eatMatchTrivia(_);
        }
-     }

-     if (!open.properties.selfClosingTagToken) {
-       p.eatProduction('CloseNodeTag', { path: 'close' });
+       p.eatProduction('CloseNodeTag', { path: 'close', noInterpolate: true });
      }
    }

    NodeChild(p, props) {
      const { token } = props || {};

+     if (p.match('{')) {
+       p.eatProduction('AttributeDefinition');
+     }
+
      if (token) {
        if (p.match(/<\*?@/y)) {
-         p.eatProduction('Node');
+         p.eatProduction('Property');
        } else {
          p.eatProduction('LiteralTag');
        }
      } else {
-       if (p.match(/<\*?#/y)) {
-         p.eatProduction('Node');
-       } else if (p.match(/[a-zA-Z`\\\u{80}-\u{10ffff}.]|[.#@]/uy)) {
+       if (p.match(/[:<a-zA-Z`\\\u{80}-\u{10ffff}.]|[.#@_]/uy) || p.atExpression) {
          p.eatProduction('Property');
        } else if (p.match(/['"]/y)) {
-         p.eatProduction('LiteralTag');
+         p.eatProduction('Property');
+       } else {
+         p.fail();
        }
      }
    }

+   // @Node
+   AttributeDefinition(p) {
+     p.eat('{', PN, { path: 'openToken', balanced: '}' });
+     p.eatMatchTrivia(_);
+     p.eatProduction('IdentifierPath', { path: 'key' });
+     p.eatMatchTrivia(_);
+     p.eat(':', PN, { path: 'sigilToken' });
+     p.eatMatchTrivia(_);
+     p.eatProduction('JSON:Expression', { path: 'value' });
+     p.eatMatchTrivia(_);
+     p.eat('}', PN, { path: 'closeToken', balancer: true });
+   }
+
    // @Node
    Property(p) {
-     p.eatProduction('ReferenceTag', { path: 'reference' });
+     let ref = null;
+     if (p.match(/[a-zA-Z`\\\u{80}-\u{10ffff}.]|[.#@_]/uy)) {
+       ref = p.eatProduction('ReferenceTag', { path: 'reference' });
+     }
      p.eatMatchTrivia(_);
-     p.eatProduction('PropertyValue', { path: 'value' });
+     if (p.match(':')) {
+       p.eatProduction('BindingTag', { path: 'binding' });
+       p.eatMatchTrivia(_);
+     }
+     let refType = ref && get('type', ref);
+     p.eatProduction('PropertyValue', {
+       path: 'value',
+       allowFragment: refType && sourceTextFor(refType) === '_',
+     });
    }

-   PropertyValue(p) {
+   PropertyValue(p, { allowFragment }) {
      if (p.match('null')) {
        p.eatProduction('NullTag');
+     } else if (p.match(/\[\]|undefined/y)) {
+       p.eatProduction('InitializerTag');
      } else if (p.match('<//>')) {
        p.eatProduction('GapTag');
      } else {
-       p.eatProduction('Node');
+       p.eatProduction('Node', { allowFragment, propertyValue: true });
      }
    }

@@ -143,11 +186,16 @@ export const grammar = class CSTMLMiniparserGrammar {
      p.eatMatch('*', PN, { path: 'tokenToken' });
      p.eatMatch('$', PN, { path: 'hasGapToken' });
      p.eatMatch('_', PN, { path: 'fragmentToken' });
-     p.eatMatch('_', PN, { path: 'coverFragmentToken' });
+     p.eatMatch('_', PN, { path: 'multiFragmentToken' });
    }

    // @Node
-   OpenNodeTag(p, { fragment } = {}) {
+   OpenNodeTag(p, { forceFragment = false, allowFragment = true, propertyValue = false } = {}) {
+     if (p.match(/['"]/y)) {
+       p.eatProduction('JSON:String', { path: 'literalValue' });
+       return;
+     }
+
      p.eat('<', PN, { path: 'openToken', startSpan: 'Tag', balanced: '>' });

      let flags = null;
@@ -157,38 +205,48 @@ export const grammar = class CSTMLMiniparserGrammar {

      let sp = null;

-     if (fragment && !flags.properties.fragmentToken) throw new Error();
+     let fragmentFlag = get('fragmentToken', flags);
+     let multiFragmentFlag = get('multiFragmentFlag', flags);
+     let tokenFlag = get('tokenToken', flags);

-     if (!flags.properties.fragmentToken && !p.match(/./sy)) {
+     if (propertyValue && fragmentFlag && !multiFragmentFlag) throw new Error();
+
+     if (forceFragment && !fragmentFlag) throw new Error();
+
+     if (!fragmentFlag && !p.match(/./sy)) {
+       throw new Error();
+     }
+
+     if (fragmentFlag && !allowFragment) {
        throw new Error();
      }

-     if (!flags.properties.fragmentToken) {
-       p.eatProduction('TagType', { path: 'type' });
+     if (!fragmentFlag) {
+       p.eatProduction('Identifier', { path: 'type' });

        sp = p.eatMatchTrivia(_);

        let iv;

        if (sp && (p.match(/['"]/y) || p.atExpression)) {
-         iv = p.eatProduction('JSON:String', { path: 'intrinsicValue' });
+         iv = p.eatProduction('JSON:String', { path: 'literalValue' });

          sp = p.eatMatchTrivia(_);
        }

-       if (!flags.properties.tokenToken && iv) {
-         throw new Error();
-       }
-
        if (p.match('{') || p.atExpression) {
-         p.eatProduction('Object');
+         p.eatProduction('JSON:Object', { path: 'attributes' });
          sp = p.eatMatchTrivia(_);
        }

        p.eatMatchTrivia(_);
      }
-     p.eatMatch('/', PN, { path: 'selfClosingTagToken' });
+     let sc = p.eatMatch('/', PN, { path: 'selfClosingToken' });
      p.eat('>', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
+
+     const balanced = !sc && (p.path.depth > 0 || p.span.type !== 'Bare');
+
+     return { attrs: { balanced } };
    }

    // @Node
@@ -197,25 +255,13 @@ export const grammar = class CSTMLMiniparserGrammar {
      p.eat('>', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
    }

-   TagType(p) {
-     if (
-       p.match(/['"]|[a-zA-Z`\\\u{80}-\u{10ffff}.]+:/uy) ||
-       (p.atExpression && p.quasis[p.quasiIdx + 1][0] === ':')
-     ) {
-       p.eatProduction('LanguageReference', { path: 'language' });
-       p.eat(':', PN, { path: 'namespaceSeparatorToken' });
-       p.eatProduction('Identifier', { path: 'type' });
-     } else {
-       p.eatProduction('Identifier', { path: 'type' });
-     }
-   }
-
-   LanguageReference(p) {
-     if (p.match(/['"]/y)) {
-       p.eatProduction('JSON:String');
-     } else {
+   // @Node
+   BindingTag(p) {
+     p.eat(':', PN, { path: 'openToken', startSpan: 'Tag', balanced: ':' });
+     if (!p.match(':')) {
        p.eatProduction('IdentifierPath');
      }
+     p.eat(':', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
    }

    IdentifierPath(p) {
@@ -244,7 +290,7 @@ export const grammar = class CSTMLMiniparserGrammar {
      let lit, esc;
      do {
        if ((esc = p.match('\\'))) {
-         p.eatMatchEscape(/\\(u(\{[0-9a-fA-F]\}|\d{4}))/y);
+         p.eatEscape(/\\(u(\{[0-9a-fA-F]\}|\d{4}))/y);
        } else {
          if (!quoted) {
            lit = p.eatMatchLiteral(/[a-zA-Z\u{80}-\u{10ffff}][a-zA-Z0-9_\u{80}-\u{10ffff}-]*/uy);
@@ -266,26 +312,16 @@

    // @Node
    ReferenceTag(p) {
-     let name;
-     if ((name = p.match(/[.#@]/y))) {
-       p.eat(name, PN, { path: 'name' });
-     } else {
-       p.eatProduction('Identifier', { path: 'name' });
+     let type;
+     if ((type = p.match(/[.#@_]/y))) {
+       p.eat(type, PN, { path: 'type' });
      }
-     p.eatMatchTrivia(_);
-     let open = p.eatMatch('[', PN, { path: 'openIndex', startSpan: 'Index', balanced: ']' });

-     if (open) {
-       p.eatMatchTrivia(_);
-
-       if (p.match(/\d/)) {
-         p.eatProduction('UnsignedInteger', { path: 'index' });
-       }
-
-       p.eatMatchTrivia(_);
-       p.eat(']', PN, { path: 'closeIndex', endSpan: 'Index', balancer: true });
+     if (!type || type === '#') {
+       p.eatProduction('Identifier', { path: 'name' });
      }
      p.eatMatchTrivia(_);
+
      if (p.match(/[+$]/y)) {
        p.eatProduction('ReferenceFlags', { path: 'flags' });
        p.eatMatchTrivia(_);
@@ -1,4 +1,4 @@
- import objectEntries from 'iter-tools/methods/object-entries';
+ import objectEntries from 'iter-tools-es/methods/object-entries';
  import * as sym from '@bablr/agast-vm-helpers/symbols';
  import * as Spamex from './spamex.js';
  import * as CSTML from './cstml.js';
@@ -7,7 +7,7 @@ import * as Regex from './regex.js';
  const _ = /\s+/y;
  const PN = 'Punctuator';
  const KW = 'Keyword';
- const ID = 'Identifier';
+ const LIT = 'Identifier';

  export const name = 'JSON';

@@ -34,6 +34,7 @@ export const covers = new Map([
        'Integer',
        'String',
        'StringContent',
+       'Identifier',
      ]),
    ],
    ['Expression', new Set(['Object', 'Array', 'Boolean', 'Null', 'Number', 'String'])],
@@ -68,7 +69,7 @@ export const cookEscape = (escape, span) => {
      return String.fromCodePoint(parseInt(hexMatch[1], 16));
    }

-   const litPattern = span === 'Single' ? /\\([\\gnrt0'])/y : /\\([\\gnrt0"])/y;
+   const litPattern = span.type === 'Single' ? /\\([\\gnrt0'])/y : /\\([\\gnrt0"])/y;
    const litMatch = litPattern.exec(escape);

    if (litMatch) {
@@ -125,7 +126,11 @@ export const grammar = class JSONMiniparserGrammar {

    // @Node
    Property(p) {
-     p.eat(/[a-zA-Z]+/y, ID, { path: 'key' });
+     if (p.match(/['"]/y)) {
+       p.eatProduction('String', { path: 'key' });
+     } else {
+       p.eatProduction('Identifier', { path: 'key' });
+     }
      p.eatMatchTrivia(_);
      p.eat(':', PN, { path: 'mapToken' });
      p.eatMatchTrivia(_);
@@ -216,6 +221,11 @@ export const grammar = class JSONMiniparserGrammar {
      p.eatLiteral(/\d/y);
    }

+   // @Node
+   Identifier(p) {
+     p.eat(/[a-zA-Z]+/y, LIT, { path: 'content' });
+   }
+
    // @Node
    String(p) {
      const q = p.match(/['"]/y) || '"';
@@ -1,5 +1,5 @@
  import * as sym from '@bablr/agast-vm-helpers/symbols';
- import when from 'iter-tools/methods/when';
+ import when from 'iter-tools-es/methods/when';
  import { escapables } from './json.js';

  export const name = 'Regex';
@@ -91,7 +91,7 @@ const unique = (flags) => flags.length === new Set(flags).size;
  const getSpecialPattern = (span) => {
    const { type } = span;
    if (type === 'Bare') {
-     return /[*+{}\[\]()\.^$|\\\n\/><]/y;
+     return /[*+?{}\[\]()\.^$|\\\n\/><]/y;
    } else if (type === 'CharacterClass') {
      return /[\]\\]/y;
    } else if (type === 'CharacterClass:First') {
@@ -206,7 +206,7 @@ export const grammar = class RegexMiniparserGrammar {
      }

      if (p.match(/[*+?]|{/y)) {
-       p.shiftProduction('Quantifier');
+       return { shift: 'Quantifier' };
      }
    }

@@ -2,6 +2,7 @@ import * as sym from '@bablr/agast-vm-helpers/symbols';
  import * as Regex from './regex.js';
  import * as CSTML from './cstml.js';
  import * as JSON from './json.js';
+ import { get } from '@bablr/agast-helpers/path';

  const _ = /\s+/y;
  const PN = 'Punctuator';
@@ -26,6 +27,7 @@ export const covers = new Map([
        'ArrayNodeMatcher',
        'NullNodeMatcher',
        'ReferenceMatcher',
+       'BindingMatcher',
        'OpenNodeMatcher',
        'CloseNodeMatcher',
        'Literal',
@@ -44,7 +46,7 @@ export const covers = new Map([
  export const grammar = class SpamexMiniparserGrammar {
    // @Cover
    Matcher(p) {
-     if (p.match(/[a-zA-Z.#@<]/y)) {
+     if (p.match(/[a-zA-Z.#@<:]/y)) {
        p.eatProduction('PropertyMatcher');
      } else if (p.match(/['"/]/y)) {
        p.eatProduction('StringMatcher');
@@ -80,20 +82,29 @@ export const grammar = class SpamexMiniparserGrammar {

      p.eatMatchTrivia(_);

+     if (p.match(':')) {
+       p.eatProduction('BindingMatcher', { path: 'bindingMatcher' });
+     }
+
+     p.eatMatchTrivia(_);
+
      p.eatProduction('NodeMatcher', { path: 'nodeMatcher' });
    }

    // @Node
    ReferenceMatcher(p) {
-     let name;
-     if ((name = p.match(/[.#@]/y))) {
-       name = p.eat(name, PN, { path: 'name' });
-     } else if (p.match(/[A-Za-z]/y)) {
+     let name, type;
+     if ((type = p.match(/[.#@]/y))) {
+       p.eat(type, PN, { path: 'type' });
+     }
+
+     if ((!type || type === '#') && p.match(/[A-Za-z]/y)) {
        name = p.eatProduction('CSTML:Identifier', { path: 'name' });
      }

      let open =
-       name && p.eatMatch('[', PN, { path: 'openIndexToken', startSpan: 'Index', balanced: ']' });
+       (name || type) &&
+       p.eatMatch('[', PN, { path: 'openIndexToken', startSpan: 'Index', balanced: ']' });

      if (open) {
        p.eatMatchTrivia(_);
@@ -108,6 +119,15 @@ export const grammar = class SpamexMiniparserGrammar {
      p.eat(':', PN, { path: 'mapToken' });
    }

+   // @Node
+   BindingMatcher(p) {
+     p.eat(':', PN, { path: 'openToken' });
+     p.eatMatchTrivia(_);
+     p.eatProduction('CSTML:IdentifierPath', { path: 'languagePath' });
+     p.eatMatchTrivia(_);
+     p.eat(':', PN, { path: 'closeToken' });
+   }
+
    NodeMatcher(p) {
      if (p.match('<//>')) {
        p.eatProduction('GapNodeMatcher');
@@ -127,10 +147,10 @@ export const grammar = class SpamexMiniparserGrammar {
    BasicNodeMatcher(p) {
      let open = p.eatProduction('OpenNodeMatcher', { path: 'open' });

-     if (!open.properties.selfClosingTagToken) {
+     if (!get('selfClosingToken', open)) {
        p.eatMatchTrivia(_);

-       if (open.properties.flags?.token) {
+       if (get('flags', open)?.token) {
          // p.eatProduction('NodeChild', { path: 'children[]' }, { token: true });
          // p.eatMatchTrivia(_);
        } else {
@@ -152,20 +172,18 @@ export const grammar = class SpamexMiniparserGrammar {
        p.eatProduction('CSTML:NodeFlags', { path: 'flags' });
      }

-     if (p.match(/['"]|[a-zA-Z]+:/y) || p.atExpression) {
-       p.eatProduction('CSTML:TagType', { path: 'type', noInterpolate: true });
+     if (p.match(/[a-zA-Z]/y) || p.atExpression) {
+       p.eatProduction('CSTML:Identifier', { path: 'type' });
      } else if (p.match('?')) {
        p.eat('?', PN, { path: 'type' });
-     } else if (p.match(' ')) {
-       p.eatMatchTrivia(_);
-     } else {
-       p.eatProduction('CSTML:Identifier', { path: 'type' });
+     } else if (p.match('_')) {
+       p.eat('_', PN, { path: 'type' });
      }

      let sp = p.eatMatchTrivia(_);

      if (sp && ((p.match(/['"/]/y) && !p.match('/>')) || p.atExpression)) {
-       p.eatProduction('StringMatcher', { path: 'intrinsicValue' });
+       p.eatProduction('StringMatcher', { path: 'literalValue' });

        sp = p.eatMatchTrivia(_);
      }
@@ -176,7 +194,7 @@ export const grammar = class SpamexMiniparserGrammar {
      }

      p.eatMatchTrivia(_);
-     p.eatMatch('/', PN, { path: 'selfClosingTagToken' });
+     p.eatMatch('/', PN, { path: 'selfClosingToken' });
      p.eat('>', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
    }

package/lib/match.js CHANGED
@@ -51,8 +51,7 @@ export class Match {

    if (isNode && !isCover) {
      path.node = createNode();
-     path.node.type = type;
-     path.node.language = resolvedLanguage.canonicalURL;
+     path.node.type = Symbol.for(type);
    }

    return new Match(null, resolvedLanguage, id, attrs, path);
@@ -78,7 +77,6 @@ export class Match {
      }
      path.node = createNode();
      path.node.type = Symbol.for(type);
-     path.node.language = resolvedLanguage.canonicalURL;
    }

    return new Match(this, resolvedLanguage, id, { ...baseAttrs, ...attrs }, path);
package/lib/miniparser.js CHANGED
@@ -1,12 +1,11 @@
  import escapeRegex from 'escape-string-regexp';
- import arrayLast from 'iter-tools/methods/array-last';
- import isString from 'iter-tools/methods/is-string';
- import isObject from 'iter-tools/methods/is-object';
- import find from 'iter-tools/methods/find';
- import every from 'iter-tools/methods/every';
+ import arrayLast from 'iter-tools-es/methods/array-last';
+ import isString from 'iter-tools-es/methods/is-string';
+ import isObject from 'iter-tools-es/methods/is-object';
+ import find from 'iter-tools-es/methods/find';
+ import every from 'iter-tools-es/methods/every';
  import * as sym from '@bablr/agast-vm-helpers/symbols';
  import { Match } from './match.js';
- import { parsePath } from './path.js';
  import { isRegex, isArray, getPrototypeOf } from './utils.js';
  import { ReferenceTag, LiteralTag } from '@bablr/agast-helpers/symbols';
  import {
@@ -16,9 +15,10 @@ import {
    buildReferenceTag,
    nodeFlags,
  } from '@bablr/agast-helpers/builders';
- import { add, buildToken } from '@bablr/agast-helpers/tree';
- import * as sumtree from '@bablr/agast-helpers/sumtree';
- import { get } from '@bablr/agast-helpers/path';
+ import { add, buildToken, shift } from '@bablr/agast-helpers/tree';
+ import * as Tags from '@bablr/agast-helpers/tags';
+ import { buildPathSegment, get } from '@bablr/agast-helpers/path';
+ import { parseReference } from '@bablr/agast-helpers/shorthand';

  const Escape = Symbol.for('Escape');

@@ -138,7 +138,7 @@ export class TemplateParser {
      }
    }

-   eval(id, attrs = {}, props = {}, shift = null) {
+   eval(id, attrs = {}, props = {}, shift_ = null) {
      const parentMatch = this.m;
      const parentPath = this.path?.node ? this.path : this.path?.parent;
      const { type } = id;
@@ -176,16 +176,16 @@

      if (parentPath?.node && (isNode || covers.has(type))) {
        const { node } = parentPath;
-       const path = parsePath(this.m.attrs.path);
+       const path = parseReference(this.m.attrs.path);

        if (isArray(result)) {
          for (const value of result) {
-           node.children = sumtree.push(node.children, path);
+           node.tags = Tags.push(node.tags, path);

            add(node, path, value);
          }
        } else {
-         node.children = sumtree.push(node.children, path);
+         node.tags = Tags.push(node.tags, path);

          add(node, path, result);
        }
@@ -201,10 +201,7 @@

      if (isNode) {
        let { node } = this.path;
-       node.children = sumtree.push(
-         node.children,
-         buildOpenNodeTag(nodeFlags, node.language, node.type),
-       );
+       node.tags = Tags.push(node.tags, buildOpenNodeTag(nodeFlags, node.type));
      }

      const result = getPrototypeOf(grammar)[type].call(grammar, this, props);
@@ -217,21 +214,29 @@
        const { node } = this.path;
        if (result?.attrs) {
          node.attributes = result.attrs;
-         node.children = sumtree.replaceAt(
+         node.tags = Tags.replaceAt(
            0,
-           node.children,
-           buildOpenNodeTag(nodeFlags, node.language, node.type, result.attrs),
+           node.tags,
+           buildOpenNodeTag(nodeFlags, node.type, result.attrs),
          );
        }

-       node.children = sumtree.push(node.children, buildCloseNodeTag());
+       node.tags = Tags.push(node.tags, buildCloseNodeTag());

        if (parentPath?.node && !covers.has(type)) {
-         const path = parsePath(this.m.attrs.path);
+         const path = parseReference(this.m.attrs.path);

-         add(parentPath.node, path, node, shift);
+         if (shift_ == null) {
+           add(parentPath.node, path, node);
+         } else {
+           shift(parentPath.node, path, node);
+         }
        }
      }
+
+     if (result?.shift) {
+       this.shiftProduction(result.shift);
+     }
    }

    this.m = this.m.parent;
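
The new `result?.shift` branch above pairs with the Regex grammar change earlier in this diff (`return { shift: 'Quantifier' }` replacing a direct `p.shiftProduction()` call): a production now returns a shift request, and eval performs the shift only after the production's node has been closed. A self-contained toy (not BABLR code; every name in it is invented for illustration) showing the shape of that deferred-shift convention:

// Each production returns its finished node plus an optional shift request.
const productions = {
  Atom(src) {
    const node = { type: 'Atom', text: src[0] };
    const shift = /[*+?]/.test(src[1]) ? 'Quantifier' : null;
    return { node, shift };
  },
  Quantifier(src, held) {
    // The driver hands us the node that was just closed; we wrap it.
    return { node: { type: 'Quantifier', sigil: src[1], value: held }, shift: null };
  },
};

const evalProduction = (type, src, held = null) => {
  const { node, shift } = productions[type](src, held);
  // The driver, not the production, performs the shift once the node is complete.
  return shift ? evalProduction(shift, src, node) : node;
};

evalProduction('Atom', 'a+');
// => { type: 'Quantifier', sigil: '+', value: { type: 'Atom', text: 'a' } }
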
@@ -239,13 +244,12 @@
      if (this.path?.node) {
        const isTag = (child) => [LiteralTag, Escape].includes(child.type);

-       const { children } = this.path.node;
+       const { tags } = this.path.node;

-       if (find(isTag, sumtree.traverse(children)) && every(isTag, sumtree.traverse(children))) {
+       if (find(isTag, Tags.traverse(tags)) && every(isTag, Tags.traverse(tags))) {
          throw new Error('strings must be wrapped in nodes');
        }
      }
-
      return path.node;
    }

@@ -289,21 +293,20 @@

      this.held = null;

-     const path = parsePath(attrs.path);
+     const path = parseReference(attrs.path);

-     add(node, path, held, 0);
+     add(node, path, held);

      return held;
    }

    shiftProduction(id, attrs = {}, props = {}) {
      const { node } = this;
-     const { properties } = node;
      // don't push a new path onto the stack

      // get the most recently produced node and detach it from its parent

-     const ref = sumtree.getAt(-2, node.children);
+     const ref = Tags.getAt(-1, node.tags).value.tags[0];

      if (!ref.value.flags.expression) throw new Error();

@@ -311,13 +314,13 @@
        throw new Error();
      }

-     this.held = get(ref, node);
+     this.held = get(buildPathSegment(ref.value.name, -1), node);

      let id_ = this.buildId(id);

      const shifted = this.eval(id_, attrs, props, 1);

-     // add(node, ref, shifted, 1);
+     // shift(node, ref, shifted);

      return shifted;
    }
@@ -336,10 +339,9 @@

      this.updateSpans(attrs);

-     const path_ = parsePath(attrs.path);
-     const language = this.language.canonicalURL;
+     const path_ = parseReference(attrs.path);

-     add(this.node, path_, buildToken(language, type, result, _attrs));
+     add(this.node, path_, buildToken(type, result, _attrs));

      return result;
    }
@@ -360,10 +362,9 @@

        this.idx += result.length;

-       const path = parsePath(attrs.path);
-       const language = this.language.canonicalURL;
+       const path = parseReference(attrs.path);

-       add(this.node, path, buildToken(language, type, result));
+       add(this.node, path, buildToken(type, result));
      }
      return result;
    }
@@ -375,11 +376,7 @@

      this.idx += result.length;

-     add(
-       this.node,
-       buildReferenceTag('#'),
-       buildToken('https://bablr.org/languages/core/en/space-tab-newline', 'Space', result),
-     );
+     add(this.node, buildReferenceTag('#'), buildToken('Space', result));

      return result;
    }
@@ -390,11 +387,7 @@
      if (result) {
        this.idx += result.length;

-       add(
-         this.node,
-         buildReferenceTag('#'),
-         buildToken('https://bablr.org/languages/core/en/space-tab-newline', 'Space', result),
-       );
+       add(this.node, buildReferenceTag('#'), buildToken('Space', result));
      }

      return result;
@@ -411,11 +404,7 @@
      const cooked = this.language.cookEscape(result, this.span);
      const attributes = { cooked };

-     add(
-       this.node,
-       buildReferenceTag('@'),
-       buildToken(this.language.canonicalURL, 'Escape', raw, attributes),
-     );
+     add(this.node, buildReferenceTag('@'), buildToken('Escape', raw, attributes));

      return result;
    }
@@ -430,11 +419,7 @@
        const cooked = this.language.cookEscape(result, this.span);
        const attributes = { cooked };

-       add(
-         this.node,
-         buildReferenceTag('@'),
-         buildToken(this.language.canonicalURL, 'Escape', raw, attributes),
-       );
+       add(this.node, buildReferenceTag('@'), buildToken('Escape', raw, attributes));
      }

      return result;
@@ -447,7 +432,7 @@

      this.idx += result.length;

-     this.node.children = sumtree.push(this.node.children, buildLiteralTag(result));
+     this.node.tags = Tags.push(this.node.tags, buildLiteralTag(result));

      return result;
    }
@@ -458,7 +443,7 @@
      if (result) {
        this.idx += result.length;

-       this.node.children = sumtree.push(this.node.children, buildLiteralTag(result));
+       this.node.tags = Tags.push(this.node.tags, buildLiteralTag(result));
      }

      return result;
package/lib/path.js CHANGED
@@ -1,23 +1,9 @@
- import { buildReferenceTag } from '@bablr/agast-helpers/tree';
-
- const { hasOwn, freeze } = Object;
- const { isArray } = Array;
-
- export const parsePath = (str) => {
-   const match = /^([a-zA-Z]+)(\[\])?(\+)?(\$)?$/.exec(str);
-
-   if (!match) throw new Error();
-
-   let flags = freeze({ expression: !!match[3], hasGap: !!match[4] });
-
-   return buildReferenceTag(match[1], !!match[2], flags);
- };
-
  export class Path {
    constructor(id, attributes, parent = null) {
      this.id = id;
      this.attributes = attributes;
      this.parent = parent;
+     this.depth = parent ? parent.depth + 1 : 0

      this.node = null;
    }
@@ -46,36 +32,3 @@ export class Path {
      return new Path(id, attrs);
    }
  }
-
- export class PathResolver {
-   constructor(node) {
-     this.node = node;
-     this.counters = {};
-   }
-
-   get(path) {
-     const { node, counters } = this;
-
-     const { isArray: pathIsArray, name } = path;
-
-     if (!hasOwn(node.properties, name)) {
-       throw new Error(`cannot resolve {path: ${name}}`);
-     }
-
-     let value = node.properties[name];
-
-     if (pathIsArray) {
-       if (!isArray(value)) {
-         throw new Error(`cannot resolve {path: ${name}}: not an array`);
-       }
-
-       const counter = counters[name] ?? 0;
-
-       counters[name] = counter + 1;
-
-       value = value[counter];
-     }
-
-     return value;
-   }
- }
package/lib/utils.js CHANGED
@@ -1,5 +1,5 @@
- import isArray from 'iter-tools/methods/is-array';
- import isString from 'iter-tools/methods/is-string';
+ import isArray from 'iter-tools-es/methods/is-array';
+ import isString from 'iter-tools-es/methods/is-string';

  const { getPrototypeOf } = Object;

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@bablr/boot",
-   "version": "0.8.1",
+   "version": "0.10.0",
    "description": "Compile-time tools for bootstrapping BABLR VM",
    "engines": {
      "node": ">=12.0.0"
@@ -20,11 +20,11 @@
    ],
    "sideEffects": false,
    "dependencies": {
-     "@bablr/agast-helpers": "0.7.1",
-     "@bablr/agast-vm-helpers": "0.7.1",
-     "@iter-tools/imm-stack": "1.1.0",
+     "@bablr/agast-helpers": "0.9.0",
+     "@bablr/agast-vm-helpers": "0.9.0",
+     "@iter-tools/imm-stack": "1.2.0",
      "escape-string-regexp": "5.0.0",
-     "iter-tools": "^7.5.3"
+     "iter-tools-es": "^7.0.2"
    },
    "devDependencies": {
      "@babel/cli": "^7.23.0",