@bablr/language-en-bablr-vm-instruction 0.5.0 → 0.6.0

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/lib/grammar.js CHANGED
@@ -1,11 +1,11 @@
  /* @macrome
  * @generatedby @bablr/macrome-generator-bablr
- * @generatedfrom ./grammar.macro.js#19ce18f8cc0bb0def947bcdbc1a08a138868bca5
+ * @generatedfrom ./grammar.macro.js#9be2c245172bdcf562b438022c5c18f36e2b54a9
  * This file is autogenerated. Please do not edit it directly.
  * When editing run `npx macrome watch` then change the file this is generated from.
  */
  import _applyDecs from "@babel/runtime/helpers/applyDecs2305";
- import { interpolateArray as _interpolateArray, interpolateArrayChildren as _interpolateArrayChildren, interpolateString as _interpolateString } from "@bablr/agast-helpers/template";
+ import { interpolateArray as _interpolateArray, interpolateFragmentChildren as _interpolateFragmentChildren, interpolateString as _interpolateString } from "@bablr/agast-helpers/template";
  import * as _l from "@bablr/agast-vm-helpers/languages";
  import * as _t from "@bablr/agast-helpers/shorthand";
  let _initProto, _ArrayDecs, _ObjectDecs, _TupleDecs, _BooleanDecs, _NullDecs, _IdentifierDecs, _IntegerDecs, _InfinityDecs, _PunctuatorDecs, _KeywordDecs, _AnyDecs;
@@ -20,46 +20,48 @@ export function* eatMatchTrivia() {
  if (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
  verb: _t.s_node(_l.Instruction, "Identifier", "match"),
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
  values: [_t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
  elements: [_t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
  elements: [_t.node(_l.Regex, "Character", [_t.lit(" ")], {}, {}), _t.node(_l.Regex, "Character", [_t.embedded(_t.s_e_node(_l.Regex, "Escape", [_t.lit("\\t")], {}, {
  cooked: "\t"
  }))], {}, {}), _t.node(_l.Regex, "Character", [_t.embedded(_t.s_e_node(_l.Regex, "Escape", [_t.lit("\\n")], {}, {
  cooked: "\n"
  }))], {}, {})],
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
  }, {
  negate: false
  })]
  }, {})],
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
  }, {})],
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
  }, {})
  }, {})) {
  return yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`language`, _t.ref`namespaceSeparatorToken`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`triviaToken`, _t.ref`tokenToken`], {
- triviaToken: _t.s_i_node(_l.CSTML, "Punctuator", "#"),
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
+ triviaToken: _t.s_node(_l.CSTML, "Punctuator", "#"),
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
  }, {}),
- language: _t.s_node(_l.Spamex, "Identifier", "Space"),
- namespaceSeparatorToken: _t.s_i_node(_l.Spamex, "Punctuator", ":"),
- type: _t.s_node(_l.Spamex, "Identifier", "Space"),
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
+ language: _t.node(_l.CSTML, "IdentifierPath", [_t.ref`segments[]`, _t.arr(), _t.ref`segments[]`], {
+ segments: [_t.s_node(_l.CSTML, "Identifier", "Space")]
+ }, {}),
+ namespaceSeparatorToken: _t.s_node(_l.CSTML, "Punctuator", ":"),
+ type: _t.s_node(_l.CSTML, "Identifier", "Space"),
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
  }, {})
  }, {})],
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
  }, {})
  }, {});
  }
@@ -76,44 +78,44 @@ export const grammar = class BABLRVMInstructionGrammar {
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
  }, {}),
  type: _t.s_node(_l.Spamex, "Identifier", "Identifier"),
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
  }, {})
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
- content: _t.node(_l.CSTML, "Content", [_t.lit("callee")], {}, {}),
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
+ content: _t.node(_l.CSTML, "Content", [_t.lit("callee$")], {}, {}),
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
  }, {})],
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
  }, {})
  }, {});
  yield* eatMatchTrivia();
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
  type: _t.s_node(_l.Spamex, "Identifier", "Tuple"),
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
  }, {})
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
- content: _t.node(_l.CSTML, "Content", [_t.lit("arguments")], {}, {}),
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
+ content: _t.node(_l.CSTML, "Content", [_t.lit("arguments$")], {}, {}),
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
  }, {})],
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
  }, {})
  }, {});
  }
@@ -121,175 +123,175 @@ export const grammar = class BABLRVMInstructionGrammar {
121
123
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
122
124
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
123
125
  arguments: _t.node(_l.Instruction, "Tuple", [9, [[_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`], [_t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`]]], {
124
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
126
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
125
127
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
126
128
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
127
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
129
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
128
130
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
129
131
  type: _t.s_node(_l.Spamex, "Identifier", "Any"),
130
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
131
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
132
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
133
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
132
134
  }, {})
133
135
  }, {}), _t.node(_l.Instruction, "Null", [_t.ref`sigilToken`], {
134
- sigilToken: _t.s_i_node(_l.Instruction, "Keyword", "null")
136
+ sigilToken: _t.s_node(_l.Instruction, "Keyword", "null")
135
137
  }, {}), _t.node(_l.Instruction, "Array", [19, [[_t.ref`openToken`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.arr()], [_t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {}))], [_t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {}))], [_t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`closeToken`]]], {
136
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "["),
138
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "["),
137
139
  elements: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
138
140
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
139
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
141
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
140
142
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
141
143
  type: _t.s_node(_l.Spamex, "Identifier", "Array"),
142
144
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
143
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
145
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
144
146
  content: _t.node(_l.CSTML, "Content", [_t.lit("[")], {}, {}),
145
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
147
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
146
148
  }, {}),
147
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
148
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
149
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
150
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
149
151
  }, {})
150
152
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
151
153
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
152
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
154
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
153
155
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
154
156
  type: _t.s_node(_l.Spamex, "Identifier", "Object"),
155
157
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
156
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
158
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
157
159
  content: _t.node(_l.CSTML, "Content", [_t.lit("{")], {}, {}),
158
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
160
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
159
161
  }, {}),
160
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
161
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
162
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
163
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
162
164
  }, {})
163
165
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
164
166
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
165
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
167
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
166
168
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
167
169
  type: _t.s_node(_l.Spamex, "Identifier", "Tuple"),
168
170
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
169
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
171
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
170
172
  content: _t.node(_l.CSTML, "Content", [_t.lit("(")], {}, {}),
171
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
173
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
172
174
  }, {}),
173
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
174
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
175
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
176
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
175
177
  }, {})
176
178
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
177
179
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
178
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
180
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
179
181
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
180
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
182
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
181
183
  }, {}),
182
184
  type: _t.s_node(_l.Spamex, "Identifier", "Boolean"),
183
185
  intrinsicValue: _t.node(_l.Regex, "Pattern", [9, [[_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`], [_t.ref`separators[]`, _t.arr(), _t.ref`separators[]`, _t.ref`alternatives[]`, _t.ref`closeToken`]]], {
184
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
186
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
185
187
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`], {
186
188
  elements: [_t.node(_l.Regex, "Character", [_t.lit("t")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("r")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("u")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("e")], {}, {})]
187
189
  }, {}), _t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`], {
188
190
  elements: [_t.node(_l.Regex, "Character", [_t.lit("f")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("a")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("l")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("s")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("e")], {}, {})]
189
191
  }, {})],
190
- separators: [_t.s_i_node(_l.Regex, "Punctuator", "|")],
191
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
192
+ separators: [_t.s_node(_l.Regex, "Punctuator", "|")],
193
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
192
194
  }, {}),
193
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
194
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
195
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
196
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
195
197
  }, {})
196
198
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
197
199
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
198
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
200
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
199
201
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
200
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
202
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
201
203
  }, {}),
202
204
  type: _t.s_node(_l.Spamex, "Identifier", "Null"),
203
205
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
204
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
206
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
205
207
  content: _t.node(_l.CSTML, "Content", [_t.lit("null")], {}, {}),
206
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
208
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
207
209
  }, {}),
208
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
209
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
210
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
211
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
210
212
  }, {})
211
213
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
212
214
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
213
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
215
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
214
216
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
215
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
217
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
216
218
  }, {}),
217
219
  type: _t.s_node(_l.Spamex, "Identifier", "Identifier"),
218
220
  intrinsicValue: _t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
219
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
221
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
220
222
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
221
223
  elements: [_t.node(_l.Regex, "Quantifier", [_t.ref`element`, _t.ref`value`], {
222
224
  element: _t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`closeToken`], {
223
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
225
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
224
226
  elements: [_t.node(_l.Regex, "CharacterClassRange", [_t.ref`min`, _t.ref`rangeToken`, _t.ref`max`], {
225
227
  min: _t.node(_l.Regex, "Character", [_t.lit("a")], {}, {}),
226
- rangeToken: _t.s_i_node(_l.Regex, "Punctuator", "-"),
228
+ rangeToken: _t.s_node(_l.Regex, "Punctuator", "-"),
227
229
  max: _t.node(_l.Regex, "Character", [_t.lit("z")], {}, {})
228
230
  }, {}), _t.node(_l.Regex, "CharacterClassRange", [_t.ref`min`, _t.ref`rangeToken`, _t.ref`max`], {
229
231
  min: _t.node(_l.Regex, "Character", [_t.lit("A")], {}, {}),
230
- rangeToken: _t.s_i_node(_l.Regex, "Punctuator", "-"),
232
+ rangeToken: _t.s_node(_l.Regex, "Punctuator", "-"),
231
233
  max: _t.node(_l.Regex, "Character", [_t.lit("Z")], {}, {})
232
234
  }, {})],
233
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
235
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
234
236
  }, {
235
237
  negate: false
236
238
  }),
237
- value: _t.s_i_node(_l.Regex, "Keyword", "+")
239
+ value: _t.s_node(_l.Regex, "Keyword", "+")
238
240
  }, {
239
241
  min: 1,
240
242
  max: Infinity,
241
243
  greedy: true
242
244
  })]
243
245
  }, {})],
244
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
246
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
245
247
  }, {}),
246
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
247
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
248
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
249
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
248
250
  }, {})
249
251
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
250
252
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
251
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
253
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
252
254
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
253
255
  type: _t.s_node(_l.Spamex, "Identifier", "Number"),
254
256
  intrinsicValue: _t.node(_l.Regex, "Pattern", [9, [[_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`], [_t.ref`separators[]`, _t.arr(), _t.ref`separators[]`, _t.ref`alternatives[]`, _t.ref`closeToken`]]], {
255
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
257
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
256
258
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
257
259
  elements: [_t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`closeToken`], {
258
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
260
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
259
261
  elements: [_t.node(_l.Regex, "DigitCharacterSet", [_t.ref`escapeToken`, _t.ref`value`], {
260
- escapeToken: _t.s_i_node(_l.Regex, "Punctuator", "\\"),
261
- value: _t.s_i_node(_l.Regex, "Keyword", "d")
262
+ escapeToken: _t.s_node(_l.Regex, "Punctuator", "\\"),
263
+ value: _t.s_node(_l.Regex, "Keyword", "d")
262
264
  }, {})],
263
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
265
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
264
266
  }, {
265
267
  negate: false
266
268
  })]
267
269
  }, {}), _t.node(_l.Regex, "Alternative", [11, [[_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`], [_t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`]]], {
268
270
  elements: [9, [[_t.node(_l.Regex, "Quantifier", [_t.ref`element`, _t.ref`value`], {
269
271
  element: _t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`closeToken`], {
270
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
272
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
271
273
  elements: [_t.node(_l.Regex, "Character", [_t.lit("+")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("-")], {}, {})],
272
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
274
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
273
275
  }, {
274
276
  negate: false
275
277
  }),
276
- value: _t.s_i_node(_l.Regex, "Keyword", "?")
278
+ value: _t.s_node(_l.Regex, "Keyword", "?")
277
279
  }, {
278
280
  min: 0,
279
281
  max: 1,
280
282
  greedy: true
281
283
  }), _t.node(_l.Regex, "Character", [_t.lit("I")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("n")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("f")], {}, {})], [_t.node(_l.Regex, "Character", [_t.lit("i")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("n")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("i")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("t")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("y")], {}, {})]]]
282
284
  }, {})],
283
- separators: [_t.s_i_node(_l.Regex, "Punctuator", "|")],
284
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
285
+ separators: [_t.s_node(_l.Regex, "Punctuator", "|")],
286
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
285
287
  }, {}),
286
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
287
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
288
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
289
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
288
290
  }, {})
289
291
  }, {})],
290
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", "]")
292
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", "]")
291
293
  }, {})],
292
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
294
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
293
295
  }, {})
294
296
  }, {});
295
297
  }
@@ -297,38 +299,37 @@ export const grammar = class BABLRVMInstructionGrammar {
297
299
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
298
300
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
299
301
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
300
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
302
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
301
303
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
302
304
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [12, [[_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {}))], [_t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`attributes[]`, _t.arr(), _t.ref`attributes[]`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`]]], {
303
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
304
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
305
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
306
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
305
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
306
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
307
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
307
308
  }, {}),
308
309
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
309
310
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
310
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
311
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
311
312
  content: _t.node(_l.CSTML, "Content", [_t.lit("[")], {}, {}),
312
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
313
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
313
314
  }, {}),
314
315
  attributes: [_t.node(_l.Spamex, "MappingAttribute", [_t.ref`key`, _t.ref`mapToken`, _t.ref`value`], {
315
316
  key: _t.s_node(_l.Spamex, "Identifier", "balanced"),
316
- mapToken: _t.s_i_node(_l.Spamex, "Punctuator", "="),
317
+ mapToken: _t.s_node(_l.Spamex, "Punctuator", "="),
317
318
  value: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
318
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
319
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
319
320
  content: _t.node(_l.CSTML, "Content", [_t.lit("]")], {}, {}),
320
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
321
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
321
322
  }, {})
322
323
  }, {})],
323
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
324
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
324
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
325
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
325
326
  }, {})
326
327
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
327
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
328
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
328
329
  content: _t.node(_l.CSTML, "Content", [_t.lit("openToken")], {}, {}),
329
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
330
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
330
331
  }, {})],
331
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
332
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
332
333
  }, {})
333
334
  }, {});
334
335
  let sp = yield* eatMatchTrivia();
@@ -336,17 +337,17 @@ export const grammar = class BABLRVMInstructionGrammar {
  while ((first || sp) && (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
  verb: _t.s_node(_l.Instruction, "Identifier", "match"),
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
  values: [_t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`, _t.ref`flags`], {
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
  elements: [_t.node(_l.Regex, "AnyCharacterSet", [_t.ref`sigilToken`], {
- sigilToken: _t.s_i_node(_l.Regex, "Keyword", ".")
+ sigilToken: _t.s_node(_l.Regex, "Keyword", ".")
  }, {})]
  }, {})],
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/"),
  flags: _t.node(_l.Regex, "Flags", [_t.ref`tokens[]`, _t.arr(), _t.ref`tokens[]`], {
- tokens: [_t.s_i_node(_l.Regex, "Keyword", "y")]
+ tokens: [_t.s_node(_l.Regex, "Keyword", "y")]
  }, {
  global: false,
  ignoreCase: false,
@@ -356,27 +357,27 @@ export const grammar = class BABLRVMInstructionGrammar {
356
357
  sticky: true
357
358
  })
358
359
  }, {})],
359
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
360
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
360
361
  }, {})
361
362
  }, {}))) {
362
363
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
363
364
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
364
365
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
365
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
366
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
366
367
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
367
368
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
368
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
369
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
369
370
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
370
371
  type: _t.s_node(_l.Spamex, "Identifier", "Expression"),
371
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
372
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
372
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
373
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
373
374
  }, {})
374
375
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
375
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
376
- content: _t.node(_l.CSTML, "Content", [_t.lit("elements[]")], {}, {}),
377
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
376
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
377
+ content: _t.node(_l.CSTML, "Content", [_t.lit("elements[]$")], {}, {}),
378
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
378
379
  }, {})],
379
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
380
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
380
381
  }, {})
381
382
  }, {});
382
383
  sp = yield* eatMatchTrivia();
@@ -385,32 +386,31 @@ export const grammar = class BABLRVMInstructionGrammar {
385
386
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
386
387
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
387
388
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
388
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
389
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
389
390
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
390
391
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [12, [[_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {}))], [_t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`attributes[]`, _t.arr(), _t.ref`attributes[]`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`]]], {
391
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
392
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
393
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
394
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
392
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
393
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
394
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
395
395
  }, {}),
396
396
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
397
397
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
398
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
398
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
399
399
  content: _t.node(_l.CSTML, "Content", [_t.lit("]")], {}, {}),
400
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
400
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
401
401
  }, {}),
402
402
  attributes: [_t.node(_l.Spamex, "BooleanAttribute", [_t.ref`key`], {
403
403
  key: _t.s_node(_l.Spamex, "Identifier", "balancer")
404
404
  }, {})],
405
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
406
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
405
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
406
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
407
407
  }, {})
408
408
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
409
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
409
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
410
410
  content: _t.node(_l.CSTML, "Content", [_t.lit("closeToken")], {}, {}),
411
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
411
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
412
412
  }, {})],
413
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
413
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
414
414
  }, {})
415
415
  }, {});
416
416
  }
@@ -418,38 +418,37 @@ export const grammar = class BABLRVMInstructionGrammar {
418
418
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
419
419
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
420
420
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
421
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
421
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
422
422
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
423
423
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [12, [[_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {}))], [_t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`attributes[]`, _t.arr(), _t.ref`attributes[]`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`]]], {
424
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
425
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
426
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
427
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
424
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
425
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
426
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
428
427
  }, {}),
429
428
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
430
429
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
431
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
430
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
432
431
  content: _t.node(_l.CSTML, "Content", [_t.lit("{")], {}, {}),
433
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
432
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
434
433
  }, {}),
435
434
  attributes: [_t.node(_l.Spamex, "MappingAttribute", [_t.ref`key`, _t.ref`mapToken`, _t.ref`value`], {
436
435
  key: _t.s_node(_l.Spamex, "Identifier", "balanced"),
437
- mapToken: _t.s_i_node(_l.Spamex, "Punctuator", "="),
436
+ mapToken: _t.s_node(_l.Spamex, "Punctuator", "="),
438
437
  value: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
439
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
438
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
440
439
  content: _t.node(_l.CSTML, "Content", [_t.lit("}")], {}, {}),
441
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
440
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
442
441
  }, {})
443
442
  }, {})],
444
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
445
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
443
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
444
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
446
445
  }, {})
447
446
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
448
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
447
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
449
448
  content: _t.node(_l.CSTML, "Content", [_t.lit("openToken")], {}, {}),
450
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
449
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
451
450
  }, {})],
452
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
451
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
453
452
  }, {})
454
453
  }, {});
455
454
  let sp = yield* eatMatchTrivia();
@@ -457,17 +456,17 @@ export const grammar = class BABLRVMInstructionGrammar {
  while ((first || sp) && (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
  verb: _t.s_node(_l.Instruction, "Identifier", "match"),
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
  values: [_t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`, _t.ref`flags`], {
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
  elements: [_t.node(_l.Regex, "AnyCharacterSet", [_t.ref`sigilToken`], {
- sigilToken: _t.s_i_node(_l.Regex, "Keyword", ".")
+ sigilToken: _t.s_node(_l.Regex, "Keyword", ".")
  }, {})]
  }, {})],
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/"),
  flags: _t.node(_l.Regex, "Flags", [_t.ref`tokens[]`, _t.arr(), _t.ref`tokens[]`], {
- tokens: [_t.s_i_node(_l.Regex, "Keyword", "y")]
+ tokens: [_t.s_node(_l.Regex, "Keyword", "y")]
  }, {
  global: false,
  ignoreCase: false,
@@ -477,27 +476,27 @@ export const grammar = class BABLRVMInstructionGrammar {
477
476
  sticky: true
478
477
  })
479
478
  }, {})],
480
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
479
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
481
480
  }, {})
482
481
  }, {}))) {
483
482
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
484
483
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
485
484
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
486
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
485
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
487
486
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
488
487
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
489
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
488
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
490
489
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
491
490
  type: _t.s_node(_l.Spamex, "Identifier", "Property"),
492
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
493
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
491
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
492
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
494
493
  }, {})
495
494
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
496
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
497
- content: _t.node(_l.CSTML, "Content", [_t.lit("properties[]")], {}, {}),
498
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
495
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
496
+ content: _t.node(_l.CSTML, "Content", [_t.lit("properties[]$")], {}, {}),
497
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
499
498
  }, {})],
500
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
499
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
501
500
  }, {})
502
501
  }, {});
503
502
  sp = yield* eatMatchTrivia();
@@ -506,32 +505,31 @@ export const grammar = class BABLRVMInstructionGrammar {
506
505
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
507
506
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
508
507
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
509
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
508
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
510
509
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
511
510
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [12, [[_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {}))], [_t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`attributes[]`, _t.arr(), _t.ref`attributes[]`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`]]], {
512
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
513
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
514
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
515
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
511
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
512
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
513
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
516
514
  }, {}),
517
515
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
518
516
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
519
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
517
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
520
518
  content: _t.node(_l.CSTML, "Content", [_t.lit("}")], {}, {}),
521
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
519
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
522
520
  }, {}),
523
521
  attributes: [_t.node(_l.Spamex, "BooleanAttribute", [_t.ref`key`], {
524
522
  key: _t.s_node(_l.Spamex, "Identifier", "balancer")
525
523
  }, {})],
526
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
527
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
524
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
525
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
528
526
  }, {})
529
527
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
530
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
528
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
531
529
  content: _t.node(_l.CSTML, "Content", [_t.lit("closeToken")], {}, {}),
532
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
530
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
533
531
  }, {})],
534
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
532
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
535
533
  }, {})
536
534
  }, {});
537
535
  }
@@ -539,73 +537,72 @@ export const grammar = class BABLRVMInstructionGrammar {
539
537
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
540
538
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
541
539
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
542
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
540
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
543
541
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
544
542
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
545
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
543
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
546
544
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
547
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
545
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
548
546
  }, {}),
549
547
  type: _t.s_node(_l.Spamex, "Identifier", "Identifier"),
550
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
551
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
548
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
549
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
552
550
  }, {})
553
551
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
554
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
555
- content: _t.node(_l.CSTML, "Content", [_t.lit("key")], {}, {}),
556
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
552
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
553
+ content: _t.node(_l.CSTML, "Content", [_t.lit("key$")], {}, {}),
554
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
557
555
  }, {})],
558
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
556
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
559
557
  }, {})
560
558
  }, {});
561
559
  yield* eatMatchTrivia();
562
560
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
563
561
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
564
562
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
565
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
563
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
566
564
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
567
565
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
568
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
569
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
570
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
571
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
566
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
567
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
568
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
572
569
  }, {}),
573
570
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
574
571
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
575
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
572
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
576
573
  content: _t.node(_l.CSTML, "Content", [_t.lit(":")], {}, {}),
577
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
574
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
578
575
  }, {}),
579
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
580
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
576
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
577
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
581
578
  }, {})
582
579
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
583
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
580
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
584
581
  content: _t.node(_l.CSTML, "Content", [_t.lit("mapToken")], {}, {}),
585
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
582
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
586
583
  }, {})],
587
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
584
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
588
585
  }, {})
589
586
  }, {});
590
587
  yield* eatMatchTrivia();
591
588
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
592
589
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
593
590
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
594
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
591
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
595
592
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
596
593
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
597
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
594
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
598
595
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
599
596
  type: _t.s_node(_l.Spamex, "Identifier", "Expression"),
600
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
601
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
597
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
598
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
602
599
  }, {})
603
600
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
604
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
605
- content: _t.node(_l.CSTML, "Content", [_t.lit("value")], {}, {}),
606
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
601
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
602
+ content: _t.node(_l.CSTML, "Content", [_t.lit("value$")], {}, {}),
603
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
607
604
  }, {})],
608
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
605
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
609
606
  }, {})
610
607
  }, {});
611
608
  }
@@ -613,38 +610,37 @@ export const grammar = class BABLRVMInstructionGrammar {
613
610
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
614
611
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
615
612
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
616
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
613
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
617
614
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
618
615
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [12, [[_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {}))], [_t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`attributes[]`, _t.arr(), _t.ref`attributes[]`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`]]], {
619
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
620
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
621
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
622
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
616
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
617
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
618
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
623
619
  }, {}),
624
620
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
625
621
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
626
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
622
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
627
623
  content: _t.node(_l.CSTML, "Content", [_t.lit("(")], {}, {}),
628
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
624
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
629
625
  }, {}),
630
626
  attributes: [_t.node(_l.Spamex, "MappingAttribute", [_t.ref`key`, _t.ref`mapToken`, _t.ref`value`], {
631
627
  key: _t.s_node(_l.Spamex, "Identifier", "balanced"),
632
- mapToken: _t.s_i_node(_l.Spamex, "Punctuator", "="),
628
+ mapToken: _t.s_node(_l.Spamex, "Punctuator", "="),
633
629
  value: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
634
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
630
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
635
631
  content: _t.node(_l.CSTML, "Content", [_t.lit(")")], {}, {}),
636
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
632
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
637
633
  }, {})
638
634
  }, {})],
639
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
640
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
635
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
636
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
641
637
  }, {})
642
638
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
643
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
639
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
644
640
  content: _t.node(_l.CSTML, "Content", [_t.lit("openToken")], {}, {}),
645
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
641
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
646
642
  }, {})],
647
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
643
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
648
644
  }, {})
649
645
  }, {});
650
646
  let sp = yield* eatMatchTrivia();
@@ -652,17 +648,17 @@ export const grammar = class BABLRVMInstructionGrammar {
652
648
  while ((first || sp) && (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
653
649
  verb: _t.s_node(_l.Instruction, "Identifier", "match"),
654
650
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
655
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
651
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
656
652
  values: [_t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`, _t.ref`flags`], {
657
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
653
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
658
654
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
659
655
  elements: [_t.node(_l.Regex, "AnyCharacterSet", [_t.ref`sigilToken`], {
660
- sigilToken: _t.s_i_node(_l.Regex, "Keyword", ".")
656
+ sigilToken: _t.s_node(_l.Regex, "Keyword", ".")
661
657
  }, {})]
662
658
  }, {})],
663
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
659
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/"),
664
660
  flags: _t.node(_l.Regex, "Flags", [_t.ref`tokens[]`, _t.arr(), _t.ref`tokens[]`], {
665
- tokens: [_t.s_i_node(_l.Regex, "Keyword", "y")]
661
+ tokens: [_t.s_node(_l.Regex, "Keyword", "y")]
666
662
  }, {
667
663
  global: false,
668
664
  ignoreCase: false,
@@ -672,27 +668,27 @@ export const grammar = class BABLRVMInstructionGrammar {
672
668
  sticky: true
673
669
  })
674
670
  }, {})],
675
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
671
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
676
672
  }, {})
677
673
  }, {}))) {
678
674
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
679
675
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
680
676
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
681
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
677
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
682
678
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
683
679
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
684
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
680
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
685
681
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
686
682
  type: _t.s_node(_l.Spamex, "Identifier", "Expression"),
687
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
688
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
683
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
684
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
689
685
  }, {})
690
686
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
691
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
692
- content: _t.node(_l.CSTML, "Content", [_t.lit("values[]")], {}, {}),
693
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
687
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
688
+ content: _t.node(_l.CSTML, "Content", [_t.lit("values[]$")], {}, {}),
689
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
694
690
  }, {})],
695
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
691
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
696
692
  }, {})
697
693
  }, {});
698
694
  sp = yield* eatMatchTrivia();
@@ -701,32 +697,31 @@ export const grammar = class BABLRVMInstructionGrammar {
701
697
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
702
698
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
703
699
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
704
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
700
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
705
701
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
706
702
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [12, [[_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {}))], [_t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`attributes[]`, _t.arr(), _t.ref`attributes[]`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`]]], {
707
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
708
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
709
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
710
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
703
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
704
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
705
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
711
706
  }, {}),
712
707
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
713
708
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
714
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
709
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
715
710
  content: _t.node(_l.CSTML, "Content", [_t.lit(")")], {}, {}),
716
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
711
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
717
712
  }, {}),
718
713
  attributes: [_t.node(_l.Spamex, "BooleanAttribute", [_t.ref`key`], {
719
714
  key: _t.s_node(_l.Spamex, "Identifier", "balancer")
720
715
  }, {})],
721
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
722
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
716
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
717
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
723
718
  }, {})
724
719
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
725
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
720
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
726
721
  content: _t.node(_l.CSTML, "Content", [_t.lit("closeToken")], {}, {}),
727
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
722
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
728
723
  }, {})],
729
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
724
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
730
725
  }, {})
731
726
  }, {});
732
727
  }
@@ -734,34 +729,33 @@ export const grammar = class BABLRVMInstructionGrammar {
734
729
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
735
730
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
736
731
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
737
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
732
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
738
733
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
739
734
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
740
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
741
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
742
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
743
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
735
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
736
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
737
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
744
738
  }, {}),
745
739
  type: _t.s_node(_l.Spamex, "Identifier", "Keyword"),
746
740
  intrinsicValue: _t.node(_l.Regex, "Pattern", [9, [[_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`], [_t.ref`separators[]`, _t.arr(), _t.ref`separators[]`, _t.ref`alternatives[]`, _t.ref`closeToken`]]], {
747
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
741
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
748
742
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`], {
749
743
  elements: [_t.node(_l.Regex, "Character", [_t.lit("t")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("r")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("u")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("e")], {}, {})]
750
744
  }, {}), _t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`elements[]`], {
751
745
  elements: [_t.node(_l.Regex, "Character", [_t.lit("f")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("a")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("l")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("s")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("e")], {}, {})]
752
746
  }, {})],
753
- separators: [_t.s_i_node(_l.Regex, "Punctuator", "|")],
754
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
747
+ separators: [_t.s_node(_l.Regex, "Punctuator", "|")],
748
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
755
749
  }, {}),
756
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
757
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
750
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
751
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
758
752
  }, {})
759
753
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
760
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
754
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
761
755
  content: _t.node(_l.CSTML, "Content", [_t.lit("sigilToken")], {}, {}),
762
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
756
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
763
757
  }, {})],
764
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
758
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
765
759
  }, {})
766
760
  }, {});
767
761
  }
@@ -769,29 +763,28 @@ export const grammar = class BABLRVMInstructionGrammar {
769
763
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
770
764
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
771
765
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
772
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
766
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
773
767
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
774
768
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
775
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
776
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
777
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
778
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
769
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
770
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
771
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
779
772
  }, {}),
780
773
  type: _t.s_node(_l.Spamex, "Identifier", "Keyword"),
781
774
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
782
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
775
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
783
776
  content: _t.node(_l.CSTML, "Content", [_t.lit("null")], {}, {}),
784
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
777
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
785
778
  }, {}),
786
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
787
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
779
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
780
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
788
781
  }, {})
789
782
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
790
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
783
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
791
784
  content: _t.node(_l.CSTML, "Content", [_t.lit("sigilToken")], {}, {}),
792
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
785
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
793
786
  }, {})],
794
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
787
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
795
788
  }, {})
796
789
  }, {});
797
790
  }
@@ -799,36 +792,36 @@ export const grammar = class BABLRVMInstructionGrammar {
799
792
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
800
793
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
801
794
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
802
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
795
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
803
796
  values: [_t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
804
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
797
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
805
798
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
806
799
  elements: [_t.node(_l.Regex, "Quantifier", [_t.ref`element`, _t.ref`value`], {
807
800
  element: _t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`closeToken`], {
808
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
801
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
809
802
  elements: [_t.node(_l.Regex, "CharacterClassRange", [_t.ref`min`, _t.ref`rangeToken`, _t.ref`max`], {
810
803
  min: _t.node(_l.Regex, "Character", [_t.lit("a")], {}, {}),
811
- rangeToken: _t.s_i_node(_l.Regex, "Punctuator", "-"),
804
+ rangeToken: _t.s_node(_l.Regex, "Punctuator", "-"),
812
805
  max: _t.node(_l.Regex, "Character", [_t.lit("z")], {}, {})
813
806
  }, {}), _t.node(_l.Regex, "CharacterClassRange", [_t.ref`min`, _t.ref`rangeToken`, _t.ref`max`], {
814
807
  min: _t.node(_l.Regex, "Character", [_t.lit("A")], {}, {}),
815
- rangeToken: _t.s_i_node(_l.Regex, "Punctuator", "-"),
808
+ rangeToken: _t.s_node(_l.Regex, "Punctuator", "-"),
816
809
  max: _t.node(_l.Regex, "Character", [_t.lit("Z")], {}, {})
817
810
  }, {})],
818
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
811
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
819
812
  }, {
820
813
  negate: false
821
814
  }),
822
- value: _t.s_i_node(_l.Regex, "Keyword", "+")
815
+ value: _t.s_node(_l.Regex, "Keyword", "+")
823
816
  }, {
824
817
  min: 1,
825
818
  max: Infinity,
826
819
  greedy: true
827
820
  })]
828
821
  }, {})],
829
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
822
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
830
823
  }, {})],
831
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
824
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
832
825
  }, {})
833
826
  }, {});
834
827
  }
@@ -836,69 +829,69 @@ export const grammar = class BABLRVMInstructionGrammar {
836
829
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
837
830
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
838
831
  arguments: _t.node(_l.Instruction, "Tuple", [9, [[_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`], [_t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`]]], {
839
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
832
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
840
833
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
841
834
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
842
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
835
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
843
836
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
844
837
  type: _t.s_node(_l.Spamex, "Identifier", "Any"),
845
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
846
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
838
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
839
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
847
840
  }, {})
848
841
  }, {}), _t.node(_l.Instruction, "Null", [_t.ref`sigilToken`], {
849
- sigilToken: _t.s_i_node(_l.Instruction, "Keyword", "null")
842
+ sigilToken: _t.s_node(_l.Instruction, "Keyword", "null")
850
843
  }, {}), _t.node(_l.Instruction, "Array", [9, [[_t.ref`openToken`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.arr()], [_t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`elements[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit("\n ")], {}, {})), _t.ref`closeToken`]]], {
851
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "["),
844
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "["),
852
845
  elements: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
853
846
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
854
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
847
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
855
848
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
856
849
  type: _t.s_node(_l.Spamex, "Identifier", "Integer"),
857
850
  intrinsicValue: _t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
858
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
851
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
859
852
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`], {
860
853
  elements: [_t.node(_l.Regex, "Quantifier", [_t.ref`element`, _t.ref`value`], {
861
854
  element: _t.node(_l.Regex, "Character", [_t.lit("-")], {}, {}),
862
- value: _t.s_i_node(_l.Regex, "Keyword", "?")
855
+ value: _t.s_node(_l.Regex, "Keyword", "?")
863
856
  }, {
864
857
  min: 0,
865
858
  max: 1,
866
859
  greedy: true
867
860
  }), _t.node(_l.Regex, "DigitCharacterSet", [_t.ref`escapeToken`, _t.ref`value`], {
868
- escapeToken: _t.s_i_node(_l.Regex, "Punctuator", "\\"),
869
- value: _t.s_i_node(_l.Regex, "Keyword", "d")
861
+ escapeToken: _t.s_node(_l.Regex, "Punctuator", "\\"),
862
+ value: _t.s_node(_l.Regex, "Keyword", "d")
870
863
  }, {})]
871
864
  }, {})],
872
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
865
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
873
866
  }, {}),
874
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
875
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
867
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
868
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
876
869
  }, {})
877
870
  }, {}), _t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
878
871
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
879
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
872
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
880
873
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
881
874
  type: _t.s_node(_l.Spamex, "Identifier", "Infinity"),
882
875
  intrinsicValue: _t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
883
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
876
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
884
877
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`], {
885
878
  elements: [_t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`closeToken`], {
886
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
879
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
887
880
  elements: [_t.node(_l.Regex, "Character", [_t.lit("+")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("-")], {}, {})],
888
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
881
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
889
882
  }, {
890
883
  negate: false
891
884
  }), _t.node(_l.Regex, "Character", [_t.lit("I")], {}, {})]
892
885
  }, {})],
893
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
886
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
894
887
  }, {}),
895
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
896
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
888
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
889
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
897
890
  }, {})
898
891
  }, {})],
899
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", "]")
892
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", "]")
900
893
  }, {})],
901
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
894
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
902
895
  }, {})
903
896
  }, {});
904
897
  }
@@ -906,21 +899,21 @@ export const grammar = class BABLRVMInstructionGrammar {
906
899
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
907
900
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
908
901
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
909
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
902
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
910
903
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
911
904
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
912
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
905
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
913
906
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
914
907
  type: _t.s_node(_l.Spamex, "Identifier", "Digits"),
915
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
916
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
908
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
909
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
917
910
  }, {})
918
911
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
919
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
912
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
920
913
  content: _t.node(_l.CSTML, "Content", [_t.lit("digits[]")], {}, {}),
921
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
914
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
922
915
  }, {})],
923
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
916
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
924
917
  }, {})
925
918
  }, {});
926
919
  }
@@ -928,49 +921,48 @@ export const grammar = class BABLRVMInstructionGrammar {
928
921
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
929
922
  verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
930
923
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
931
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
924
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
932
925
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
933
926
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
934
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
935
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
936
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
937
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
927
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
928
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
929
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
938
930
  }, {}),
939
931
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
940
932
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
941
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
933
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
942
934
  content: _t.node(_l.CSTML, "Content", [_t.lit("-")], {}, {}),
943
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
935
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
944
936
  }, {}),
945
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
946
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
937
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
938
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
947
939
  }, {})
948
940
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
949
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
941
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
950
942
  content: _t.node(_l.CSTML, "Content", [_t.lit("negativeToken")], {}, {}),
951
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
943
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
952
944
  }, {})],
953
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
945
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
954
946
  }, {})
955
947
  }, {});
956
948
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
957
949
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
958
950
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
959
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
951
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
960
952
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
961
953
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
962
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
954
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
963
955
  flags: _t.node(_l.CSTML, "Flags", [], {}, {}),
964
956
  type: _t.s_node(_l.Spamex, "Identifier", "Digits"),
965
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
966
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
957
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
958
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
967
959
  }, {})
968
960
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
969
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
970
- content: _t.node(_l.CSTML, "Content", [_t.lit("digits[]")], {}, {}),
971
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
961
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
962
+ content: _t.node(_l.CSTML, "Content", [_t.lit("digits[]$")], {}, {}),
963
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
972
964
  }, {})],
973
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
965
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
974
966
  }, {})
975
967
  }, {});
976
968
  }
@@ -978,29 +970,29 @@ export const grammar = class BABLRVMInstructionGrammar {
978
970
  while (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
979
971
  verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
980
972
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
981
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
973
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
982
974
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
983
975
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
984
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
976
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
985
977
  flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
986
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
978
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
987
979
  }, {}),
988
980
  type: _t.s_node(_l.Spamex, "Identifier", "Digit"),
989
981
  intrinsicValue: _t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
990
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
982
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
991
983
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
992
984
  elements: [_t.node(_l.Regex, "DigitCharacterSet", [_t.ref`escapeToken`, _t.ref`value`], {
993
- escapeToken: _t.s_i_node(_l.Regex, "Punctuator", "\\"),
994
- value: _t.s_i_node(_l.Regex, "Keyword", "d")
985
+ escapeToken: _t.s_node(_l.Regex, "Punctuator", "\\"),
986
+ value: _t.s_node(_l.Regex, "Keyword", "d")
995
987
  }, {})]
996
988
  }, {})],
997
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
989
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
998
990
  }, {}),
999
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
1000
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
991
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
992
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
1001
993
  }, {})
1002
994
  }, {})],
1003
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
995
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
1004
996
  }, {})
1005
997
  }, {}));
1006
998
  }
@@ -1008,18 +1000,18 @@ export const grammar = class BABLRVMInstructionGrammar {
1008
1000
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
1009
1001
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
1010
1002
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.ref`closeToken`], {
1011
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
1003
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
1012
1004
  values: [_t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
1013
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
1005
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
1014
1006
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
1015
1007
  elements: [_t.node(_l.Regex, "DigitCharacterSet", [_t.ref`escapeToken`, _t.ref`value`], {
1016
- escapeToken: _t.s_i_node(_l.Regex, "Punctuator", "\\"),
1017
- value: _t.s_i_node(_l.Regex, "Keyword", "d")
1008
+ escapeToken: _t.s_node(_l.Regex, "Punctuator", "\\"),
1009
+ value: _t.s_node(_l.Regex, "Keyword", "d")
1018
1010
  }, {})]
1019
1011
  }, {})],
1020
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
1012
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
1021
1013
  }, {})],
1022
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
1014
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
1023
1015
  }, {})
1024
1016
  }, {});
1025
1017
  }
@@ -1027,65 +1019,63 @@ export const grammar = class BABLRVMInstructionGrammar {
1027
1019
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
1028
1020
  verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
1029
1021
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
1030
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
1022
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
1031
1023
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
1032
1024
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
1033
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
1034
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
1035
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
1036
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
1025
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
1026
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
1027
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
1037
1028
  }, {}),
1038
1029
  type: _t.s_node(_l.Spamex, "Identifier", "Punctuator"),
1039
1030
  intrinsicValue: _t.node(_l.Regex, "Pattern", [_t.ref`openToken`, _t.ref`alternatives[]`, _t.arr(), _t.ref`alternatives[]`, _t.ref`closeToken`], {
1040
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "/"),
1031
+ openToken: _t.s_node(_l.Regex, "Punctuator", "/"),
1041
1032
  alternatives: [_t.node(_l.Regex, "Alternative", [_t.ref`elements[]`, _t.arr(), _t.ref`elements[]`], {
1042
1033
  elements: [_t.node(_l.Regex, "CharacterClass", [_t.ref`openToken`, _t.ref`elements[]`, _t.arr(), _t.ref`elements[]`, _t.ref`elements[]`, _t.ref`closeToken`], {
1043
- openToken: _t.s_i_node(_l.Regex, "Punctuator", "["),
1034
+ openToken: _t.s_node(_l.Regex, "Punctuator", "["),
1044
1035
  elements: [_t.node(_l.Regex, "Character", [_t.lit("+")], {}, {}), _t.node(_l.Regex, "Character", [_t.lit("-")], {}, {})],
1045
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "]")
1036
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "]")
1046
1037
  }, {
1047
1038
  negate: false
1048
1039
  })]
1049
1040
  }, {})],
1050
- closeToken: _t.s_i_node(_l.Regex, "Punctuator", "/")
1041
+ closeToken: _t.s_node(_l.Regex, "Punctuator", "/")
1051
1042
  }, {}),
1052
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
1053
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
1043
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
1044
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
1054
1045
  }, {})
1055
1046
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
1056
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
1047
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
1057
1048
  content: _t.node(_l.CSTML, "Content", [_t.lit("signToken")], {}, {}),
1058
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
1049
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
1059
1050
  }, {})],
1060
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
1051
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
1061
1052
  }, {})
1062
1053
  }, {});
1063
1054
  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
1064
1055
  verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
1065
1056
  arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`openToken`, _t.ref`values[]`, _t.arr(), _t.ref`values[]`, _t.embedded(_t.node(_l.Instruction, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`closeToken`], {
1066
- openToken: _t.s_i_node(_l.Instruction, "Punctuator", "("),
1057
+ openToken: _t.s_node(_l.Instruction, "Punctuator", "("),
1067
1058
  values: [_t.node(_l.Spamex, "NodeMatcher", [_t.ref`open`], {
1068
1059
  open: _t.node(_l.Spamex, "OpenNodeMatcher", [_t.ref`openToken`, _t.ref`flags`, _t.ref`type`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`intrinsicValue`, _t.embedded(_t.node(_l.Spamex, "Space", [_t.lit(" ")], {}, {})), _t.ref`selfClosingTagToken`, _t.ref`closeToken`], {
1069
- openToken: _t.s_i_node(_l.Spamex, "Punctuator", "<"),
1070
- flags: _t.node(_l.CSTML, "Flags", [_t.ref`intrinsicToken`, _t.ref`tokenToken`], {
1071
- intrinsicToken: _t.s_i_node(_l.CSTML, "Punctuator", "~"),
1072
- tokenToken: _t.s_i_node(_l.CSTML, "Punctuator", "*")
1060
+ openToken: _t.s_node(_l.Spamex, "Punctuator", "<"),
1061
+ flags: _t.node(_l.CSTML, "Flags", [_t.ref`tokenToken`], {
1062
+ tokenToken: _t.s_node(_l.CSTML, "Punctuator", "*")
1073
1063
  }, {}),
1074
1064
  type: _t.s_node(_l.Spamex, "Identifier", "Keyword"),
1075
1065
  intrinsicValue: _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
1076
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
1066
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
1077
1067
  content: _t.node(_l.CSTML, "Content", [_t.lit("Infinity")], {}, {}),
1078
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
1068
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
1079
1069
  }, {}),
1080
- selfClosingTagToken: _t.s_i_node(_l.Spamex, "Punctuator", "/"),
1081
- closeToken: _t.s_i_node(_l.Spamex, "Punctuator", ">")
1070
+ selfClosingTagToken: _t.s_node(_l.Spamex, "Punctuator", "/"),
1071
+ closeToken: _t.s_node(_l.Spamex, "Punctuator", ">")
1082
1072
  }, {})
1083
1073
  }, {}), _t.node(_l.CSTML, "String", [_t.ref`openToken`, _t.ref`content`, _t.ref`closeToken`], {
1084
- openToken: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
1074
+ openToken: _t.s_node(_l.CSTML, "Punctuator", "'"),
1085
1075
  content: _t.node(_l.CSTML, "Content", [_t.lit("sigilToken")], {}, {}),
1086
- closeToken: _t.s_i_node(_l.CSTML, "Punctuator", "'")
1076
+ closeToken: _t.s_node(_l.CSTML, "Punctuator", "'")
1087
1077
  }, {})],
1088
- closeToken: _t.s_i_node(_l.Instruction, "Punctuator", ")")
1078
+ closeToken: _t.s_node(_l.Instruction, "Punctuator", ")")
1089
1079
  }, {})
1090
1080
  }, {});
1091
1081
  }