@bablr/boot 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/builders.js +0 -14
- package/lib/index.js +5 -1
- package/lib/languages/cstml.js +53 -86
- package/lib/languages/instruction.js +13 -14
- package/lib/languages/regex.js +47 -38
- package/lib/languages/spamex.js +23 -22
- package/lib/miniparser.js +2 -0
- package/lib/path.js +1 -1
- package/lib/print.js +5 -24
- package/package.json +1 -1
- package/shorthand.macro.js +1 -1
package/lib/builders.js
CHANGED
@@ -70,18 +70,10 @@ const buildNodeOpenTag = (flags, type, attributes = {}) => {
   return freeze({ type: 'OpenNodeTag', value: freeze({ flags, type, attributes }) });
 };
 
-const buildFragmentOpenTag = (flags = {}, language) => {
-  return freeze({ type: 'OpenFragmentTag', value: freeze({ flags: freeze(flags), language }) });
-};
-
 const buildNodeCloseTag = (type) => {
   return freeze({ type: 'CloseNodeTag', value: freeze({ type }) });
 };
 
-const buildFragmentCloseTag = () => {
-  return freeze({ type: 'CloseFragmentTag', value: freeze({}) });
-};
-
 const buildLiteral = (value) => {
   return freeze({ type: 'Literal', value });
 };
@@ -374,9 +366,7 @@ const lit = (str) => buildLiteral(stripArray(str));
 
 const gap = buildGap;
 const nodeOpen = buildNodeOpenTag;
-const fragOpen = buildFragmentOpenTag;
 const nodeClose = buildNodeCloseTag;
-const fragClose = buildFragmentCloseTag;
 const node = buildNode;
 const s_node = buildSyntacticNode;
 const e_node = buildEscapeNode;
@@ -398,9 +388,7 @@ module.exports = {
   buildReference,
   buildGap,
   buildNodeOpenTag,
-  buildFragmentOpenTag,
   buildNodeCloseTag,
-  buildFragmentCloseTag,
   buildLiteral,
   buildNode,
   buildSyntacticNode,
@@ -411,9 +399,7 @@ module.exports = {
   lit,
   gap,
   nodeOpen,
-  fragOpen,
   nodeClose,
-  fragClose,
   node,
   s_node,
   e_node,

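With the fragment-tag builders gone, a tag stream is framed with plain node tags end to end. A minimal usage sketch, assuming the module resolves as @bablr/boot/lib/builders (path inferred from this diff, not from package documentation):

const { buildNodeOpenTag, buildLiteral, buildNodeCloseTag } = require('@bablr/boot/lib/builders');

// Wrap a literal in node tags; value shapes follow the builders shown in the hunk above.
const tags = [
  buildNodeOpenTag({ token: true }, 'Keyword'), // { type: 'OpenNodeTag', value: { flags, type, attributes } }
  buildLiteral('null'),                         // { type: 'Literal', value: 'null' }
  buildNodeCloseTag('Keyword'),                 // { type: 'CloseNodeTag', value: { type } }
];
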
package/lib/index.js
CHANGED
@@ -66,6 +66,10 @@ const parse = (language, type, sourceText) => {
 const getAgASTValue = (language, miniNode) => {
   if (!miniNode) return miniNode;
 
+  if (isArray(miniNode)) {
+    return miniNode.map(node => getAgASTValue(node));
+  }
+
   const { language: languageName, type, attributes } = miniNode;
   const flags = { escape: false, trivia: false, token: false, intrinsic: false };
   const properties = {};
@@ -138,7 +142,7 @@ const getAgASTValue = (language, miniNode) => {
 
 const str = buildTag(cstml, 'String');
 const num = buildTag(cstml, 'Integer');
-const cst = buildTag(cstml, '
+const cst = buildTag(cstml, 'Node');
 const re = buildTag(regex, 'Pattern');
 const i = buildTag(instruction, 'Call');
 

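The new isArray branch converts list-valued results element by element before the per-node conversion runs. A hypothetical simplification of that recursion (the real getAgASTValue also rebuilds flags, attributes, and child properties):

// Illustrative only; names and shapes beyond the isArray branch are assumptions.
const convert = (miniNode) => {
  if (!miniNode) return miniNode;
  if (Array.isArray(miniNode)) return miniNode.map((node) => convert(node));
  return { type: miniNode.type /* ...language, attributes, properties */ };
};

convert([{ type: 'Identifier' }, { type: 'String' }]); // both elements are converted
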
package/lib/languages/cstml.js
CHANGED
@@ -68,77 +68,62 @@ const covers = buildCovers({
     'Gap',
     'Node',
     'IdentifierPath',
-    'OpenFragmentTag',
     'OpenNodeTag',
     'CloseNodeTag',
-    'CloseFragmentTag',
     'Terminal',
     'Number',
     'Digit',
     'String',
     'Content',
     'UnsignedInteger',
+    'Flags',
   ],
-  [sym.fragment]: ['Attributes'
+  [sym.fragment]: ['Attributes'],
   Attribute: ['MappingAttribute', 'BooleanAttribute'],
   AttributeValue: ['String', 'Number'],
   TagType: ['Identifier', 'GlobalIdentifier'],
-  Terminal: ['Literal', 'Trivia'
+  Terminal: ['Literal', 'Trivia'],
   PropertyValue: ['Gap', 'Node', 'Null'],
-  EmbeddedTerminal: ['Literal'
+  EmbeddedTerminal: ['Literal'],
   Number: ['Integer', 'Infinity'],
 });
 
 const grammar = class CSTMLMiniparserGrammar {
-  Fragment(p) {
-    p.eatMatchTrivia(_);
-    p.eatProduction('OpenFragmentTag', { path: 'open' });
-    p.eatMatchTrivia(_);
-    while (p.match(/<[^/]/y) || p.atExpression) {
-      p.eatProduction('Node', { path: 'root' });
-      p.eatMatchTrivia(_);
-    }
-    p.eatProduction('CloseFragmentTag', { path: 'close' });
-    p.eatMatchTrivia(_);
-  }
-
   // @Node
   Document(p) {
     p.eatProduction('DoctypeTag', { path: 'doctype' });
-    p.eatProduction('
+    p.eatProduction('Node', { path: 'tree' });
   }
 
   // @Node
   DoctypeTag(p) {
-    p.eat('<!', PN, { path: '
+    p.eat('<!', PN, { path: 'openToken' });
     p.eatProduction('UnsignedInteger', { path: 'version' });
-    p.eat(':', PN, { path: '
-    p.eat('cstml', KW, { path: '
+    p.eat(':', PN, { path: 'versionSeparatorToken' });
+    p.eat('cstml', KW, { path: 'doctypeToken' });
 
     let sp = p.eatMatchTrivia(_);
 
-    if ((sp && p.match(
+    if ((sp && p.match(/[a-zA-Z]+/y)) || p.atExpression) {
       p.eatProduction('Attributes');
       sp = p.eatMatchTrivia(_);
     }
 
-    p.eat('>', PN, { path: '
+    p.eat('>', PN, { path: 'closeToken' });
   }
 
   // @Node
   Null(p) {
-    p.eat('null', KW, { path: '
+    p.eat('null', KW, { path: 'sigilToken' });
   }
 
   // @Node
   Gap(p) {
-    p.eat('<//>', PN, { path: '
+    p.eat('<//>', PN, { path: 'sigilToken' });
   }
 
   // @Node
   Node(p) {
-    if (p.match('<>')) throw new Error('Fragment is not a node');
-
     let open = p.eatProduction('OpenNodeTag', { path: 'open' });
 
     p.eatMatchTrivia(_);
@@ -160,11 +145,15 @@ const grammar = class CSTMLMiniparserGrammar {
       const { token } = props || {};
 
       if (token) {
-        p.
+        if (p.match(/<\*?@/y)) {
+          p.eatProduction('Node');
+        } else {
+          p.eatProduction('Literal');
+        }
       } else {
         if (p.match(/<\*?#/y)) {
           p.eatProduction('Node');
-        } else if (p.match(
+        } else if (p.match(/[a-zA-Z]/y)) {
          p.eatProduction('Property');
         } else if (p.match(/['"]/y)) {
          p.eatProduction('Literal');
@@ -176,8 +165,6 @@ const grammar = class CSTMLMiniparserGrammar {
   Property(p) {
     p.eatProduction('Reference', { path: 'reference' });
     p.eatMatchTrivia(_);
-    p.eat(':', PN, { path: 'mapOperator' });
-    p.eatMatchTrivia(_);
     p.eatProduction('PropertyValue', { path: 'value' });
   }
 
@@ -192,67 +179,64 @@ const grammar = class CSTMLMiniparserGrammar {
   }
 
   // @Node
-
-    p.
-    p.
+  Flags(p) {
+    let tr = p.eatMatch('#', PN, { path: 'triviaToken' });
+    p.eatMatch('~', PN, { path: 'intrinsicToken' });
+    p.eatMatch('*', PN, { path: 'tokenToken' });
+    let esc = p.eatMatch('@', PN, { path: 'escapeToken' });
+    let exp = p.eatMatch('+', PN, { path: 'expressionToken' });
+
+    if ((tr && esc) || (exp && (tr || esc))) throw new Error();
   }
 
   // @Node
   OpenNodeTag(p) {
-    p.eat('<', PN, { path: '
+    p.eat('<', PN, { path: 'openToken', startSpan: 'Tag', balanced: '>' });
 
-    let
-
-
-
-
-    if ((tr && esc) || (exp && (tr || esc))) throw new Error();
+    let flags = null;
+    if (!p.atExpression) {
+      flags = p.eatProduction('Flags', { path: 'flags' });
+    }
 
     p.eatProduction('TagType', { path: 'type' });
 
     let sp = p.eatMatchTrivia(_);
 
     let iv;
-    if (
+    if (flags.properties.intrinsic && sp && (p.match(/['"/]/y) || p.atExpression)) {
       iv = p.eatProduction('String', { path: 'intrinsicValue' });
 
       sp = p.eatMatchTrivia(_);
     }
 
-    if ((sp && p.match(
+    if ((sp && p.match(/[a-zA-Z]+/y)) || p.atExpression) {
      p.eatProduction('Attributes');
      sp = p.eatMatchTrivia(_);
     }
 
     p.eatMatchTrivia(_);
     if (iv) {
-      p.eat('/', PN, { path: '
+      p.eat('/', PN, { path: 'selfClosingTagToken' });
     }
-    p.eat('>', PN, { path: '
+    p.eat('>', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
   }
 
   // @Node
   CloseNodeTag(p) {
-    p.eat('</', PN, { path: '
-    p.eat('>', PN, { path: '
-  }
-
-  // @Node
-  CloseFragmentTag(p) {
-    p.eat('</', PN, { path: 'open', startSpan: 'Tag', balanced: '>' });
-    p.eat('>', PN, { path: 'close', endSpan: 'Tag', balancer: true });
+    p.eat('</', PN, { path: 'openToken', startSpan: 'Tag', balanced: '>' });
+    p.eat('>', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
   }
 
   // @Fragment
   Attributes(p) {
     let sp = true;
-    while (sp && (p.match(
+    while (sp && (p.match(/[a-zA-Z]+/y) || p.atExpression)) {
       if (p.atExpression) {
         p.eatProduction('Attributes'); // ??
       } else {
         p.eatProduction('Attribute', { path: 'attributes[]' });
       }
-      if (p.match(/\s
+      if (p.match(/\s+[a-zA-Z]/y) || (p.match(/\s+$/y) && !p.quasisDone)) {
        sp = p.eatMatchTrivia(_);
       } else {
        sp = false;
@@ -262,7 +246,7 @@ const grammar = class CSTMLMiniparserGrammar {
 
   // @Cover
   Attribute(p) {
-    if (p.match(
+    if (p.match(/[a-zA-Z][[a-zA-Z]-_]*\s*=/y)) {
       p.eatProduction('MappingAttribute');
     } else {
       p.eatProduction('BooleanAttribute');
@@ -271,15 +255,15 @@ const grammar = class CSTMLMiniparserGrammar {
 
   // @Node
   BooleanAttribute(p) {
-    p.eat('!', KW, { path: '
-    p.eat(
+    p.eat('!', KW, { path: 'negateToken' });
+    p.eat(/[a-zA-Z]+/y, ID, { path: 'key' });
   }
 
   // @Node
   MappingAttribute(p) {
-    p.eat(
+    p.eat(/[a-zA-Z]+/y, LIT, { path: 'key' });
     p.eatMatchTrivia(_);
-    p.eat('=', PN, { path: '
+    p.eat('=', PN, { path: 'mapToken' });
     p.eatMatchTrivia(_);
     p.eatProduction('AttributeValue', { path: 'value' });
   }
@@ -294,9 +278,9 @@ const grammar = class CSTMLMiniparserGrammar {
   }
 
   TagType(p) {
-    if (p.match(/[
+    if (p.match(/[[a-zA-Z].]+:/y)) {
       p.eatProduction('LanguageReference', { path: 'language' });
-      p.eat(':', PN, { path: '
+      p.eat(':', PN, { path: 'namespaceSeparatorToken' });
       p.eatProduction('Identifier', { path: 'type' });
     } else {
       p.eatProduction('Identifier', { path: 'type' });
@@ -321,16 +305,12 @@ const grammar = class CSTMLMiniparserGrammar {
 
   // @Node
   Identifier(p) {
-    p.eatLiteral(
+    p.eatLiteral(/[a-zA-Z][[a-zA-Z]-_]*/y);
   }
 
   // @Cover
   Terminal(p) {
-    if (p.match(
-      p.eatProduction('Escape');
-    } else if (p.match(/#['"]/y)) {
-      p.eatProduction('Trivia');
-    } else if (p.match(/['"]/y)) {
+    if (p.match(/['"]/y)) {
       p.eatProduction('Literal');
     } else {
       throw new Error();
@@ -341,7 +321,9 @@ const grammar = class CSTMLMiniparserGrammar {
   Reference(p) {
     p.eatProduction('Identifier', { path: 'name' });
     p.eatMatchTrivia(_);
-    p.eatMatch('[]', PN, { path: '
+    p.eatMatch('[]', PN, { path: 'arrayToken' });
+    p.eatMatchTrivia(_);
+    p.eat(':', PN, { path: 'mapToken' });
   }
 
   // @Cover
@@ -355,21 +337,6 @@ const grammar = class CSTMLMiniparserGrammar {
     }
   }
 
-  // @Node
-  Escape(p) {
-    p.eat('!', PN, { path: 'escapeOperator' });
-    p.eatProduction('String', { path: 'rawValue' });
-    p.eatMatchTrivia(_);
-    p.eat(':', PN, { path: 'rawOperator' });
-    p.eatProduction('String', { path: 'value' });
-  }
-
-  // @Node
-  Trivia(p) {
-    p.eat('#', PN, { path: 'trivializeOperator' });
-    p.eatProduction('String', { path: 'value' });
-  }
-
   // @Node
   Literal(p) {
     p.eatProduction('String', { path: 'value' });
@@ -417,11 +384,11 @@ const grammar = class CSTMLMiniparserGrammar {
 
     const span = q === '"' ? 'Double' : 'Single';
 
-    p.eat(q, PN, { path: '
+    p.eat(q, PN, { path: 'openToken', startSpan: span, balanced: q });
     while (p.match(/./sy) || p.atExpression) {
      p.eatProduction('Content', { path: 'content' });
     }
-    p.eat(q, PN, { path: '
+    p.eat(q, PN, { path: 'closeToken', endSpan: span, balancer: true });
   }
 
   // @Node

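The flag sigils that OpenNodeTag previously consumed inline are now parsed by a dedicated Flags production, which other grammars can reuse (spamex below eats CSTML:Flags). A standalone restatement of the validation rule it enforces, illustrative rather than normative:

// Sigils per the production above: '#' trivia, '~' intrinsic, '*' token, '@' escape, '+' expression.
const validateFlags = ({ trivia, escape, expression }) => {
  if ((trivia && escape) || (expression && (trivia || escape))) {
    throw new Error('invalid flag combination');
  }
};

validateFlags({ trivia: true, escape: false, expression: false }); // ok
validateFlags({ trivia: true, escape: true, expression: false });  // throws
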
package/lib/languages/instruction.js
CHANGED
@@ -35,9 +35,8 @@ const covers = buildCovers({
 const grammar = class InstructionMiniparserGrammar {
   // @Node
   Call(p) {
-    p.eat(
+    p.eat(/[a-zA-Z]+/y, ID, { path: 'verb' });
     p.eatMatchTrivia(_);
-    p.eatMatch(/[!#]/y, PN, { path: 'verbSuffix' });
     p.eatProduction('Tuple', { path: 'arguments' });
   }
 
@@ -59,8 +58,8 @@ const grammar = class InstructionMiniparserGrammar {
       p.eatProduction('Boolean');
     } else if (p.match('null')) {
       p.eatProduction('Null');
-    } else if (p.match(
-      p.eat(
+    } else if (p.match(/[a-zA-Z]/y)) {
+      p.eat(/[a-zA-Z]+/y, ID, p.m.attributes);
     } else if (p.match('<')) {
       p.eatProduction('Spamex:Matcher');
     }
@@ -68,7 +67,7 @@ const grammar = class InstructionMiniparserGrammar {
 
   // @Node
   Object(p) {
-    p.eat('{', PN, { path: '
+    p.eat('{', PN, { path: 'openToken', balanced: '}' });
 
     p.eatMatchTrivia(_);
 
@@ -82,21 +81,21 @@ const grammar = class InstructionMiniparserGrammar {
 
     p.eatMatchTrivia(_);
 
-    p.eat('}', PN, { path: '
+    p.eat('}', PN, { path: 'closeToken', balancer: true });
   }
 
   // @Node
   Property(p) {
-    p.eat(
+    p.eat(/[a-zA-Z]+/y, LIT, { path: 'key' });
     p.eatMatchTrivia(_);
-    p.eat(':', PN, { path: '
+    p.eat(':', PN, { path: 'mapToken' });
     p.eatMatchTrivia(_);
     p.eatProduction('Expression', { path: 'value' });
   }
 
   // @Node
   Array(p) {
-    p.eat('[', PN, { path: '
+    p.eat('[', PN, { path: 'openToken', balanced: ']' });
 
     p.eatMatchTrivia(_);
 
@@ -108,12 +107,12 @@ const grammar = class InstructionMiniparserGrammar {
       first = false;
     }
 
-    p.eat(']', PN, { path: '
+    p.eat(']', PN, { path: 'closeToken', balancer: true });
   }
 
   // @Node
   Tuple(p) {
-    p.eat('(', PN, { path: '
+    p.eat('(', PN, { path: 'openToken', balanced: ')' });
 
     let sep = p.eatMatchTrivia(_);
 
@@ -124,17 +123,17 @@ const grammar = class InstructionMiniparserGrammar {
       i++;
     }
 
-    p.eat(')', PN, { path: '
+    p.eat(')', PN, { path: 'closeToken', balancer: true });
   }
 
   // @Node
   Boolean(p) {
-    p.eat(/true|false/y, KW, { path: '
+    p.eat(/true|false/y, KW, { path: 'sigilToken' });
   }
 
   // @Node
   Null(p) {
-    p.eat('null', KW, { path: '
+    p.eat('null', KW, { path: 'sigilToken' });
   }
 };
 

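After this change a Call is an alphabetic verb followed directly by a Tuple of arguments; the optional [!#] verb suffix is no longer parsed. A rough shape check, not the real parser:

const callShape = /^[a-zA-Z]+\s*\(/;
callShape.test('eat(null)');  // true
callShape.test('eat!(null)'); // false - this form relied on the removed verbSuffix token
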
package/lib/languages/regex.js
CHANGED
@@ -12,7 +12,7 @@ const dependencies = {};
 const covers = buildCovers({
   [node]: [
     'RegExpLiteral',
-    '
+    'Flags',
     'Pattern',
     'Alternative',
     'Group',
@@ -55,7 +55,6 @@ const flags = {
   unicode: 'u',
   sticky: 'y',
 };
-const flagsReverse = Object.fromEntries(Object.entries(flags).map(([key, value]) => [value, key]));
 
 const PN = 'Punctuator';
 const KW = 'Keyword';
@@ -97,10 +96,10 @@ const cookEscape = (escape, span) => {
     return String.fromCodePoint(parseInt(hexMatch[1], 16));
   }
 
-  let litMatch = /\\([nrt0])/y.exec(escape);
+  let litMatch = /\\([\\nrt0])/y.exec(escape);
 
   if (litMatch) {
-    return escapables.get(litMatch[1]);
+    return escapables.get(litMatch[1]) || litMatch[1];
   }
 
   let specialMatch = getSpecialPattern(span).exec(escape.slice(1));
@@ -115,26 +114,32 @@ const cookEscape = (escape, span) => {
 const grammar = class RegexMiniparserGrammar {
   // @Node
   Pattern(p) {
-    p.eat('/', PN, { path: '
+    p.eat('/', PN, { path: 'openToken', balanced: '/' });
     p.eatProduction('Alternatives', { path: 'alternatives[]' });
-    p.eat('/', PN, { path: '
-
+    p.eat('/', PN, { path: 'closeToken', balancer: true });
+
+    if (p.match(/[gimsuy]/y || p.atExpression)) {
+      p.eatProduction('Flags', { path: 'flags' });
+    }
   }
 
+  // @Node
   Flags(p) {
-    const
+    const flagsStr = p.match(/[gimsuy]+/y) || '';
 
-    if (!unique(
+    if (flagsStr && !unique(flagsStr)) throw new Error('flags must be unique');
 
-
-
+    const attrs = {};
+
+    for (const { 0: name, 1: chr } of Object.entries(flags)) {
+      attrs[name] = flagsStr.includes(chr);
     }
-  }
 
-
-
-
-
+    for (const flag of flagsStr) {
+      p.eat(flag, KW, { path: 'tokens[]' });
+    }
+
+    return { attrs };
   }
 
   Alternatives(p) {
@@ -149,7 +154,7 @@ const grammar = class RegexMiniparserGrammar {
   }
 
   Elements(p) {
-    while (p.match(/[^|]/y)) {
+    while (p.match(/[^|]/y || p.atExpression)) {
       p.eatProduction('Element');
     }
   }
@@ -181,16 +186,16 @@ const grammar = class RegexMiniparserGrammar {
 
   // @Node
   Group(p) {
-    p.eat('(?:', PN, { path: '
+    p.eat('(?:', PN, { path: 'openToken', balanced: ')' });
     p.eatProduction('Alternatives', { path: 'alternatives[]' });
-    p.eat(')', PN, { path: '
+    p.eat(')', PN, { path: 'closeToken', balancer: true });
   }
 
   // @Node
   CapturingGroup(p) {
-    p.eat('(', PN, { path: '
+    p.eat('(', PN, { path: 'openToken', balanced: ')' });
     p.eatProduction('Alternatives', { path: 'alternatives[]' });
-    p.eat(')', PN, { path: '
+    p.eat(')', PN, { path: 'closeToken', balancer: true });
   }
 
   Assertion(p) {
@@ -206,20 +211,20 @@ const grammar = class RegexMiniparserGrammar {
   // @CoveredBy('Assertion')
   // @Node
   StartOfInputAssertion(p) {
-    p.eat('^', KW, { path: '
+    p.eat('^', KW, { path: 'sigilToken' });
   }
 
   // @CoveredBy('Assertion')
   // @Node
   EndOfInputAssertion(p) {
-    p.eat('$', KW, { path: '
+    p.eat('$', KW, { path: 'sigilToken' });
   }
 
   // @CoveredBy('Assertion')
   // @Node
   WordBoundaryAssertion(p) {
     let attrs;
-    if (p.eatMatch('\\', ESC, { path: '
+    if (p.eatMatch('\\', ESC, { path: 'escapeToken' })) {
       const m = p.eat(/b/iy, KW, { path: 'value' });
       attrs = { negate: m === 'B' };
     } else {
@@ -242,6 +247,8 @@ const grammar = class RegexMiniparserGrammar {
      )
    ) {
      // done
+    } else if (p.eatMatchEscape(new RegExp(String.raw`\\${specialPattern.source}`, 'y'))) {
+      // done
    } else {
      throw new Error('escape required');
    }
@@ -256,9 +263,9 @@ const grammar = class RegexMiniparserGrammar {
 
   // @Node
   CharacterClass(p) {
-    p.eat('[', PN, { path: '
+    p.eat('[', PN, { path: 'openToken', balanced: ']', startSpan: 'CharacterClass' });
 
-    const negate = !!p.eatMatch('^', KW, { path: '
+    const negate = !!p.eatMatch('^', KW, { path: 'negateToken', boolean: true });
 
     let first = !negate;
     while (p.match(/./sy)) {
@@ -266,7 +273,9 @@ const grammar = class RegexMiniparserGrammar {
       first = false;
     }
 
-    p.eat(']', PN, { path: '
+    p.eat(']', PN, { path: 'closeToken', balancer: true, endSpan: 'CharacterClass' });
+
+    return { attrs: { negate } };
   }
 
   // @Cover
@@ -284,7 +293,7 @@ const grammar = class RegexMiniparserGrammar {
 
   // @Node
   Gap(p) {
-    p.eat('\\', PN, { path: '
+    p.eat('\\', PN, { path: 'escapeToken' });
     p.eat('g', KW, { path: 'value' });
   }
 
@@ -294,7 +303,7 @@ const grammar = class RegexMiniparserGrammar {
       path: 'min',
       ...when(first, { span: 'CharacterClass:First' }),
     });
-    p.eat('-', PN, { path: '
+    p.eat('-', PN, { path: 'rangeToken' });
     p.eatProduction('Character', { path: 'max' });
   }
 
@@ -321,13 +330,13 @@ const grammar = class RegexMiniparserGrammar {
   // @CoveredBy('CharacterSet')
   // @Node
   AnyCharacterSet(p) {
-    p.eat('.', KW, { path: '
+    p.eat('.', KW, { path: 'sigilToken' });
   }
 
   // @CoveredBy('CharacterSet')
   // @Node
   WordCharacterSet(p) {
-    p.eat('\\', PN, { path: '
+    p.eat('\\', PN, { path: 'escapeToken' });
 
     let attrs;
 
@@ -343,7 +352,7 @@ const grammar = class RegexMiniparserGrammar {
   // @CoveredBy('CharacterSet')
   // @Node
   SpaceCharacterSet(p) {
-    p.eat('\\', PN, { path: '
+    p.eat('\\', PN, { path: 'escapeToken' });
 
     let attrs;
 
@@ -359,7 +368,7 @@ const grammar = class RegexMiniparserGrammar {
   // @CoveredBy('CharacterSet')
   // @Node
   DigitCharacterSet(p) {
-    p.eat('\\', PN, { path: '
+    p.eat('\\', PN, { path: 'escapeToken' });
 
     let attrs;
 
@@ -379,15 +388,15 @@ const grammar = class RegexMiniparserGrammar {
     let attrs;
 
     if (p.eatMatch('*', KW, { path: 'value' })) {
-      const greedy = !p.eatMatch('?', KW, { path: '
+      const greedy = !p.eatMatch('?', KW, { path: 'greedyToken' });
       attrs = { min: 0, max: Infinity, greedy };
     } else if (p.eatMatch('+', KW, { path: 'value' })) {
-      const greedy = !p.eatMatch('?', KW, { path: '
+      const greedy = !p.eatMatch('?', KW, { path: 'greedyToken' });
       attrs = { min: 1, max: Infinity, greedy };
     } else if (p.eatMatch('?', KW, { path: 'value' })) {
      attrs = { min: 0, max: 1, greedy: true };
     } else if (p.match('{')) {
-      p.eat('{', PN, { path: '
+      p.eat('{', PN, { path: 'openToken', balanced: '}' });
 
      let max;
      let min = p.eat(/\d+/y, 'Number', { path: 'min' });
@@ -396,9 +405,9 @@ const grammar = class RegexMiniparserGrammar {
        max = p.eatMatch(/\d+/y, 'Number', { path: 'max' });
      }
 
-      p.eat('}', PN, { path: '
+      p.eat('}', PN, { path: 'closeToken', balancer: true });
 
-      const greedy = !p.eatMatch('?', KW, { path: '
+      const greedy = !p.eatMatch('?', KW, { path: 'greedyToken' });
 
      attrs = { min: min && parseInt(min, 10), max: max && parseInt(max, 10), greedy };
     }

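The new Flags production derives one boolean attribute per known flag from the matched flag string. A self-contained sketch of that attrs computation; only the unicode and sticky entries of the flags map are visible in this diff, so the remaining keys are assumptions:

const flags = { global: 'g', ignoreCase: 'i', multiline: 'm', dotAll: 's', unicode: 'u', sticky: 'y' };

const flagAttrs = (flagsStr) => {
  if (new Set(flagsStr).size !== flagsStr.length) throw new Error('flags must be unique');
  const attrs = {};
  for (const { 0: name, 1: chr } of Object.entries(flags)) {
    attrs[name] = flagsStr.includes(chr); // same loop as the production above
  }
  return attrs;
};

flagAttrs('gy'); // => { global: true, ignoreCase: false, multiline: false, dotAll: false, unicode: false, sticky: true }
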
package/lib/languages/spamex.js
CHANGED
@@ -25,7 +25,7 @@ const covers = buildCovers({
 const grammar = class SpamexMiniparserGrammar {
   // @Cover
   Matcher(p) {
-    if (p.match(/<
+    if (p.match(/<[^!/]/y)) {
       p.eatProduction('NodeMatcher');
     } else if (p.match(/['"]/y)) {
       p.eatProduction('CSTML:String');
@@ -38,21 +38,22 @@ const grammar = class SpamexMiniparserGrammar {
 
   // @Node
   NodeMatcher(p) {
-    p.eat('<', PN, { path: '
+    p.eat('<', PN, { path: 'openToken', startSpan: 'Tag', balanced: '>' });
 
-
-
-
-    let exp = p.eatMatch('+', PN, { path: 'expressionFlag' });
-
-    if ((tr && esc) || (exp && (tr || esc))) throw new Error();
+    if (!p.atExpression) {
+      p.eatProduction('CSTML:Flags', { path: 'flags' });
+    }
 
-    if (p.match(
-      p.eat(
-      p.eat(':', PN, { path: '
-      p.eat(
+    if (p.match(/[a-zA-Z]+:/y)) {
+      p.eat(/[a-zA-Z]+/y, ID, { path: 'language' });
+      p.eat(':', PN, { path: 'namespaceSeparatorToken' });
+      p.eat(/[a-zA-Z]+/y, ID, { path: 'type' });
     } else {
-      p.
+      if (p.atExpression) {
+        p.eatProduction('Identifier', { path: 'type' });
+      } else {
+        p.eatMatch(/[a-zA-Z]+/y, ID, { path: 'type' });
+      }
     }
 
     let sp = p.eatMatchTrivia(_);
@@ -63,20 +64,20 @@ const grammar = class SpamexMiniparserGrammar {
       sp = p.eatMatchTrivia(_);
     }
 
-    if ((sp && p.match(
+    if ((sp && p.match(/[a-zA-Z]+/y)) || p.atExpression) {
       p.eatProduction('Attributes', { path: 'attributes[]' });
       sp = p.eatMatchTrivia(_);
     }
 
     p.eatMatchTrivia(_);
-    p.eat('>', PN, { path: '
+    p.eat('>', PN, { path: 'closeToken', endSpan: 'Tag', balancer: true });
   }
 
   Attributes(p) {
     let sp = true;
-    while (sp && (p.match(
+    while (sp && (p.match(/[a-zA-Z]+/y) || p.atExpression)) {
       p.eatProduction('Attribute');
-      if (p.match(/\s
+      if (p.match(/\s+[a-zA-Z]/y)) {
        sp = p.eatMatchTrivia(_);
      }
    }
@@ -84,7 +85,7 @@ const grammar = class SpamexMiniparserGrammar {
 
   // @Cover
   Attribute(p) {
-    if (p.match(
+    if (p.match(/[a-zA-Z]+\s*=/y)) {
       p.eatProduction('MappingAttribute');
     } else {
       p.eatProduction('BooleanAttribute');
@@ -93,14 +94,14 @@ const grammar = class SpamexMiniparserGrammar {
 
   // @Node
   BooleanAttribute(p) {
-    p.eat(
+    p.eat(/[a-zA-Z]+/y, LIT, { path: 'key' });
   }
 
   // @Node
   MappingAttribute(p) {
-    p.eat(
+    p.eat(/[a-zA-Z]+/y, LIT, { path: 'key' });
     p.eatMatchTrivia(_);
-    p.eat('=', PN, { path: '
+    p.eat('=', PN, { path: 'mapToken' });
     p.eatMatchTrivia(_);
     p.eatProduction('AttributeValue', { path: 'value' });
   }
@@ -125,7 +126,7 @@ const grammar = class SpamexMiniparserGrammar {
 
   // @Node
   Identifier(p) {
-    p.eatLiteral(
+    p.eatLiteral(/[a-zA-Z]+/y);
   }
 };
 

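NodeMatcher now delegates flag parsing to the shared CSTML:Flags production and accepts a language-qualified type. An informal approximation of the accepted tag shapes, illustrative only:

const nodeMatcherShape = /^<[#~*@+]*(?:[a-zA-Z]+:)?[a-zA-Z]*/;
nodeMatcherShape.test('<Identifier>');     // bare type
nodeMatcherShape.test('<*CSTML:Keyword>'); // token flag plus language-qualified type
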
package/lib/miniparser.js
CHANGED
@@ -10,6 +10,8 @@ const { set, isRegex, isArray, getPrototypeOf, buildNode } = require('./utils.js
 
 class TemplateParser {
   constructor(rootLanguage, quasis, expressions) {
+    if (!quasis) throw new Error();
+
     this.rootLanguage = rootLanguage;
     this.spans = [];
     this.quasis = quasis;
package/lib/path.js
CHANGED
@@ -15,7 +15,7 @@ const stripPathBraces = (str) => (str.endsWith('[]') ? str.slice(0, -2) : str);
 const parsePath = (str) => {
   const name = stripPathBraces(str);
 
-  if (
+  if (!/[a-zA-Z]+$/.test(name)) throw new Error();
 
   return { isArray: name !== str, name };
 };
package/lib/print.js
CHANGED
@@ -84,7 +84,7 @@ const buildDoctypeTag = () => {
   return freeze({ type: 'DoctypeTag', value: { doctype: 'cstml', version: 0 } });
 };
 
-const buildNodeOpenTag = (flags, language, type, attributes = {}) => {
+const buildNodeOpenTag = (flags = {}, language = null, type = null, attributes = {}) => {
   let { token, trivia, escape } = flags;
 
   token = !!token;
@@ -97,27 +97,17 @@ const buildNodeOpenTag = (flags, language, type, attributes = {}) => {
   });
 };
 
-const nodeFlags = freeze({ escape: false, trivia: false, token: false });
-
-const buildFragmentOpenTag = (flags = nodeFlags) => {
-  return freeze({ type: 'OpenFragmentTag', value: freeze({ flags: freeze(flags) }) });
-};
-
 const buildNodeCloseTag = (type = null, language = null) => {
   return freeze({ type: 'CloseNodeTag', value: freeze({ language, type }) });
 };
 
-const buildFragmentCloseTag = () => {
-  return freeze({ type: 'CloseFragmentTag', value: freeze({}) });
-};
-
 function* streamFromTree(rootNode) {
   if (!rootNode || rootNode.type === 'Gap') {
     return rootNode;
   }
 
   yield buildDoctypeTag();
-  yield
+  yield buildNodeOpenTag(undefined, rootNode.language[0]);
 
   let stack = emptyStack.push(buildFrame(rootNode));
 
@@ -173,7 +163,7 @@ function* streamFromTree(rootNode) {
 
     stack = stack.pop();
   }
-  yield
+  yield buildNodeCloseTag();
 }
 
 const printExpression = (expr) => {
@@ -262,19 +252,10 @@ const printTerminal = (terminal) => {
      return `<${printFlags(flags)}${printTagPath(tagLanguage, type)}${attributesFrag}>`;
    }
 
-    case 'OpenFragmentTag': {
-      const { flags } = terminal.value;
-      return `<${printFlags(flags)}>`;
-    }
-
    case 'CloseNodeTag': {
      return `</>`;
    }
 
-    case 'CloseFragmentTag': {
-      return `</>`;
-    }
-
    default:
      throw new Error();
  }
@@ -296,7 +277,7 @@ const printPrettyCSTML = (tree, indent = ' ') => {
      printed += '\n';
    }
 
-    if (
+    if (terminal.type === 'CloseNodeTag') {
      indentLevel--;
    }
 
@@ -307,7 +288,7 @@ const printPrettyCSTML = (tree, indent = ' ') => {
    }
    printed += printTerminal(terminal);
 
-    if (
+    if (terminal.type === 'OpenNodeTag') {
      indentLevel++;
    }
 

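With the fragment tag cases gone, streamFromTree brackets a document in an untyped node open/close pair and printPrettyCSTML keys indentation off OpenNodeTag/CloseNodeTag alone. A rough sketch of the resulting terminal stream for a single-node tree; the open-tag value shape is assumed from the builders above:

const stream = [
  { type: 'DoctypeTag', value: { doctype: 'cstml', version: 0 } },
  { type: 'OpenNodeTag', value: { /* flags, language, type, attributes */ } }, // replaces OpenFragmentTag
  /* ...terminals for the tree's nodes... */
  { type: 'CloseNodeTag', value: { language: null, type: null } },             // replaces CloseFragmentTag
];
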
package/package.json
CHANGED