@bablr/helpers 0.1.7 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/decorators.js +47 -4
- package/lib/enhancers.js +16 -0
- package/lib/grammar.js +56 -47
- package/lib/productions.generated.js +82 -68
- package/lib/productions.js +11 -12
- package/lib/source.js +173 -0
- package/lib/trivia.js +24 -30
- package/package.json +9 -5
- package/lib/path.js +0 -1
- package/lib/token.js +0 -23
package/lib/decorators.js
CHANGED

@@ -1,8 +1,51 @@
-
+import * as sym from './symbols.js';
 
-export const
+export const AllowEmpty = (desc, context) => {
   context.addInitializer(function () {
-
-
+    let emptyables = this.emptyables;
+
+    if (!emptyables) {
+      emptyables = this.emptyables = new Set();
+    }
+
+    emptyables.add(context.name);
+  });
+};
+
+export const CoveredBy = (type) => (desc, context) => {
+  context.addInitializer(function () {
+    let covers = this.covers;
+
+    if (!covers) {
+      covers = this.covers = new Map();
+    }
+
+    let coveredTypes = covers.get(type);
+
+    if (!coveredTypes) {
+      coveredTypes = new Set();
+      covers.set(type, coveredTypes);
+    }
+
+    coveredTypes.add(context.name);
+  });
+};
+
+export const InjectFrom = (obj) => (_stub, context) => {
+  if (!Object.hasOwn(obj, context.name)) {
+    throw new Error('Bad injection');
+  }
+
+  return obj[context.name];
+};
+
+export const Node = (desc, context) => {
+  return CoveredBy(sym.node)(desc, context);
+};
+
+export const Attributes = (attributes) => (desc, context) => {
+  context.addInitializer(function () {
+    this.attributes = this.attributes || new Map();
+    this.attributes.set(context.name, attributes);
   });
 };
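
For orientation, here is a minimal sketch of how these decorators might be applied in a grammar class. The class, production names, and the '@bablr/helpers/decorators' import path are illustrative assumptions (the exports map for this module is not shown in this diff), and the decorators require an environment with stage-3 decorator support.

    import { Node, CoveredBy, AllowEmpty } from '@bablr/helpers/decorators';

    class ExampleGrammar {
      // Registers this production under the `node` symbol cover and the
      // 'Expression' cover via context.addInitializer.
      @Node
      @CoveredBy('Expression')
      *NumberLiteral() {
        // ...
      }

      // Adds this production's name to the instance's `emptyables` set.
      @AllowEmpty
      *Separators() {
        // ...
      }
    }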

package/lib/enhancers.js
CHANGED

@@ -1,3 +1,9 @@
+import {
+  enhanceStrategyBuilderWithDebugLogging as logStrategy,
+  enhanceStrategyBuilderWithEmittedLogging as logEmitted,
+} from '@bablr/strategy_enhancer-debug-log';
+import { enhanceProductionWithDebugLogging as createProductionLogger } from '@bablr/language_enhancer-debug-log';
+
 const { getOwnPropertyNames, hasOwn } = Object;
 
 export const memoize = (original) => {
@@ -51,3 +57,13 @@ export const mapProductions = (fn, Grammar) => {
 
   return MappedGrammar;
 };
+
+export const buildDebugEnhancers = (log) => {
+  return {
+    agastStrategy: (strategy) => logEmitted(strategy, '>>> ', log),
+    bablrStrategy: (strategy) => logStrategy(strategy, ' ', log),
+    bablrProduction: createProductionLogger(' ', log),
+  };
+};
+
+export const debugEnhancers = buildDebugEnhancers();
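
As a rough usage sketch (the '@bablr/helpers/enhancers' import path is assumed, and the exact signature of the log callback is not shown in this diff), buildDebugEnhancers threads a single log function through all three enhancer slots, while debugEnhancers is the same object prebuilt with no log argument:

    import { buildDebugEnhancers } from '@bablr/helpers/enhancers';

    // Collect debug output instead of printing it.
    const lines = [];
    const enhancers = buildDebugEnhancers((...args) => lines.push(args.join(' ')));

    // enhancers.agastStrategy and enhancers.bablrStrategy wrap strategies;
    // enhancers.bablrProduction wraps productions, for an evaluator that accepts them.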

package/lib/grammar.js
CHANGED

@@ -6,6 +6,28 @@ const { isArray } = Array;
 const isSymbol = (value) => typeof value === 'symbol';
 const isType = (value) => isString(value) || isSymbol(value);
 
+export const resolveLanguage = (language, path) => {
+  let l = language;
+
+  if (!l) {
+    throw new Error();
+  }
+
+  if (path == null) {
+    return language;
+  } else if (isString(path)) {
+    l = l.dependencies[path];
+  } else if (isArray(path)) {
+    for (const segment of path) {
+      l = l.dependencies[segment];
+    }
+  } else {
+    throw new Error();
+  }
+
+  return l;
+};
+
 export const explodeSubtypes = (aliases, exploded, types) => {
   for (const type of types) {
     const explodedTypes = aliases.get(type);
@@ -43,65 +65,52 @@ export const getProduction = (grammar, type) => {
   return getPrototypeOf(grammar)[type];
 };
 
-
-
-const
-
-
+const __buildDependentLanguages = (language, languages = new Map()) => {
+  for (const dependentLanguage of Object.values(language.dependencies || {})) {
+    const { canonicalURL } = dependentLanguage;
+    if (languages.has(canonicalURL) && dependentLanguage !== languages.get(canonicalURL)) {
+      throw new Error();
+    }
 
-
+    if (!languages.has(dependentLanguage)) {
+      __buildDependentLanguages(dependentLanguage, languages);
+    }
   }
+  languages.set(language.canonicalURL, language);
 
-
-
-}
+  return languages;
+};
 
-
-
-
+export const buildDependentLanguages = (language, transformLanguage = (l) => l) => {
+  return __buildDependentLanguages(language);
+};
 
-
-return this.current?.done;
-}
+const arrayLast = (arr) => arr[arr.length - 1];
 
-
-
-throw new Error('Cannot advance a coroutine that is done');
-}
-this.current = this.generator.next(value);
-return this;
-}
+export function* zipLanguages(tokens, rootLanguage) {
+  const languages = [rootLanguage];
 
-
-
-
-
-
-}
-}
+  for (const token of tokens) {
+    switch (token.type) {
+      case 'OpenNodeTag': {
+        if (token.value.language) {
+          const dependentLanguage = languages.dependencies[token.value.language];
 
-
-if (!this.done) {
-this.current = { value: undefined, done: true };
+          if (!dependentLanguage) throw new Error('language was not a dependency');
 
-
-
-
-} catch (e) {
-caught = true;
+          languages.push(dependentLanguage);
+        }
+        break;
       }
-
-
+
+      case 'CloseNodeTag': {
+        if (token.value.language !== arrayLast(languages).canonicalURL) {
+          languages.pop();
+        }
+        break;
       }
-} else {
-throw value;
     }
-}
 
-
-// ensures failures can be logged!
-if (!this.done) {
-this.return();
-}
+    yield [token, arrayLast(languages)];
   }
 }
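
A small sketch of the new language helpers (the '@bablr/helpers/grammar' import path is assumed; the language objects are hypothetical and only have the `canonicalURL` / `dependencies` shape these functions read):

    import { resolveLanguage, buildDependentLanguages } from '@bablr/helpers/grammar';

    const Json = { canonicalURL: 'https://example.invalid/json', dependencies: {} };
    const Root = { canonicalURL: 'https://example.invalid/root', dependencies: { Json } };

    resolveLanguage(Root);           // no path: returns Root itself
    resolveLanguage(Root, 'Json');   // string path: a single dependency lookup
    resolveLanguage(Root, ['Json']); // array path: walks nested dependencies

    // A Map keyed by canonicalURL containing Root and, recursively, its dependencies.
    const languages = buildDependentLanguages(Root);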

package/lib/productions.generated.js
CHANGED

@@ -1,7 +1,12 @@
-import { interpolateString as _interpolateString } from "@bablr/
-import {
-import
-
+import { interpolateString as _interpolateString } from "@bablr/agast-helpers/template";
+import { interpolateArrayChildren as _interpolateArrayChildren } from "@bablr/agast-helpers/template";
+import { interpolateArray as _interpolateArray } from "@bablr/agast-helpers/template";
+import * as _l from "@bablr/agast-vm-helpers/languages";
+import * as _t from "@bablr/agast-helpers/shorthand";
+export function* List({
+  value: props,
+  ctx
+}) {
   const {
     element,
     separator,
@@ -10,114 +15,123 @@ export function* List(props, s, ctx) {
   } = ctx.unbox(props);
   let sep, it;
   for (;;) {
-it = yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
-values: [..._interpolateArray(element)
-
-content: _t.node("String", "Content", [_t.lit`elements[]`], {}, {}),
-close: _t.node("String", "Punctuator", [_t.lit`'`], {}, {})
-}, {})],
-close: _t.node("Instruction", "Punctuator", [_t.lit`)`], {}, {})
+    it = yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+      verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
+      arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(element, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+        open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+        values: [..._interpolateArray(element)],
+        close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
      }, {})
    }, {});
    if (it || allowTrailingSeparator) {
-sep = yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
-values: [..._interpolateArray(separator), _t.node(
-open: _t.
-content: _t.node(
-close: _t.
+      sep = yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+        verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
+        arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(separator, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.embedded(_t.s_t_node(_l.Space, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.ref`close`], {
+          open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+          values: [..._interpolateArray(separator), _t.node(_l.CSTML, "String", [_t.ref`open`, _t.ref`content`, _t.ref`close`], {
+            open: _t.s_i_node(_l.CSTML, "Punctuator", "'"),
+            content: _t.node(_l.CSTML, "Content", [_t.lit("separators[]")], {}, {}),
+            close: _t.s_i_node(_l.CSTML, "Punctuator", "'")
          }, {})],
-close: _t.
+          close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
        }, {})
      }, {});
    }
    if (!(sep || allowHoles)) break;
  }
 }
-export function* Any(
+export function* Any({
+  value: matchers,
+  ctx
+}) {
   for (const matcher of ctx.unbox(matchers)) {
-if (yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
+    if (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+      verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
+      arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(matcher, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+        open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
        values: [..._interpolateArray(matcher)],
-close: _t.
+        close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
      }, {})
    }, {})) break;
  }
 }
-export function* All(
+export function* All({
+  value: matchers,
+  ctx
+}) {
   for (const matcher of ctx.unbox(matchers)) {
-yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
+    yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+      verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
+      arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(matcher, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+        open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
        values: [..._interpolateArray(matcher)],
-close: _t.
+        close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
      }, {})
    }, {});
  }
 }
-export function* Match(
+export function* Match({
+  value: cases,
+  ctx
+}) {
   for (const case_ of ctx.unbox(cases)) {
    const {
      0: matcher,
-1: guard
+      1: guard,
+      2: props = _t.node(_l.Instruction, "Null", [_t.ref`value`], {
+        value: _t.s_i_node(_l.Instruction, "Keyword", "null")
+      }, {})
    } = ctx.unbox(case_);
-if (yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
+    if (yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+      verb: _t.s_node(_l.Instruction, "Identifier", "match"),
+      arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(guard, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+        open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
        values: [..._interpolateArray(guard)],
-close: _t.
+        close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
      }, {})
    }, {})) {
-yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
-values: [..._interpolateArray(matcher)],
-
+      yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+        verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
+        arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(matcher, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.embedded(_t.s_t_node(_l.Space, "Space", [_t.lit(" ")], {}, {})), _t.ref`values[]`, _t.embedded(_t.s_t_node(_l.Space, "Space", [_t.lit(" ")], {}, {})), ..._interpolateArrayChildren(props, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+          open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+          values: [..._interpolateArray(matcher), _t.node(_l.Instruction, "Null", [_t.ref`value`], {
+            value: _t.s_i_node(_l.Instruction, "Keyword", "null")
+          }, {}), ..._interpolateArray(props)],
+          close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
        }, {})
      }, {});
      break;
    }
  }
 }
-export function* Punctuator(
-
-
-
-
-
-
-
-
-
-close: _t.node("Instruction", "Punctuator", [_t.lit`)`], {}, {})
+export function* Punctuator({
+  intrinsicValue
+}) {
+  if (!intrinsicValue) throw new Error('Intrinsic productions must have value');
+  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+    verb: _t.s_node(_l.Instruction, "Identifier", "eat"),
+    arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(intrinsicValue, _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+      open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
+      values: [..._interpolateArray(intrinsicValue)],
+      close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
    }, {})
  }, {});
-return {
-attrs
-};
 }
 export const Keyword = Punctuator;
-export function* Optional(
+export function* Optional({
+  value: matchers,
+  ctx
+}) {
   const matchers_ = ctx.unbox(matchers);
   if (matchers_.length > 1) {
     throw new Error('Optional only allows one matcher');
   }
-yield _t.node(
-verb: _t.
-arguments: _t.node(
-open: _t.
+  yield _t.node(_l.Instruction, "Call", [_t.ref`verb`, _t.ref`arguments`], {
+    verb: _t.s_node(_l.Instruction, "Identifier", "eatMatch"),
+    arguments: _t.node(_l.Instruction, "Tuple", [_t.ref`open`, ..._interpolateArrayChildren(matchers_[0], _t.ref`values[]`, _t.embedded(_t.t_node(_l.Comment, null, [_t.embedded(_t.t_node('Space', 'Space', [_t.lit(' ')]))]))), _t.ref`close`], {
+      open: _t.s_i_node(_l.Instruction, "Punctuator", "("),
      values: [..._interpolateArray(matchers_[0])],
-close: _t.
+      close: _t.s_i_node(_l.Instruction, "Punctuator", ")")
    }, {})
  }, {});
 }

package/lib/productions.js
CHANGED

@@ -1,6 +1,6 @@
 import { i } from '@bablr/boot/shorthand.macro';
 
-export function* List(props,
+export function* List({ value: props, ctx }) {
   const {
     element,
     separator,
@@ -10,7 +10,7 @@ export function* List(props, s, ctx) {
 
   let sep, it;
   for (;;) {
-it = yield i`eatMatch(${element}
+    it = yield i`eatMatch(${element})`;
     if (it || allowTrailingSeparator) {
       sep = yield i`eatMatch(${separator} 'separators[]')`;
     }
@@ -18,38 +18,37 @@ export function* List(props, s, ctx) {
   }
 }
 
-export function* Any(matchers,
+export function* Any({ value: matchers, ctx }) {
   for (const matcher of ctx.unbox(matchers)) {
     if (yield i`eatMatch(${matcher})`) break;
   }
 }
 
-export function* All(matchers,
+export function* All({ value: matchers, ctx }) {
   for (const matcher of ctx.unbox(matchers)) {
     yield i`eat(${matcher})`;
   }
 }
 
-export function* Match(cases,
+export function* Match({ value: cases, ctx }) {
   for (const case_ of ctx.unbox(cases)) {
-const { 0: matcher, 1: guard } = ctx.unbox(case_);
+    const { 0: matcher, 1: guard, 2: props = i.Expression`null` } = ctx.unbox(case_);
     if (yield i`match(${guard})`) {
-yield i`eat(${matcher})`;
+      yield i`eat(${matcher} null ${props})`;
       break;
     }
   }
 }
 
-export function* Punctuator(
-
-yield i`eat(${value})`;
+export function* Punctuator({ intrinsicValue }) {
+  if (!intrinsicValue) throw new Error('Intrinsic productions must have value');
 
-
+  yield i`eat(${intrinsicValue})`;
 }
 
 export const Keyword = Punctuator;
 
-export function* Optional(matchers,
+export function* Optional({ value: matchers, ctx }) {
   const matchers_ = ctx.unbox(matchers);
   if (matchers_.length > 1) {
     throw new Error('Optional only allows one matcher');
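
The signature change visible above applies to every production: instead of positional (props, s, ctx) arguments, a production now receives a single destructured object. A minimal sketch of a custom production written against the new shape (the production itself is illustrative, mirroring All above):

    import { i } from '@bablr/boot/shorthand.macro';

    // Receives { value, ctx } like Any/All; Punctuator/Keyword instead
    // receive { intrinsicValue } and must be given a value.
    export function* EatAll({ value: matchers, ctx }) {
      for (const matcher of ctx.unbox(matchers)) {
        yield i`eat(${matcher})`;
      }
    }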

package/lib/source.js
ADDED

@@ -0,0 +1,173 @@
+import slice from 'iter-tools-es/methods/slice';
+import { streamFromTree } from '@bablr/agast-helpers/tree';
+import { StreamIterable, getStreamIterator } from '@bablr/agast-helpers/stream';
+
+const escapables = {
+  n: '\n',
+  r: '\r',
+  t: '\t',
+  0: '\0',
+};
+
+function* __readStreamAsStreamIterator(stream) {
+  let iter = stream[Symbol.asyncIterator]();
+  let step;
+
+  for (;;) {
+    step = yield iter.next();
+
+    if (step.done) break;
+
+    yield* step.value;
+  }
+
+  if (!step.done) {
+    iter?.return();
+  }
+}
+
+const readStreamAsStreamIterator = (stream) =>
+  new StreamIterable(__readStreamAsStreamIterator(stream));
+
+const gapStr = '<//>';
+
+function* __sourceFromReadStream(stream) {
+  let iter = getStreamIterator(readStreamAsStreamIterator(stream));
+  let step;
+  let escape = false;
+  let gapMatchIdx = 0;
+  let quote = null;
+
+  for (;;) {
+    step = iter.next();
+
+    if (step instanceof Promise) {
+      step = yield step;
+    }
+
+    if (step.done) break;
+
+    const chr = step.value;
+
+    if (escape) {
+      if (chr === "'" || chr === '"' || chr === '\\') {
+        yield chr;
+      } else if (escapables[chr]) {
+        yield escapables[chr];
+      } else {
+        throw new Error();
+      }
+      escape = false;
+    } else {
+      if (!quote && chr === gapStr[gapMatchIdx]) {
+        gapMatchIdx++;
+        if (gapMatchIdx === gapStr.length) {
+          yield null;
+          gapMatchIdx = 0;
+        }
+      } else {
+        if (gapMatchIdx > 0) {
+          throw new Error();
+        }
+
+        if (!quote && (chr === '"' || chr === "'")) {
+          quote = chr;
+        } else if (quote && chr === quote) {
+          quote = null;
+        } else if (quote && chr === '\\') {
+          escape = true;
+        } else if (quote) {
+          yield chr;
+        } else if (!/[\s]/.test(chr)) {
+          throw new Error('unkown syntax');
+        }
+      }
+    }
+  }
+
+  if (!step.done) {
+    iter?.return();
+  }
+}
+
+export const sourceFromReadStream = (stream) => new StreamIterable(__sourceFromReadStream(stream));
+
+function* __sourceFromTokenStream(terminals) {
+  let iter = getStreamIterator(terminals);
+  let step;
+
+  for (;;) {
+    step = iter.next();
+
+    if (step instanceof Promise) {
+      yield step;
+    }
+
+    if (step.done) break;
+
+    const terminal = step.value;
+
+    if (terminal.type === 'Literal') {
+      yield* terminal.value;
+    } else if (terminal.type === 'Gap') {
+      yield null;
+    }
+  }
+}
+
+export const sourceFromTokenStream = (terminals) =>
+  new StreamIterable(__sourceFromTokenStream(terminals));
+
+function* __sourceFromQuasis(quasis) {
+  let first = true;
+  let iter = getStreamIterator(quasis) || quasis[Symbol.iterator]();
+  let step;
+
+  for (;;) {
+    step = iter.next();
+
+    if (step instanceof Promise) {
+      step = yield step;
+    }
+
+    if (step.done) break;
+
+    const quasi = step.value;
+
+    if (!first) yield null;
+    yield* quasi;
+    first = false;
+  }
+}
+
+export const sourceFromQuasis = (quasis) => new StreamIterable(__sourceFromQuasis(quasis));
+
+export function* fillGapsWith(expressions, stream) {
+  let exprIdx = 0;
+  let iter = getStreamIterator(stream);
+  let step;
+
+  for (;;) {
+    let step = iter.next();
+
+    if (step instanceof Promise) {
+      step = yield step;
+    }
+
+    if (step.done) break;
+
+    const token = step.value;
+
+    if (token.type === 'Gap') {
+      if (exprIdx >= expressions.length) throw new Error('not enough gaps for expressions');
+      yield* slice(2, -1, streamFromTree(expressions[exprIdx]));
+      exprIdx++;
+    } else {
+      yield token;
+    }
+  }
+
+  if (exprIdx !== expressions.length) {
+    throw new Error('too many expressions for gaps');
+  }
+}
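
Of the new source helpers, sourceFromQuasis is the easiest to show in isolation: it streams the characters of each quasi and yields null between quasis to mark the gaps where interpolated expressions belong. A sketch, assuming StreamIterable iterates synchronously when no promises are involved:

    import { sourceFromQuasis } from '@bablr/helpers/source';

    // Iterating this source yields: '[', null, ',', ' ', null, ']'
    const source = sourceFromQuasis(['[', ', ', ']']);

fillGapsWith, by contrast, operates on a token stream: wherever it sees a 'Gap' token it splices in the token stream of the corresponding expression tree (via streamFromTree), and it throws if the number of gaps and expressions disagree.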

package/lib/trivia.js
CHANGED

@@ -1,10 +1,14 @@
-import {
+import { Coroutine } from '@bablr/coroutine';
+import { reifyExpression } from '@bablr/agast-vm-helpers';
 import { mapProductions } from './enhancers.js';
-import { Coroutine } from './grammar.js';
 
 const lookbehind = (context, s) => {
   let token = s.result;
-while (
+  while (
+    token &&
+    ['OpenNodeTag', 'CloseNodeTag', 'OpenFragmentTag', 'Reference'].includes(token.type) &&
+    (token.type !== 'OpenNodeTag' || !token.value.intrinsicValue)
+  ) {
     const prevToken = context.getPreviousTerminal(token);
     if (!prevToken) break;
     token = prevToken;
@@ -17,64 +21,54 @@ const matchedResults = new WeakSet();
 
 export const triviaEnhancer = ({ triviaIsAllowed, eatMatchTrivia }, grammar) => {
   return mapProductions((production) => {
-return function* (props
-const co = new Coroutine(production(props
+    return function* (props) {
+      const co = new Coroutine(production(props));
+      const { s, ctx } = props;
 
-co.
+      if (!co.generator) {
+        throw new Error('Production was not a generator');
+      }
 
-
+      co.advance();
 
       try {
         while (!co.done) {
-const
+          const sourceInstr = co.value;
+          const instr = reifyExpression(sourceInstr);
+          const { verb, arguments: { 0: matcher } = [] } = instr;
          let returnValue = undefined;
 
-const {
-verb: verbToken,
-verbSuffix: verbSuffixToken,
-arguments: {
-properties: { values: { 0: matcher } = [] },
-},
-} = instr.properties;
-const verb = getCooked(verbToken);
-const verbSuffix = verbSuffixToken && getCooked(verbSuffixToken);
-
          switch (verb) {
            case 'eat':
            case 'eatMatch':
            case 'match':
            case 'guard': {
-if (
-((['String', 'Pattern'].includes(matcher.type) && !s.isTerminal) ||
-matcher.type === 'TerminalMatcher') &&
-verbSuffix !== '#'
-) {
+              if (matcher && matcher.type && !matcher.flags.trivia) {
                const previous = lookbehind(ctx, s);
-if (triviaIsAllowed(s) && !matchedResults.has(previous)) {
+                if (triviaIsAllowed(s) && (!previous || !matchedResults.has(previous))) {
                  matchedResults.add(previous);
-yield eatMatchTrivia;
+                  yield* eatMatchTrivia();
                  matchedResults.add(s.result);
                }
-isTerminal = true;
              }
 
-returnValue = returnValue || (yield
+              returnValue = returnValue || (yield sourceInstr);
              break;
            }
 
            default:
-returnValue = yield
+              returnValue = yield sourceInstr;
              break;
          }
 
          co.advance(returnValue);
        }
 
-if (!
+        if (!s.node?.flags.token) {
          const previous = lookbehind(ctx, s);
          if (triviaIsAllowed(s) && !matchedResults.has(previous)) {
            matchedResults.add(previous);
-yield eatMatchTrivia;
+            yield* eatMatchTrivia();
            matchedResults.add(s.result);
          }
        }
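
A hedged sketch of the corresponding call-site change: eatMatchTrivia is now invoked as a generator function (yield* eatMatchTrivia()) rather than yielded as a single instruction, so an enhancer configuration now looks roughly like this (the grammar, predicate, and trivia instruction are placeholders):

    import { triviaEnhancer } from '@bablr/helpers/trivia';

    const EnhancedGrammar = triviaEnhancer(
      {
        triviaIsAllowed: (s) => true,
        *eatMatchTrivia() {
          // yield the language's own eatMatch(...) instruction(s) for trivia here
        },
      },
      Grammar,
    );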

package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@bablr/helpers",
   "description": "Command helpers for use in writing BABLR grammars",
-  "version": "0.1.7",
+  "version": "0.15.0",
   "author": "Conrad Buck<conartist6@gmail.com>",
   "type": "module",
   "files": [
@@ -15,8 +15,8 @@
     "./path": "./lib/path.js",
     "./productions": "./lib/productions.generated.js",
     "./shorthand": "./lib/shorthand.js",
+    "./source": "./lib/source.js",
     "./symbols": "./lib/symbols.js",
-    "./token": "./lib/token.js",
     "./trivia": "./lib/trivia.js"
   },
   "sideEffects": false,
@@ -25,8 +25,11 @@
     "clean": "rm lib/productions.generated.js"
   },
   "dependencies": {
-    "@bablr/
-    "@bablr/
+    "@bablr/language_enhancer-debug-log": "0.2.0",
+    "@bablr/strategy_enhancer-debug-log": "0.1.0",
+    "@bablr/agast-helpers": "0.1.0",
+    "@bablr/agast-vm-helpers": "0.1.0",
+    "@bablr/coroutine": "0.1.0",
     "iter-tools-es": "^7.5.3"
   },
   "devDependencies": {
@@ -36,7 +39,8 @@
     "@babel/plugin-transform-modules-commonjs": "^7.23.0",
     "@babel/plugin-transform-runtime": "^7.22.15",
     "@babel/runtime": "^7.23.2",
-    "@bablr/
+    "@bablr/boot": "0.2.0",
+    "@bablr/eslint-config-base": "github:bablr-lang/eslint-config-base#d834ccc52795d6c3b96ecc6c419960fceed221a6",
     "babel-plugin-macros": "3.1.0",
     "enhanced-resolve": "^5.12.0",
     "eslint": "^7.32.0",

package/lib/path.js
DELETED

@@ -1 +0,0 @@
-export * from '@bablr/boot-helpers/path';

package/lib/token.js
DELETED

@@ -1,23 +0,0 @@
-export const getCooked = (token) => {
-  return token.children
-    .map((child) => {
-      if (child.type === 'Escape') {
-        return child.value.cooked;
-      } else if (child.type === 'Literal') {
-        return child.value;
-      } else throw new Error();
-    })
-    .join('');
-};
-
-export const getRaw = (token) => {
-  return token.children
-    .map((child) => {
-      if (child.type === 'Escape') {
-        return child.value.raw;
-      } else if (child.type === 'Literal') {
-        return child.value;
-      } else throw new Error();
-    })
-    .join('');
-};