@bearcove/codemirror-lang-styx 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/index.ts ADDED
@@ -0,0 +1,247 @@
+ import { parser } from "./syntax.grammar";
+ import {
+   LRLanguage,
+   LanguageSupport,
+   foldNodeProp,
+   foldInside,
+   indentNodeProp,
+   continuedIndent,
+   syntaxTree,
+   foldService,
+   Language,
+ } from "@codemirror/language";
+ import { completeFromList } from "@codemirror/autocomplete";
+ import { parseMixed } from "@lezer/common";
+ import type { SyntaxNodeRef, Input, NestedParse, Parser } from "@lezer/common";
+
+ /**
+  * Configuration for nested languages in heredocs.
+  * Maps language hints (e.g., "sql", "json") to CodeMirror LanguageSupport objects.
+  */
+ export interface NestedLanguage {
+   /** Language hint as it appears after the comma in a heredoc (e.g., "sql" in <<SQL,sql) */
+   tag: string;
+   /** The CodeMirror LanguageSupport to use for parsing */
+   language: LanguageSupport;
+ }
+
+ /**
+  * Parse heredoc text to extract marker info.
+  * Returns { delimiter, langHint, contentStart, contentEnd }
+  */
+ function parseHeredocText(text: string): {
+   delimiter: string;
+   langHint: string | null;
+   contentStart: number;
+   contentEnd: number;
+ } | null {
+   // Format: <<DELIM[,lang]\n...content...\nDELIM
+   const match = text.match(/^<<([A-Z][A-Z0-9_]*)(?:,([a-z][a-z0-9_.-]*))?\r?\n/);
+   if (!match) return null;
+
+   const delimiter = match[1];
+   const langHint = match[2] || null;
+   const headerLen = match[0].length;
+
+   // Find where the closing delimiter starts
+   const delimPattern = new RegExp(`^[ \\t]*${delimiter}$`, "m");
+   const contentMatch = text.slice(headerLen).match(delimPattern);
+
+   if (!contentMatch || contentMatch.index === undefined) {
+     // No closing delimiter found - content goes to end
+     return {
+       delimiter,
+       langHint,
+       contentStart: headerLen,
+       contentEnd: text.length,
+     };
+   }
+
+   return {
+     delimiter,
+     langHint,
+     contentStart: headerLen,
+     contentEnd: headerLen + contentMatch.index,
+   };
+ }
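// Worked example (illustrative sketch, not part of the published file): what the
// helper above yields for a typical heredoc, per the two regexes it uses.
//
//   parseHeredocText("<<SQL,sql\nSELECT 1;\nSQL")
//   // => { delimiter: "SQL", langHint: "sql", contentStart: 10, contentEnd: 20 }
//   // text.slice(10, 20) === "SELECT 1;\n"  (the slice handed to the nested parser)
//
//   parseHeredocText("<<TEXT\nplain content\nTEXT")
//   // => { delimiter: "TEXT", langHint: null, ... }  (no language hint, so no nesting)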
+
+ /**
+  * Creates a parser wrapper that handles nested language injection for heredocs.
+  */
+ function createMixedParser(nestedLanguages: NestedLanguage[]) {
+   const langMap = new Map<string, Parser>();
+   for (const { tag, language } of nestedLanguages) {
+     langMap.set(tag, language.language.parser);
+   }
+
+   return parseMixed((node: SyntaxNodeRef, input: Input): NestedParse | null => {
+     if (node.type.name !== "Heredoc") return null;
+
+     // Get the heredoc text
+     const text = input.read(node.from, node.to);
+     const parsed = parseHeredocText(text);
+
+     if (!parsed || !parsed.langHint) return null;
+
+     // Find the parser for this language hint
+     const nestedParser = langMap.get(parsed.langHint);
+     if (!nestedParser) return null;
+
+     // Return overlay for just the content portion
+     return {
+       parser: nestedParser,
+       overlay: [{ from: node.from + parsed.contentStart, to: node.from + parsed.contentEnd }],
+     };
+   });
+ }
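// Illustration (not part of the published file): how the overlay maps onto document
// positions for a sample document; key names below are made up, and the exact extent
// of the Heredoc node comes from the external tokenizer in "./heredoc".
//
//   config {
//     query <<SQL,sql
//     SELECT * FROM users;
//     SQL
//   }
//
// For the Heredoc node starting at `node.from` (just before "<<SQL"), the callback
// returns an overlay covering only the lines between the markers, so
// @codemirror/lang-sql parses "SELECT * FROM users;" and nothing else.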
+
+ // Custom fold service for Styx - finds Object/Sequence nodes and returns fold ranges
+ const styxFoldService = foldService.of((state, lineStart, lineEnd) => {
+   const tree = syntaxTree(state);
+   let node = tree.resolveInner(lineEnd, -1);
+
+   // Walk up the tree looking for Object or Sequence
+   for (let cur: typeof node | null = node; cur; cur = cur.parent) {
+     if (cur.type.name === "Object" || cur.type.name === "Sequence") {
+       const first = cur.firstChild;
+       const last = cur.lastChild;
+       // Only fold if:
+       // 1. It spans multiple lines (first.to < last.from)
+       // 2. The opening brace is on THIS line (first.from >= lineStart)
+       if (first && last && first.to < last.from && first.from >= lineStart) {
+         return { from: first.to, to: last.from };
+       }
+     }
+   }
+   return null;
+ });
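// Example of what this fold service produces (illustrative, not part of the file):
// for the document below, folding the first line yields a range from just after "{"
// to just before "}", so only the braces stay visible. The same applies to Sequence
// nodes and their "(" / ")" tokens.
//
//   server {
//     host "localhost"
//     port 8080
//   }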
+
+ // Base parser props
+ const baseProps = [
+   indentNodeProp.add({
+     Object: continuedIndent({ except: /^\s*\}/ }),
+     Sequence: continuedIndent({ except: /^\s*\)/ }),
+   }),
+   foldNodeProp.add({
+     Object: foldInside,
+     Sequence: foldInside,
+   }),
+ ];
+
+ // Language definition with syntax highlighting and code folding
+ // Using parser.configure() like @codemirror/lang-json does
+ export const styxLanguage = LRLanguage.define({
+   name: "styx",
+   parser: parser.configure({ props: baseProps }),
+   languageData: {
+     commentTokens: { line: "//" },
+     closeBrackets: { brackets: ["(", "{", '"'] },
+   },
+ });
+
+ /**
+  * Create a Styx language with nested language support for heredocs.
+  */
+ function createStyxLanguage(nestedLanguages: NestedLanguage[]): LRLanguage {
+   if (nestedLanguages.length === 0) {
+     return styxLanguage;
+   }
+
+   const mixedParser = parser.configure({
+     props: baseProps,
+     wrap: createMixedParser(nestedLanguages),
+   });
+
+   return LRLanguage.define({
+     name: "styx",
+     parser: mixedParser,
+     languageData: {
+       commentTokens: { line: "//" },
+       closeBrackets: { brackets: ["(", "{", '"'] },
+     },
+   });
+ }
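// Note (illustrative, not part of the published source): with an empty
// nestedLanguages array this returns the shared styxLanguage instance, so the
// following comparisons are expected to hold:
//
//   styx().language === styxLanguage                         // plain parser, no heredoc injection
//   styx({ nestedLanguages: [] }).language === styxLanguage
//
// Passing one or more nested languages builds a fresh LRLanguage whose parser
// wraps createMixedParser(...) and therefore re-parses heredoc bodies.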
+
+ // Common Styx schema tags for autocompletion
+ const builtinTags = [
+   "@string",
+   "@int",
+   "@float",
+   "@bool",
+   "@null",
+   "@object",
+   "@array",
+   "@optional",
+   "@required",
+   "@default",
+   "@enum",
+   "@pattern",
+   "@min",
+   "@max",
+   "@minLength",
+   "@maxLength",
+ ].map((label) => ({ label, type: "keyword" }));
+
+ // Basic autocompletion for tags
+ const styxCompletion = styxLanguage.data.of({
+   autocomplete: completeFromList(builtinTags),
+ });
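// Sketch (not part of the published source): consumers can layer extra completions
// on top of the built-in tag list by providing more languageData for the default
// styx() configuration. The "@widget" / "@color" tags below are hypothetical.
//
//   import { completeFromList } from "@codemirror/autocomplete";
//   import { styx, styxLanguage } from "@bearcove/codemirror-lang-styx";
//
//   const projectTags = styxLanguage.data.of({
//     autocomplete: completeFromList([
//       { label: "@widget", type: "keyword" },
//       { label: "@color", type: "keyword" },
//     ]),
//   });
//
//   // extensions: [styx(), projectTags]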
+
+ /**
+  * Configuration options for Styx language support.
+  */
+ export interface StyxConfig {
+   /**
+    * Nested languages for heredoc content.
+    * Maps language hints to CodeMirror LanguageSupport objects.
+    *
+    * Example:
+    * ```ts
+    * import { sql } from "@codemirror/lang-sql";
+    *
+    * styx({
+    *   nestedLanguages: [
+    *     { tag: "sql", language: sql() }
+    *   ]
+    * })
+    * ```
+    */
+   nestedLanguages?: NestedLanguage[];
+ }
+
+ /**
+  * Styx language support for CodeMirror 6.
+  *
+  * Usage:
+  * ```ts
+  * import { styx } from "@bearcove/codemirror-lang-styx";
+  * import { EditorView, basicSetup } from "codemirror";
+  *
+  * new EditorView({
+  *   extensions: [basicSetup, styx()],
+  *   parent: document.body,
+  * });
+  * ```
+  *
+  * With nested language support:
+  * ```ts
+  * import { sql } from "@codemirror/lang-sql";
+  *
+  * new EditorView({
+  *   extensions: [basicSetup, styx({ nestedLanguages: [{ tag: "sql", language: sql() }] })],
+  *   parent: document.body,
+  * });
+  * ```
+  */
+ export function styx(config: StyxConfig = {}): LanguageSupport {
+   const nestedLanguages = config.nestedLanguages || [];
+   const lang = createStyxLanguage(nestedLanguages);
+
+   // Collect nested language supports for proper extension loading
+   const nestedSupports = nestedLanguages.flatMap((n) => n.language.support);
+
+   return new LanguageSupport(lang, [styxCompletion, styxFoldService, ...nestedSupports]);
+ }
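// Sketch of combining several nested languages (not part of the published source;
// assumes @codemirror/lang-sql and @codemirror/lang-json are installed):
//
//   import { EditorView, basicSetup } from "codemirror";
//   import { sql } from "@codemirror/lang-sql";
//   import { json } from "@codemirror/lang-json";
//   import { styx } from "@bearcove/codemirror-lang-styx";
//
//   new EditorView({
//     extensions: [
//       basicSetup,
//       styx({
//         nestedLanguages: [
//           { tag: "sql", language: sql() },
//           { tag: "json", language: json() },
//         ],
//       }),
//     ],
//     parent: document.body,
//   });
//
//   // Heredocs tagged <<QUERY,sql or <<DATA,json are then parsed with the matching
//   // grammar; heredocs without a language hint stay plain text.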
+
+ // Re-export for advanced usage
+ export { parser } from "./syntax.grammar";
@@ -0,0 +1,110 @@
+ // Lezer grammar for Styx configuration language
+ // Simplified for syntax highlighting
+
+ @top Document { line* }
+
+ line {
+   Comment newline |
+   DocComment newline |
+   Entry newline |
+   newline
+ }
+
+ @skip { whitespace }
+
+ Entry { KeyExpr ValueExpr? }
+
+ KeyExpr {
+   Tag ~tagAmb KeyPayload |
+   Tag ~tagAmb |
+   KeyAtom
+ }
+
+ ValueExpr {
+   Tag ~tagAmb ValuePayload |
+   Tag ~tagAmb |
+   ValueAtom
+ }
+
+ Tag { tagName }
+
+ KeyPayload { QuotedScalar | RawScalar | Sequence | Object | Unit | Attributes }
+ ValuePayload { QuotedScalar | RawScalar | Sequence | Object | Unit | Attributes }
+
+ KeyAtom { BareScalar | QuotedScalar | RawScalar | Sequence | Object | Unit | Attributes }
+ ValueAtom { BareScalar | QuotedScalar | RawScalar | Heredoc | Sequence | Object | Unit | Attributes }
+
+ BareScalar { bareWord }
+
+ QuotedScalar { stringLiteral }
+
+ RawScalar { rawString }
+
+ Heredoc { heredoc }
+
+ Unit { unitAt }
+
+ Sequence { "(" SeqContent? ")" }
+
+ SeqContent { newline* SeqItem (newline* SeqItem)* newline* }
+
+ SeqItem {
+   Tag ~tagAmb SeqPayload |
+   Tag ~tagAmb |
+   SeqAtom
+ }
+
+ SeqPayload { QuotedScalar | RawScalar | Sequence | Object | Unit | Attributes }
+ SeqAtom { BareScalar | QuotedScalar | RawScalar | Sequence | Object | Unit | Attributes }
+
+ Object { "{" ObjContent? "}" }
+
+ ObjContent { ObjSep* ObjItem (ObjSep ObjItem)* ObjSep* }
+
+ ObjItem { DocComment? Entry }
+
+ ObjSep { "," | newline }
+
+ Attributes { attributeGroup }
+
+ Comment { lineComment }
+ DocComment { docCommentLine+ }
+
+ docCommentLine { docCommentToken }
+
+ @tokens {
+   whitespace { $[ \t]+ }
+   newline { "\r"? "\n" }
+
+   lineComment { "//" ![/\n\r] ![^\n\r]* }
+   docCommentToken { "///" ![^\n\r]* "\r"? "\n" }
+
+   tagName { "@" $[A-Za-z_] $[A-Za-z0-9_\-]* }
+   unitAt { "@" }
+
+   bareWord { ![{}\(\),"=@<>\r\n\t ] ![{}\(\),"<>\r\n\t ]* }
+
+   // Attribute group: one or more key>value pairs
+   attributeGroup {
+     ![{}\(\),"=@>\r\n\t ] ![{}\(\),">\r\n\t ]* ">" ![{}\(\),"\r\n\t ]+
+     ($[ \t]+ ![{}\(\),"=@>\r\n\t ] ![{}\(\),">\r\n\t ]* ">" ![{}\(\),"\r\n\t ]+)*
+   }
+
+   // String literal as a single token
+   stringLiteral { '"' (!["\\\n\r] | "\\" ![])* '"' }
+
+   rawString { "r" "#"* '"' !["]* '"' "#"* }
+
+   "(" ")" "{" "}" "," ">"
+
+   @precedence {
+     tagName, unitAt, docCommentToken, lineComment,
+     rawString, attributeGroup, bareWord
+   }
+ }
+
+ @external tokens heredocTokenizer from "./heredoc" { heredoc }
+
+ @external propSource styxHighlight from "./highlight"
+
+ @detectDelim
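// Illustrative sample accepted by the grammar above (editorial sketch, derived from
// these rules; the grammar is simplified for highlighting, so the full Styx language
// may differ). Key names and values are made up.
//
//   db {
//     /// Connection settings
//     host "localhost"
//     port 5432
//     replicas (one two three)
//     schema <<SQL,sql
//   CREATE TABLE users (id INTEGER);
//   SQL
//   }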
@@ -0,0 +1,29 @@
+ // This file was generated by lezer-generator. You probably shouldn't edit it.
+ export const
+   heredoc = 39,
+   Document = 1,
+   Comment = 2,
+   DocComment = 3,
+   Entry = 4,
+   KeyExpr = 5,
+   Tag = 6,
+   KeyPayload = 7,
+   QuotedScalar = 8,
+   RawScalar = 9,
+   Sequence = 12,
+   SeqContent = 13,
+   SeqItem = 14,
+   SeqPayload = 15,
+   Object = 18,
+   ObjContent = 19,
+   ObjSep = 20,
+   ObjItem = 22,
+   Unit = 23,
+   Attributes = 24,
+   SeqAtom = 25,
+   BareScalar = 26,
+   KeyAtom = 27,
+   ValueExpr = 28,
+   ValuePayload = 29,
+   ValueAtom = 30,
+   Heredoc = 31
@@ -0,0 +1,23 @@
+ // This file was generated by lezer-generator. You probably shouldn't edit it.
+ import {LRParser} from "@lezer/lr"
+ import {heredocTokenizer} from "./heredoc"
+ import {styxHighlight} from "./highlight"
+ export const parser = LRParser.deserialize({
+ version: 14,
+ states: "*tQVQPOOOOQO'#C^'#C^OzQPO'#C_OOQQ'#Cb'#CbOOQQ'#Cd'#CdOOQQ'#Ce'#CeO#aQPO'#CnOOQQ'#Cs'#CsOOQQ'#Ct'#CtOOQQ'#Cv'#CvO$VQPO'#ChOOQQ'#Cw'#CwO$rQRO'#CaOOQQ'#Ca'#CaO%lQRO'#C`O&SQPO'#DWOOQO'#DW'#DWOOQO'#C|'#C|QVQPOOOOQO'#C}'#C}OOQO-E6{-E6{OOQO'#Cp'#CpOOQO'#DP'#DPO!lQPO'#CoOOQO'#Cr'#CrO&XQPO'#CrO&cQPO'#CoOOQQ,59Y,59YO&nQPO,59YOOQO'#DO'#DOO#hQPO'#CiOOQO'#Cu'#CuO&sQPO'#CjOOQO'#Cj'#CjO'gQPO'#CiOOQQ,59S,59SO'nQPO,59SOOQQ'#Cc'#CcOOQQ,58{,58{OOQO'#C{'#C{OOQO'#Cz'#CzO'sQPO'#CxOOQO'#Cx'#CxOOQO,58z,58zOOQO,59r,59rOOQO-E6z-E6zOOQO-E6}-E6}O(QQPO,59ZOOQO,59^,59^O(]QPO'#DPO(QQPO,59ZO(QQPO,59ZOOQQ1G.t1G.tOOQO-E6|-E6|O(sQPO,59TOOQO'#Ck'#CkOOQO,59U,59UO(sQPO,59TOOQO'#DR'#DRO(sQPO,59TOOQQ1G.n1G.nOOQO'#Cy'#CyOOQO,59d,59dO(zQPO1G.uO(zQPO1G.uOOQO,59l,59lOOQO-E7O-E7OO)VQPO1G.oO)VQPO1G.oOOQO,59m,59mOOQO-E7P-E7PO)^QPO7+$aP)iQPO'#DQO)vQPO7+$ZP#hQPO'#DO",
+ stateData: "*W~OyOS~OZYOaUO{PO|`O}cO!ORO!PSO!QTO!RVO!SWO!TXO~O}cO|RXZRXaRX!ORX!PRX!QRX!RRX!SRX!TRX~OZYOaUOeeO|eO}cO!ORO!PSO!QTO!RVO!SWO!TXO~O`kO~P!lOZYOaUO|mO!ORO!PSO!QTO!RVO!SWO!TXO~OYsO~P#hOZYOaUO!PSO!QTO!RVO!SWO~OZTXaTXwTX|TX!OTX!PTX!QTX!RTX!STX!TTX`TXeTX~P$^OwwO!ORO!TXO|SX`SXeSX~P$^O||O~O!ORO!TXO~P$^OeeO|eO`cX~O`!UO~OY^XZ^Xa^X|^X!O^X!P^X!Q^X!R^X!S^X!T^X~P$^OY]X~P#hOY!^O~O|lX`lXelX~P$^OeeO|eO`ca~O}cO!ORO!TXO`sXesX|sX~P$^OY]a~P#hOeeO|eO`ci~OY]i~P#hOeeO|eO`cq~O}cO!ORO!TXO~P$^OY]q~P#hO!O!R}{!Q!S!T{~",
+ goto: "(Q{PP|!Q![!i!r#[#_#_PP#_$Q$T$gPP#_$j$mP$z#_#_%U%b%z&T&W&Z%w&^&d&o'T'h'rPPPP'|T_ObS_ObXiUg!R!jS_ObWhUg!R!jR!Qi_^OUbgi!R!j^[OUbgi!R!jdpYnr!W!Z!]!e!f!k!lRy^Rv[^ZOUbgi!R!jdoYnr!W!Z!]!e!f!k!lQu[Qx^Q!XpR!_yRtYQrYQ!WnW![r!W!]!fX!g!Z!e!k!lR!YpRlUYfUg!S!a!iX!Rj!P!T!bQjUQ!PgT!c!R!jeqYnr!W!Z!]!e!f!k!l^ZOUbgi!R!jdoYnr!W!Z!]!e!f!k!lRx^_]OUbgi!R!jR{^R!`yRz^QbOR}b[QOUbg!R!jRdQQnYY!Vn!Z!e!k!lQ!ZrS!e!W!]R!k!fQgUW!Og!S!a!iQ!SjS!a!P!TR!i!bQ!TjQ!b!PT!d!T!bQ!]rQ!f!WT!h!]!fTaOb",
+ nodeNames: "⚠ Document Comment DocComment Entry KeyExpr Tag KeyPayload QuotedScalar RawScalar ) ( Sequence SeqContent SeqItem SeqPayload } { Object ObjContent ObjSep , ObjItem Unit Attributes SeqAtom BareScalar KeyAtom ValueExpr ValuePayload ValueAtom Heredoc",
+ maxTerm: 51,
+ nodeProps: [
+ ["openedBy", 10,"(",16,"{"],
+ ["closedBy", 11,")",17,"}"]
+ ],
+ propSources: [styxHighlight],
+ skippedNodes: [0],
+ repeatNodeCount: 6,
+ tokenData: "B[~RmOX!|XY(UYZ(aZ]!|]^(f^p!|pq(Uqr!|rs(lsx!|xy)syz)xz|!||})}}!P!|!P!Q*S!Q!^!|!^!_$Q!a!b!|!b!c?b!c#f!|#f#g@U#g#o!|#o#pBQ#p#q!|#q#rBV#r;'S!|;'S;=`(O<%lO!|~#R_!T~OX!|Z]!|^p!|qr!|sx!|z|!|}!^!|!^!_$Q!_!`!|!`!a$|!a#o!|#p#q!|#r;'S!|;'S;=`(O<%lO!|~$T]OX$QZ]$Q^p$Qqr$Qsx$Qz|$Q}!`$Q!`!a$|!a#o$Q#p#q$Q#r;'S$Q;'S;=`'r<%lO$Q~%PZOX%rZ]%r^p%rqr%rsx%rz|%r}#o%r#p#q%r#r;'S%r;'S;=`'x<%lO%r~%w]!S~OX%rXY&pZ]%r^p%rpq&pqr%rsx%rz|%r}#o%r#p#q%r#r;'S%r;'S;=`'x<%lO%r~&s_OX$QXY&pZ]$Q^p$Qpq&pqr$Qsx$Qz|$Q}!_$Q!a!b$Q!c#o$Q#p#q$Q#r;'S$Q;'S;=`'r<%lO$Q~'uP;=`<%l$Q~'{P;=`<%l%r~(RP;=`<%l!|~(ZQy~XY(Upq(U~(fO|~~(iPYZ(a~(oXOY(lZ](l^r(lrs)[s#O(l#O#P)a#P;'S(l;'S;=`)m<%lO(l~)aO!P~~)dRO;'S(l;'S;=`)m<%lO(l~)pP;=`<%l(l~)xOZ~~)}OY~~*SOe~~*Xa!T~OX!|Z]!|^p!|qr!|sx!|z|!|}!P!|!P!Q+^!Q!^!|!^!_$Q!_!`!|!`!a$|!a#o!|#p#q!|#r;'S!|;'S;=`(O<%lO!|~+ch!T~OX,}XY.pZ],}^p,}pq.pqr,}rs.psx,}xz.pz|,}|}.p}!P,}!P!Q5u!Q!^,}!^!_/_!_!`,}!`!a0x!a#o,}#o#p.p#p#q,}#q#r.p#r;'S,};'S;=`5o<%lO,}~-Uh{~!T~OX,}XY.pZ],}^p,}pq.pqr,}rs.psx,}xz.pz|,}|}.p}!^,}!^!_/_!_!`,}!`!a0x!a#Q,}#Q#R!|#R#o,}#o#p.p#p#q,}#q#r.p#r;'S,};'S;=`5o<%lO,}~.uU{~OY.pZ].p^#Q.p#R;'S.p;'S;=`/X<%lO.p~/[P;=`<%l.p~/df{~OX/_XY.pZ]/_^p/_pq.pqr/_rs.psx/_xz.pz|/_|}.p}!`/_!`!a0x!a#Q/_#Q#R$Q#R#o/_#o#p.p#p#q/_#q#r.p#r;'S/_;'S;=`5c<%lO/_~0}d{~OX2]XY.pZ]2]^p2]pq.pqr2]rs.psx2]xz.pz|2]|}.p}#Q2]#Q#R%r#R#o2]#o#p.p#p#q2]#q#r.p#r;'S2];'S;=`5i<%lO2]~2dd{~!S~OX2]XY3rZ]2]^p2]pq3rqr2]rs.psx2]xz.pz|2]|}.p}#Q2]#Q#R%r#R#o2]#o#p.p#p#q2]#q#r.p#r;'S2];'S;=`5i<%lO2]~3wh{~OX/_XY3rZ]/_^p/_pq3rqr/_rs.psx/_xz.pz|/_|}.p}!_/_!_!a.p!a!b/_!b!c.p!c#Q/_#Q#R$Q#R#o/_#o#p.p#p#q/_#q#r.p#r;'S/_;'S;=`5c<%lO/_~5fP;=`<%l/_~5lP;=`<%l2]~5rP;=`<%l,}~5zj!T~OX5uXY7lYZ8XZ]5u]^8^^p5upq7lqr5urs7lsx5uxz7lz|5u|}7l}!^5u!^!_8j!_!`5u!`!a:X!a#Q5u#Q#R!|#R#o5u#o#p7l#p#q5u#q#r7l#r;'S5u;'S;=`?[<%lO5u~7oWOY7lYZ8XZ]7l]^8^^#Q7l#R;'S7l;'S;=`8d<%lO7l~8^O}~~8aPYZ8X~8gP;=`<%l7l~8mhOX8jXY7lYZ8XZ]8j]^8^^p8jpq7lqr8jrs7lsx8jxz7lz|8j|}7l}!`8j!`!a:X!a#Q8j#Q#R$Q#R#o8j#o#p7l#p#q8j#q#r7l#r;'S8j;'S;=`?O<%lO8j~:[fOX;pXY7lYZ8XZ];p]^8^^p;ppq7lqr;prs7lsx;pxz7lz|;p|}7l}#Q;p#Q#R%r#R#o;p#o#p7l#p#q;p#q#r7l#r;'S;p;'S;=`?U<%lO;p~;uf!S~OX;pXY=ZYZ8XZ];p]^8^^p;ppq=Zqr;prs7lsx;pxz7lz|;p|}7l}#Q;p#Q#R%r#R#o;p#o#p7l#p#q;p#q#r7l#r;'S;p;'S;=`?U<%lO;p~=^jOX8jXY=ZYZ8XZ]8j]^8^^p8jpq=Zqr8jrs7lsx8jxz7lz|8j|}7l}!_8j!_!a7l!a!b8j!b!c7l!c#Q8j#Q#R$Q#R#o8j#o#p7l#p#q8j#q#r7l#r;'S8j;'S;=`?O<%lO8j~?RP;=`<%l8j~?XP;=`<%l;p~?_P;=`<%l5u~?gR!R~!c!}?p#R#S?p#T#o?p~?uT!O~}!O?p!Q![?p!c!}?p#R#S?p#T#o?p~@Za!T~OX!|Z]!|^p!|qr!|rsA`st@Utx!|z|!|}!^!|!^!_$Q!_!`!|!`!a$|!a#o!|#p#q!|#r;'S!|;'S;=`(O<%lO!|~AcTOrA`rsArs;'SA`;'S;=`Az<%lOA`~AwP!Q~stAr~A}P;=`<%lA`~BVOa~~B[O`~",
+ tokenizers: [0, heredocTokenizer],
+ topRules: {"Document":[0,1]},
+ tokenPrec: 413
+ })