lilmd 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/BENCHMARK.md +106 -0
- package/README.md +113 -0
- package/dist/index.cjs +364 -0
- package/dist/index.d.cts +112 -0
- package/dist/index.d.ts +112 -0
- package/dist/index.js +331 -0
- package/dist/index.js.map +13 -0
- package/dist/mdq.js +735 -0
- package/dist/mdq.js.map +16 -0
- package/package.json +42 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
/**
 * Markdown heading scanner — the engine behind every read-path command.
 *
 * Instead of building a full CommonMark AST we walk the source line by line
 * and recognize only what `mdq` actually needs: ATX headings and fenced code
 * blocks (so `#` inside code doesn't count as a heading).
 *
 * Numbers on MDN content (see BENCHMARK.md): ~180 MB/s end-to-end on a
 * 10 MB fixture, roughly 7x faster than markdown-it and ~1000x faster than
 * mdast-util-from-markdown while returning the exact same section.
 *
 * Deliberate limitations:
 * - Setext headings (`===` / `---` underlines) are NOT recognized. mdq is
 *   aimed at agent-authored markdown where ATX is ubiquitous.
 * - HTML blocks are not detected. A `<pre>` containing an ATX-looking line
 *   would be misread as a heading. That's an acceptable tradeoff for 100x
 *   speed; a future `--strict` flag could hand off to markdown-it.
 * - Fenced code blocks *inside a list item* that are indented 4+ spaces are
 *   not recognized as fences — we only look at the first 3 columns for the
 *   fence opener. A `# fake` line inside such a block would be scanned as a
 *   heading. Rare in practice; document-your-way-out rather than fix.
 * - An unclosed fence at EOF leaves the scanner in "still in fence" state
 *   to the end of the file, so any `#`-looking lines after it are ignored.
 *   That's the conservative choice — prefer under-counting to over-counting.
 */
type Heading = {
    /** 1..6 */
    level: number;
    /** Heading text with trailing closing hashes stripped. */
    title: string;
    /** 1-indexed line number. */
    line: number;
};
/**
 * Return every ATX heading in `src`, in document order.
 * Runs in a single pass; O(n) in source length, O(headings) in space.
 */
declare function scan(src: string): Heading[];
type Section = {
    /** Heading level, 1..6 (copied from the heading). */
    level: number;
    /** Heading text (copied from the heading). */
    title: string;
    /** 1-indexed line of the heading itself. */
    line_start: number;
    /** 1-indexed inclusive end of the subtree. */
    line_end: number;
    /** Nearest enclosing section, or null for top-level. */
    parent: Section | null;
};
/**
 * Build the section tree in a single pass. Preserves document order.
 *
 * Runs in O(n): every section is pushed once and popped once, and we set
 * its `line_end` at pop time. Sections still on the stack when we run out
 * of headings keep their provisional `line_end = totalLines`.
 */
declare function buildSections(headings: Heading[], totalLines: number): Section[];
/**
 * Walk `sec` up to the root, collecting ancestor titles in top-down order.
 * Returns [] for a root section.
 */
declare function pathOf(sec: Section): string[];
/**
 * Count lines in a source string. Empty string is 0; otherwise every line
 * (including the last one, whether or not it ends with a newline) is 1.
 * A trailing newline does NOT add a phantom line.
 */
declare function countLines(src: string): number;
/** Combinator joining a segment to the previous one: any ancestor ("descendant") or direct parent ("child"). */
type Op = "descendant" | "child";
/** How a segment's value is compared against a section title. */
type Kind = "fuzzy" | "exact" | "regex";
type Segment = {
    /** Operator that connects this segment to the *previous* one.
     * For the first segment this is always "descendant" (unused). */
    op: Op;
    /** Optional 1..6 level filter. */
    level: number | null;
    kind: Kind;
    /** The raw value (without level/kind prefix). */
    value: string;
    /** Present only for kind === "regex". */
    regex?: RegExp;
};
/**
 * Parse a selector string into segments. `>` separates ancestor/descendant
 * steps; `>>` requires a direct child. A segment may carry a `#`-run level
 * prefix and is a /regex/, an `=`-exact title, or a fuzzy substring.
 */
declare function parseSelector(input: string): Segment[];
/** Return the sections (in document order) whose ancestor chain satisfies `selector`. */
declare function match(sections: Section[], selector: Segment[]): Section[];
/**
 * Pretty printing for `mdq read --pretty`. Lazy-loads marked +
 * marked-terminal on first use so the default (plain-text) path keeps its
 * ~16ms cold start.
 */
type PrettyFormatter = (markdown: string) => string;
type TocOptions = {
    /** Only include headings with level <= depth. */
    depth?: number;
    /** Disable per-level indentation in the listing. */
    flat?: boolean;
};
/** Render a one-line-per-heading table of contents with L<start>-<end> ranges. */
declare function renderToc(file: string, src: string, sections: Section[], opts: TocOptions): string;
type SectionOptions = {
    /** Stop the body just before the first child heading. */
    bodyOnly?: boolean;
    /** Emit only the heading line itself. */
    noBody?: boolean;
    /** Return the body without header/footer delimiters. */
    raw?: boolean;
    /** Truncate the body to this many lines; <= 0 disables truncation. */
    maxLines?: number;
    /** Required when bodyOnly is true so we can find the first child. */
    allSections?: Section[];
    /** Optional markdown→ANSI formatter applied to the body before delimiters. */
    pretty?: PrettyFormatter;
};
declare function renderSection(file: string, srcLines: string[], sec: Section, opts: SectionOptions): string;
/**
 * Cut `body` to the first `maxLines` lines. If anything was dropped, append
 * a marker line telling the agent how to get the rest. `maxLines <= 0`
 * disables truncation.
 */
declare function truncateBody(body: string, maxLines: number): string;
export { truncateBody, scan, renderToc, renderSection, pathOf, parseSelector, match, countLines, buildSections, TocOptions, Segment, SectionOptions, Section, Op, Kind, Heading };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
// src/scan.ts
/**
 * Collect every ATX heading in `src`, in document order, skipping anything
 * inside a fenced code block. Single pass over the string.
 */
function scan(src) {
  const headings = [];
  const len = src.length;
  // Fence state: while open, lines are ignored until a closing fence with
  // the same marker char and at least the opening run length appears.
  let inFence = false;
  let fenceChar = 0;
  let fenceLen = 0;
  let lineNo = 0;
  let pos = 0;
  for (;;) {
    // Take one line without its trailing "\n"; drop a trailing "\r" (CRLF).
    const lineStart = pos;
    while (pos < len && src.charCodeAt(pos) !== 10) pos++;
    let line = src.slice(lineStart, pos);
    if (line.charCodeAt(line.length - 1) === 13) line = line.slice(0, -1);
    lineNo++;
    const fence = fenceMarker(line);
    if (fence === null) {
      if (!inFence) {
        const h = headingAt(line, lineNo);
        if (h !== null) headings.push(h);
      }
    } else if (!inFence) {
      inFence = true;
      fenceChar = fence.char;
      fenceLen = fence.len;
    } else if (fence.char === fenceChar && fence.len >= fenceLen) {
      inFence = false;
    }
    if (pos >= len) break;
    pos++; // step over the "\n"
  }
  return headings;
}
/**
 * If `line` opens or closes a fenced code block, return the fence char code
 * (` or ~) and the run length; otherwise null. A fence is at most three
 * leading spaces followed by 3+ of a single fence character.
 */
function fenceMarker(line) {
  let col = 0;
  while (col < 3 && line.charCodeAt(col) === 32) col++;
  const marker = line.charCodeAt(col);
  if (marker !== 96 /* ` */ && marker !== 126 /* ~ */) return null;
  let run = 0;
  while (line.charCodeAt(col + run) === marker) run++;
  return run >= 3 ? { char: marker, len: run } : null;
}
/**
 * Parse `line` as an ATX heading: up to three leading spaces, 1–6 hashes,
 * then whitespace (or end of line). Trailing whitespace and an optional
 * closing hash run are stripped from the title. Returns null for non-headings.
 */
function headingAt(line, lineNo) {
  let col = 0;
  while (col < 3 && line.charCodeAt(col) === 32) col++;
  if (line.charCodeAt(col) !== 35 /* # */) return null;
  let level = 0;
  while (line.charCodeAt(col + level) === 35) level++;
  if (level > 6) return null;
  const afterHashes = col + level;
  if (afterHashes < line.length) {
    const next = line.charCodeAt(afterHashes);
    // "#foo" (no space after the hashes) is not a heading.
    if (next !== 32 && next !== 9) return null;
  }
  let from = afterHashes;
  while (from < line.length && isBlank(line.charCodeAt(from))) from++;
  let to = line.length;
  while (to > from && isBlank(line.charCodeAt(to - 1))) to--;
  // Optional closing hash run: strip it only when preceded by whitespace,
  // or when the whole content is hashes (e.g. "# ###" has an empty title).
  let beforeClose = to;
  while (beforeClose > from && line.charCodeAt(beforeClose - 1) === 35) beforeClose--;
  if (beforeClose < to && (beforeClose === from || isBlank(line.charCodeAt(beforeClose - 1)))) {
    to = beforeClose;
    while (to > from && isBlank(line.charCodeAt(to - 1))) to--;
  }
  return { level, title: line.slice(from, to), line: lineNo };
}
/** True for space or tab (the only blanks the heading parser honors). */
function isBlank(code) {
  return code === 32 || code === 9;
}
|
|
89
|
+
// src/sections.ts
/**
 * Turn a flat heading list into a section list with parent links and
 * line ranges. O(headings): each section is pushed and popped once.
 */
function buildSections(headings, totalLines) {
  const sections = [];
  // Currently-open sections, deepest last. A heading at level <= the top of
  // the stack closes it, pinning line_end to the line just before itself.
  const open = [];
  for (const heading of headings) {
    while (open.length && open[open.length - 1].level >= heading.level) {
      open.pop().line_end = heading.line - 1;
    }
    const section = {
      level: heading.level,
      title: heading.title,
      line_start: heading.line,
      line_end: totalLines, // provisional; corrected at pop time if closed
      parent: open.length ? open[open.length - 1] : null
    };
    sections.push(section);
    open.push(section);
  }
  return sections;
}
|
|
111
|
+
/**
 * Ancestor titles of `sec`, top-down (root first). Empty for a root section.
 */
function pathOf(sec) {
  // Collect bottom-up while climbing the parent chain, then flip.
  const titles = [];
  for (let node = sec.parent; node; node = node.parent) {
    titles.push(node.title);
  }
  return titles.reverse();
}
|
|
120
|
+
/**
 * Number of lines in `src`. Empty string is 0 lines; a trailing newline
 * terminates the final line rather than opening a phantom one.
 */
function countLines(src) {
  if (!src) return 0;
  let count = 1;
  let at = src.indexOf("\n");
  while (at !== -1) {
    count++;
    at = src.indexOf("\n", at + 1);
  }
  if (src.endsWith("\n")) count--;
  return count;
}
|
|
132
|
+
// src/select.ts
/**
 * Parse a selector string into segments.
 *
 * Grammar: segments separated by ">" (descendant) or ">>" (direct child).
 * Each segment may carry a "#"-run level prefix, and its value is either a
 * /regex/flags, an "="-prefixed exact title, or (default) a fuzzy substring.
 *
 * A ">" inside a /regex/ is literal text, not a combinator — including after
 * an escaped slash: "\" inside a regex consumes the following character
 * verbatim so "\/" does not close the regex (previously it flipped the
 * in-regex flag off and a later ">" split the regex across segments).
 */
function parseSelector(input) {
  const trimmed = input.trim();
  if (trimmed.length === 0)
    return [];
  const rawSegments = [];
  // ops[k] joins segment k to segment k-1; the first entry is a placeholder.
  const ops = ["descendant"];
  let cur = "";
  let i = 0;
  let inRegex = false;
  let atSegmentStart = true;
  while (i < trimmed.length) {
    const ch = trimmed[i];
    if (inRegex && ch === "\\") {
      // Keep escape sequences intact so "\/" cannot terminate the regex.
      cur += ch;
      if (i + 1 < trimmed.length) {
        cur += trimmed[i + 1];
        i += 2;
      } else {
        i += 1;
      }
      continue;
    }
    if (ch === "/" && (atSegmentStart || inRegex)) {
      inRegex = !inRegex;
      cur += ch;
      atSegmentStart = false;
      i++;
      continue;
    }
    if (!inRegex && ch === ">") {
      rawSegments.push(cur.trim());
      cur = "";
      atSegmentStart = true;
      if (trimmed[i + 1] === ">") {
        ops.push("child");
        i += 2;
      } else {
        ops.push("descendant");
        i += 1;
      }
      continue;
    }
    cur += ch;
    if (ch !== " " && ch !== "\t")
      atSegmentStart = false;
    i++;
  }
  rawSegments.push(cur.trim());
  return rawSegments.map((s, idx) => parseSegment(s, ops[idx] ?? "descendant"));
}
/**
 * Parse one raw segment: optional 1–6 "#" level prefix, then a /regex/
 * (defaulting to the "i" flag when none are given), an "="-prefixed exact
 * title, or a fuzzy substring.
 */
function parseSegment(raw, op) {
  let s = raw;
  let level = null;
  const levelMatch = /^(#{1,6})(?!#)\s*(.*)$/.exec(s);
  if (levelMatch) {
    level = levelMatch[1].length;
    s = levelMatch[2] ?? "";
  }
  const regexMatch = /^\/(.+)\/([gimsuy]*)$/.exec(s);
  if (regexMatch) {
    const pattern = regexMatch[1];
    const flags = regexMatch[2] || "i";
    return {
      op,
      level,
      kind: "regex",
      value: pattern,
      regex: new RegExp(pattern, flags)
    };
  }
  if (s.startsWith("=")) {
    return { op, level, kind: "exact", value: s.slice(1).trim() };
  }
  return { op, level, kind: "fuzzy", value: s.trim() };
}
|
|
198
|
+
/**
 * Return the sections (in document order) whose ancestor chain satisfies
 * `selector`. An empty selector selects nothing by design.
 */
function match(sections, selector) {
  if (selector.length === 0)
    return [];
  const out = [];
  for (const sec of sections) {
    if (matches(sec, selector))
      out.push(sec);
  }
  return out;
}
/**
 * Does `sec` satisfy the whole selector? The last segment must match `sec`
 * itself; earlier segments are matched against the ancestor chain
 * right-to-left, honoring "child" (immediate parent) vs "descendant" (any
 * ancestor) combinators.
 *
 * Descendant steps backtrack: previously we committed to the *nearest*
 * matching ancestor, which wrongly rejected cases where only a farther
 * ancestor satisfies a later "child" constraint (e.g. "A >> B C" with
 * chain C → B1 → X → B2 → A, where B1's parent is not A but B2's is).
 */
function matches(sec, segs) {
  const last = segs[segs.length - 1];
  if (!last || !segmentMatchesSection(last, sec))
    return false;
  return matchAncestors(sec.parent, segs, segs.length - 2);
}
/** Match segments [0..idx] against the ancestor chain starting at `cursor`. */
function matchAncestors(cursor, segs, idx) {
  if (idx < 0)
    return true;
  const op = segs[idx + 1].op; // combinator joining segs[idx] to segs[idx+1]
  const seg = segs[idx];
  if (op === "child") {
    if (!cursor || !segmentMatchesSection(seg, cursor))
      return false;
    return matchAncestors(cursor.parent, segs, idx - 1);
  }
  // "descendant": try each matching ancestor until one admits a full match.
  while (cursor) {
    if (segmentMatchesSection(seg, cursor) && matchAncestors(cursor.parent, segs, idx - 1))
      return true;
    cursor = cursor.parent;
  }
  return false;
}
/**
 * Does a single segment accept `sec`? Level filter first, then compare the
 * title per the segment kind (exact and fuzzy are case-insensitive).
 */
function segmentMatchesSection(seg, sec) {
  if (seg.level !== null && seg.level !== sec.level)
    return false;
  const title = sec.title;
  switch (seg.kind) {
    case "exact":
      return title.toLowerCase() === seg.value.toLowerCase();
    case "regex":
      return seg.regex.test(title);
    case "fuzzy":
      return title.toLowerCase().includes(seg.value.toLowerCase());
  }
}
|
|
249
|
+
// src/render.ts
/**
 * Render a table of contents: a summary line ("file L1-N count heading(s)"),
 * then one line per section with hashes, title, and its L<start>-<end> range.
 */
function renderToc(file, src, sections, opts) {
  const totalLines = countLines(src);
  const count = sections.length;
  const range = totalLines === 0 ? "L0" : `L1-${totalLines}`;
  const noun = count === 1 ? "heading" : "headings";
  const lines = [`${file} ${range} ${count} ${noun}`];
  for (const sec of sections) {
    // Depth filter: skip headings deeper than requested.
    if (opts.depth != null && sec.level > opts.depth)
      continue;
    const indent = opts.flat ? "" : " ".repeat(Math.max(0, sec.level - 1));
    lines.push(`${indent}${"#".repeat(sec.level)} ${sec.title} L${sec.line_start}-${sec.line_end}`);
  }
  return lines.join("\n");
}
|
|
268
|
+
/**
 * Render one section: its lines from `srcLines`, optionally narrowed
 * (bodyOnly/noBody), truncated (maxLines), formatted (pretty), and wrapped
 * in header/footer delimiters unless `raw` is set.
 */
function renderSection(file, srcLines, sec, opts) {
  const start = sec.line_start;
  let end = sec.line_end;
  // bodyOnly: stop just before the first child heading, if there is one.
  if (opts.bodyOnly && opts.allSections) {
    const child = findFirstChild(sec, opts.allSections);
    if (child)
      end = child.line_start - 1;
  }
  // noBody: only the heading line itself.
  if (opts.noBody)
    end = start;
  let body = srcLines.slice(start - 1, Math.min(end, srcLines.length)).join("\n");
  if (opts.maxLines != null && opts.maxLines > 0)
    body = truncateBody(body, opts.maxLines);
  if (opts.pretty)
    body = opts.pretty(body);
  if (opts.raw)
    return body;
  const hashes = "#".repeat(sec.level);
  const header = `── ${file} L${start}-${end} ${hashes} ${sec.title} ${"─".repeat(8)}`;
  const footer = `── end ${"─".repeat(40)}`;
  return [header, body, footer].join("\n");
}
/**
 * Keep at most `maxLines` lines of `body`; when lines were dropped, append
 * a marker telling the agent how to get the rest. `maxLines <= 0` disables
 * truncation.
 */
function truncateBody(body, maxLines) {
  if (maxLines <= 0)
    return body;
  const lines = body.split("\n");
  if (lines.length <= maxLines)
    return body;
  const dropped = lines.length - maxLines;
  const kept = lines.slice(0, maxLines).join("\n");
  return `${kept}\n… ${dropped} more lines (use --max-lines=0 for full)`;
}
/** First section whose direct parent is `sec`, in document order, or null. */
function findFirstChild(sec, all) {
  for (const candidate of all) {
    if (candidate.parent === sec)
      return candidate;
  }
  return null;
}
|
|
318
|
+
export {
|
|
319
|
+
truncateBody,
|
|
320
|
+
scan,
|
|
321
|
+
renderToc,
|
|
322
|
+
renderSection,
|
|
323
|
+
pathOf,
|
|
324
|
+
parseSelector,
|
|
325
|
+
match,
|
|
326
|
+
countLines,
|
|
327
|
+
buildSections
|
|
328
|
+
};
|
|
329
|
+
|
|
330
|
+
//# debugId=0BD654AD2CD3A5C164756E2164756E21
|
|
331
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["src/scan.ts", "src/sections.ts", "src/select.ts", "src/render.ts"],
|
|
4
|
+
"sourcesContent": [
|
|
5
|
+
"/**\n * Markdown heading scanner — the engine behind every read-path command.\n *\n * Instead of building a full CommonMark AST we walk the source line by line\n * and recognize only what `mdq` actually needs: ATX headings and fenced code\n * blocks (so `#` inside code doesn't count as a heading).\n *\n * Numbers on MDN content (see BENCHMARK.md): ~180 MB/s end-to-end on a\n * 10 MB fixture, roughly 7x faster than markdown-it and ~1000x faster than\n * mdast-util-from-markdown while returning the exact same section.\n *\n * Deliberate limitations:\n * - Setext headings (`===` / `---` underlines) are NOT recognized. mdq is\n * aimed at agent-authored markdown where ATX is ubiquitous.\n * - HTML blocks are not detected. A `<pre>` containing an ATX-looking line\n * would be misread as a heading. That's an acceptable tradeoff for 100x\n * speed; a future `--strict` flag could hand off to markdown-it.\n * - Fenced code blocks *inside a list item* that are indented 4+ spaces are\n * not recognized as fences — we only look at the first 3 columns for the\n * fence opener. A `# fake` line inside such a block would be scanned as a\n * heading. Rare in practice; document-your-way-out rather than fix.\n * - An unclosed fence at EOF leaves the scanner in \"still in fence\" state\n * to the end of the file, so any `#`-looking lines after it are ignored.\n * That's the conservative choice — prefer under-counting to over-counting.\n */\n\nexport type Heading = {\n /** 1..6 */\n level: number;\n /** Heading text with trailing closing hashes stripped. */\n title: string;\n /** 1-indexed line number. 
*/\n line: number;\n};\n\n/**\n * Return every ATX heading in `src`, in document order.\n * Runs in a single pass; O(n) in source length, O(headings) in space.\n */\nexport function scan(src: string): Heading[] {\n const out: Heading[] = [];\n const len = src.length;\n\n let i = 0;\n let lineNo = 0;\n\n // Fence state: when inFence is true every line is ignored until we see a\n // matching closing fence (same char, length >= opening length).\n let inFence = false;\n let fenceChar = 0; // charCode of ` or ~\n let fenceLen = 0;\n\n while (i <= len) {\n // Slice one line without the trailing newline. A trailing \\r from CRLF\n // is stripped below.\n const start = i;\n while (i < len && src.charCodeAt(i) !== 10 /* \\n */) i++;\n let line = src.slice(start, i);\n if (line.length > 0 && line.charCodeAt(line.length - 1) === 13 /* \\r */) {\n line = line.slice(0, line.length - 1);\n }\n lineNo++;\n\n const fence = matchFence(line);\n if (fence) {\n if (!inFence) {\n inFence = true;\n fenceChar = fence.char;\n fenceLen = fence.len;\n } else if (fence.char === fenceChar && fence.len >= fenceLen) {\n inFence = false;\n }\n } else if (!inFence) {\n const h = matchHeading(line, lineNo);\n if (h) out.push(h);\n }\n\n if (i >= len) break;\n i++; // skip the \\n\n }\n\n return out;\n}\n\n/**\n * If `line` opens or closes a fenced code block, return the fence char code\n * (` or ~) and the number of fence characters. Otherwise null.\n *\n * A fence is 0–3 spaces, then 3+ of a single fence char, then optional info\n * string. 
We only care about the opening run length; the info string is\n * ignored.\n */\nfunction matchFence(line: string): { char: number; len: number } | null {\n // Skip up to 3 leading spaces.\n let p = 0;\n while (p < 3 && line.charCodeAt(p) === 32) p++;\n const ch = line.charCodeAt(p);\n if (ch !== 96 /* ` */ && ch !== 126 /* ~ */) return null;\n let run = 0;\n while (line.charCodeAt(p + run) === ch) run++;\n if (run < 3) return null;\n // For backtick fences, CommonMark forbids backticks in the info string,\n // but we don't parse info; we only need to know this line is a fence.\n return { char: ch, len: run };\n}\n\n/**\n * If `line` is an ATX heading, return it. Otherwise null.\n *\n * Rules (CommonMark, simplified):\n * - 0–3 spaces of indent\n * - 1–6 `#`\n * - EITHER end-of-line OR a space/tab followed by content\n * - optional closing sequence: whitespace + trailing `#`s (stripped)\n */\nfunction matchHeading(line: string, lineNo: number): Heading | null {\n // Skip up to 3 leading spaces.\n let p = 0;\n while (p < 3 && line.charCodeAt(p) === 32) p++;\n if (line.charCodeAt(p) !== 35 /* # */) return null;\n\n let hashes = 0;\n while (line.charCodeAt(p + hashes) === 35) hashes++;\n if (hashes < 1 || hashes > 6) return null;\n\n const after = p + hashes;\n const afterCh = line.charCodeAt(after);\n\n // After the hashes we need either end-of-line or a space/tab. 
Anything else\n // (including `#` which is caught above by the hashes loop) disqualifies.\n if (after < line.length && afterCh !== 32 && afterCh !== 9 /* \\t */) {\n return null;\n }\n\n // Trim leading whitespace of content and trailing whitespace + closing #s.\n let contentStart = after;\n while (\n contentStart < line.length &&\n (line.charCodeAt(contentStart) === 32 || line.charCodeAt(contentStart) === 9)\n ) {\n contentStart++;\n }\n\n let end = line.length;\n // Trim trailing whitespace first.\n while (\n end > contentStart &&\n (line.charCodeAt(end - 1) === 32 || line.charCodeAt(end - 1) === 9)\n ) {\n end--;\n }\n // Strip closing `#`s only if they are preceded by whitespace (CommonMark\n // requires the closing sequence to be separated from the content).\n let closing = end;\n while (closing > contentStart && line.charCodeAt(closing - 1) === 35) closing--;\n if (\n closing < end &&\n (closing === contentStart ||\n line.charCodeAt(closing - 1) === 32 ||\n line.charCodeAt(closing - 1) === 9)\n ) {\n end = closing;\n while (\n end > contentStart &&\n (line.charCodeAt(end - 1) === 32 || line.charCodeAt(end - 1) === 9)\n ) {\n end--;\n }\n }\n\n const title = line.slice(contentStart, end);\n return { level: hashes, title, line: lineNo };\n}\n",
|
|
6
|
+
"/**\n * Turn a flat list of headings into a section tree with line ranges and\n * parent links.\n *\n * Each Section covers its heading line through the line before the next\n * heading at the same-or-higher level (or the end of the file if none).\n * That's the \"whole subtree\" range — it includes the section's body *and*\n * its descendants. `--body-only` rendering is derived at render time.\n */\n\nimport type { Heading } from \"./scan\";\n\nexport type Section = {\n level: number;\n title: string;\n /** 1-indexed line of the heading itself. */\n line_start: number;\n /** 1-indexed inclusive end of the subtree. */\n line_end: number;\n /** Nearest enclosing section, or null for top-level. */\n parent: Section | null;\n};\n\n/**\n * Build the section tree in a single pass. Preserves document order.\n *\n * Runs in O(n): every section is pushed once and popped once, and we set\n * its `line_end` at pop time. Sections still on the stack when we run out\n * of headings keep their provisional `line_end = totalLines`.\n */\nexport function buildSections(headings: Heading[], totalLines: number): Section[] {\n const out: Section[] = [];\n /** Ancestors whose subtree is still open. */\n const stack: Section[] = [];\n\n for (const h of headings) {\n // Every section on the stack with the same-or-shallower level closes at\n // h.line - 1 (the line before the new heading).\n while (stack.length > 0 && stack[stack.length - 1]!.level >= h.level) {\n const closing = stack.pop()!;\n closing.line_end = h.line - 1;\n }\n const parent = stack.length > 0 ? stack[stack.length - 1]! 
: null;\n\n const sec: Section = {\n level: h.level,\n title: h.title,\n line_start: h.line,\n // Provisional: if nothing closes this section we leave it at totalLines.\n line_end: totalLines,\n parent,\n };\n out.push(sec);\n stack.push(sec);\n }\n\n return out;\n}\n\n/**\n * Walk `sec` up to the root, collecting ancestor titles in top-down order.\n * Returns [] for a root section.\n */\nexport function pathOf(sec: Section): string[] {\n const path: string[] = [];\n let cur = sec.parent;\n while (cur) {\n path.push(cur.title);\n cur = cur.parent;\n }\n return path.reverse();\n}\n\n/**\n * Count lines in a source string. Empty string is 0; otherwise every line\n * (including the last one, whether or not it ends with a newline) is 1.\n * A trailing newline does NOT add a phantom line.\n */\nexport function countLines(src: string): number {\n if (src.length === 0) return 0;\n let n = 1;\n for (let i = 0; i < src.length; i++) {\n if (src.charCodeAt(i) === 10) n++;\n }\n // If the source ends with a newline, the line-count should equal the\n // number of newlines (not newlines + 1) since the final \"line\" is empty.\n if (src.charCodeAt(src.length - 1) === 10) n--;\n return n;\n}\n",
|
|
7
|
+
"/**\n * Selector grammar parser and matcher.\n *\n * SELECTOR := SEGMENT ( SEP SEGMENT )*\n * SEP := \">\" (descendant, any depth)\n * | \">>\" (direct child)\n * SEGMENT := LEVEL? MATCHER\n * LEVEL := \"#\"{1,6} (optional level filter)\n * MATCHER := TEXT (fuzzy, case-insensitive substring)\n * | \"=\" TEXT (exact, case-insensitive equality)\n * | \"/\" PATTERN \"/\" FLAGS? (JS regex; defaults to /.../i)\n *\n * Matching semantics:\n * - The *last* segment must match the candidate section itself.\n * - Earlier segments must match an ancestor chain walking upward from that\n * candidate, respecting each separator between them: `A >> B` requires A\n * to be B's *immediate* parent; `A > B` only requires A to be *some*\n * ancestor of B.\n */\n\nimport type { Section } from \"./sections\";\n\nexport type Op = \"descendant\" | \"child\";\nexport type Kind = \"fuzzy\" | \"exact\" | \"regex\";\n\nexport type Segment = {\n /** Operator that connects this segment to the *previous* one.\n * For the first segment this is always \"descendant\" (unused). */\n op: Op;\n /** Optional 1..6 level filter. */\n level: number | null;\n kind: Kind;\n /** The raw value (without level/kind prefix). */\n value: string;\n /** Present only for kind === \"regex\". */\n regex?: RegExp;\n};\n\nexport function parseSelector(input: string): Segment[] {\n const trimmed = input.trim();\n if (trimmed.length === 0) return [];\n\n // Split on > / >>. We walk the string character by character so we can\n // distinguish the two operators without confusing a `>>` with two\n // consecutive `>`s. We intentionally ignore `>` that appear inside a\n // regex delimiter pair because users may write `/a>b/`.\n //\n // `atSegmentStart` tracks whether the running buffer is still whitespace\n // only — only in that state can a `/` open a regex literal. 
Using\n // `cur.length === 0` instead is wrong because `>` splits leave the loop\n // pointing at a leading space that then lands in `cur` before the next\n // non-space char.\n const rawSegments: string[] = [];\n const ops: Op[] = [\"descendant\"];\n let cur = \"\";\n let i = 0;\n let inRegex = false;\n let atSegmentStart = true;\n while (i < trimmed.length) {\n const ch = trimmed[i];\n if (ch === \"/\" && (atSegmentStart || inRegex)) {\n inRegex = !inRegex;\n cur += ch;\n atSegmentStart = false;\n i++;\n continue;\n }\n if (!inRegex && ch === \">\") {\n rawSegments.push(cur.trim());\n cur = \"\";\n atSegmentStart = true;\n if (trimmed[i + 1] === \">\") {\n ops.push(\"child\");\n i += 2;\n } else {\n ops.push(\"descendant\");\n i += 1;\n }\n continue;\n }\n cur += ch;\n if (ch !== \" \" && ch !== \"\\t\") atSegmentStart = false;\n i++;\n }\n rawSegments.push(cur.trim());\n\n return rawSegments.map((s, idx) => parseSegment(s, ops[idx] ?? \"descendant\"));\n}\n\nfunction parseSegment(raw: string, op: Op): Segment {\n let s = raw;\n let level: number | null = null;\n\n // Level prefix — exactly 1..6 `#`s followed by something that is NOT\n // another `#`. The negative lookahead matters: without it, \"#######foo\"\n // would silently match level=6 value=\"#foo\".\n const levelMatch = /^(#{1,6})(?!#)\\s*(.*)$/.exec(s);\n if (levelMatch) {\n level = levelMatch[1]!.length;\n s = levelMatch[2] ?? 
\"\";\n }\n\n // Regex literal: /pattern/flags — flags default to \"i\".\n const regexMatch = /^\\/(.+)\\/([gimsuy]*)$/.exec(s);\n if (regexMatch) {\n const pattern = regexMatch[1]!;\n const flags = regexMatch[2] || \"i\";\n return {\n op,\n level,\n kind: \"regex\",\n value: pattern,\n regex: new RegExp(pattern, flags),\n };\n }\n\n // Exact match: =value.\n if (s.startsWith(\"=\")) {\n return { op, level, kind: \"exact\", value: s.slice(1).trim() };\n }\n\n return { op, level, kind: \"fuzzy\", value: s.trim() };\n}\n\nexport function match(sections: Section[], selector: Segment[]): Section[] {\n if (selector.length === 0) return [];\n const out: Section[] = [];\n for (const sec of sections) {\n if (matches(sec, selector)) out.push(sec);\n }\n return out;\n}\n\nfunction matches(sec: Section, segs: Segment[]): boolean {\n // Last segment matches the candidate itself.\n const last = segs[segs.length - 1];\n if (!last || !segmentMatchesSection(last, sec)) return false;\n\n // Walk the ancestor chain backward alongside the earlier segments.\n let cursor: Section | null = sec.parent;\n for (let i = segs.length - 2; i >= 0; i--) {\n // The separator BEFORE segs[i+1] is stored on segs[i+1].op; that's the\n // relationship we need to honor when walking from segs[i+1] back to\n // segs[i] in the ancestor chain.\n const op = segs[i + 1]!.op;\n const seg = segs[i]!;\n\n if (op === \"child\") {\n if (!cursor || !segmentMatchesSection(seg, cursor)) return false;\n cursor = cursor.parent;\n } else {\n // Descendant: find any matching ancestor.\n let found: Section | null = null;\n while (cursor) {\n if (segmentMatchesSection(seg, cursor)) {\n found = cursor;\n break;\n }\n cursor = cursor.parent;\n }\n if (!found) return false;\n cursor = found.parent;\n }\n }\n return true;\n}\n\nfunction segmentMatchesSection(seg: Segment, sec: Section): boolean {\n if (seg.level !== null && seg.level !== sec.level) return false;\n const title = sec.title;\n switch (seg.kind) {\n case 
\"exact\":\n return title.toLowerCase() === seg.value.toLowerCase();\n case \"regex\":\n return seg.regex!.test(title);\n case \"fuzzy\":\n return title.toLowerCase().includes(seg.value.toLowerCase());\n }\n}\n",
|
|
8
|
+
"/**\n * Output formatting for mdq.\n *\n * Two targets today: `renderToc` for the TOC view and `renderSection` for a\n * single section read. Both emit grep-friendly plain text with stable\n * delimiters that agents can split on.\n *\n * A future `--json` pipeline lives in cli.ts; the shapes (Heading, Section)\n * are already JSON-clean so it's a direct serialization.\n */\n\nimport type { Section } from \"./sections\";\nimport { countLines } from \"./sections\";\nimport type { PrettyFormatter } from \"./pretty\";\n\nexport type TocOptions = {\n depth?: number;\n flat?: boolean;\n};\n\nexport function renderToc(\n file: string,\n src: string,\n sections: Section[],\n opts: TocOptions,\n): string {\n const totalLines = countLines(src);\n const headerCount = sections.length;\n const headerRange = totalLines === 0 ? \"L0\" : `L1-${totalLines}`;\n const plural = headerCount === 1 ? \"heading\" : \"headings\";\n\n const out: string[] = [];\n out.push(`${file} ${headerRange} ${headerCount} ${plural}`);\n\n for (const sec of sections) {\n if (opts.depth != null && sec.level > opts.depth) continue;\n const indent = opts.flat ? \"\" : \" \".repeat(Math.max(0, sec.level - 1));\n const hashes = \"#\".repeat(sec.level);\n const range = `L${sec.line_start}-${sec.line_end}`;\n out.push(`${indent}${hashes} ${sec.title} ${range}`);\n }\n return out.join(\"\\n\");\n}\n\nexport type SectionOptions = {\n bodyOnly?: boolean;\n noBody?: boolean;\n raw?: boolean;\n maxLines?: number;\n /** Required when bodyOnly is true so we can find the first child. */\n allSections?: Section[];\n /** Optional markdown→ANSI formatter applied to the body before delimiters. 
*/\n pretty?: PrettyFormatter;\n};\n\nexport function renderSection(\n file: string,\n srcLines: string[],\n sec: Section,\n opts: SectionOptions,\n): string {\n const start = sec.line_start;\n let end = sec.line_end;\n\n if (opts.bodyOnly && opts.allSections) {\n const firstChild = findFirstChild(sec, opts.allSections);\n if (firstChild) end = firstChild.line_start - 1;\n }\n\n if (opts.noBody) {\n end = start;\n }\n\n // Clamp to source length so a stale `line_end` (e.g. countLines and\n // splitLines disagreeing on a trailing newline) can't overrun.\n const clampedEnd = Math.min(end, srcLines.length);\n let body = srcLines.slice(start - 1, clampedEnd).join(\"\\n\");\n\n // Truncate before pretty-printing so ANSI escapes can't land mid-cut.\n if (opts.maxLines != null && opts.maxLines > 0) {\n body = truncateBody(body, opts.maxLines);\n }\n\n if (opts.pretty) {\n body = opts.pretty(body);\n }\n\n if (opts.raw) return body;\n\n const hashes = \"#\".repeat(sec.level);\n const header = `── ${file} L${start}-${end} ${hashes} ${sec.title} ${\"─\".repeat(8)}`;\n const footer = `── end ${\"─\".repeat(40)}`;\n return `${header}\\n${body}\\n${footer}`;\n}\n\n/**\n * Cut `body` to the first `maxLines` lines. If anything was dropped, append\n * a marker line telling the agent how to get the rest. `maxLines <= 0`\n * disables truncation.\n */\nexport function truncateBody(body: string, maxLines: number): string {\n if (maxLines <= 0) return body;\n const lines = body.split(\"\\n\");\n if (lines.length <= maxLines) return body;\n const kept = lines.slice(0, maxLines).join(\"\\n\");\n const remaining = lines.length - maxLines;\n return `${kept}\\n\\n… ${remaining} more lines (use --max-lines=0 for full)`;\n}\n\nfunction findFirstChild(sec: Section, all: Section[]): Section | null {\n for (const candidate of all) {\n if (candidate.parent === sec) return candidate;\n }\n return null;\n}\n"
|
|
9
|
+
],
|
|
10
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuCO,SAAS,IAAI,CAAC,KAAwB;AAAA,EAC3C,MAAM,MAAiB,CAAC;AAAA,EACxB,MAAM,MAAM,IAAI;AAAA,EAEhB,IAAI,IAAI;AAAA,EACR,IAAI,SAAS;AAAA,EAIb,IAAI,UAAU;AAAA,EACd,IAAI,YAAY;AAAA,EAChB,IAAI,WAAW;AAAA,EAEf,OAAO,KAAK,KAAK;AAAA,IAGf,MAAM,QAAQ;AAAA,IACd,OAAO,IAAI,OAAO,IAAI,WAAW,CAAC,MAAM;AAAA,MAAa;AAAA,IACrD,IAAI,OAAO,IAAI,MAAM,OAAO,CAAC;AAAA,IAC7B,IAAI,KAAK,SAAS,KAAK,KAAK,WAAW,KAAK,SAAS,CAAC,MAAM,IAAa;AAAA,MACvE,OAAO,KAAK,MAAM,GAAG,KAAK,SAAS,CAAC;AAAA,IACtC;AAAA,IACA;AAAA,IAEA,MAAM,QAAQ,WAAW,IAAI;AAAA,IAC7B,IAAI,OAAO;AAAA,MACT,IAAI,CAAC,SAAS;AAAA,QACZ,UAAU;AAAA,QACV,YAAY,MAAM;AAAA,QAClB,WAAW,MAAM;AAAA,MACnB,EAAO,SAAI,MAAM,SAAS,aAAa,MAAM,OAAO,UAAU;AAAA,QAC5D,UAAU;AAAA,MACZ;AAAA,IACF,EAAO,SAAI,CAAC,SAAS;AAAA,MACnB,MAAM,IAAI,aAAa,MAAM,MAAM;AAAA,MACnC,IAAI;AAAA,QAAG,IAAI,KAAK,CAAC;AAAA,IACnB;AAAA,IAEA,IAAI,KAAK;AAAA,MAAK;AAAA,IACd;AAAA,EACF;AAAA,EAEA,OAAO;AAAA;AAWT,SAAS,UAAU,CAAC,MAAoD;AAAA,EAEtE,IAAI,IAAI;AAAA,EACR,OAAO,IAAI,KAAK,KAAK,WAAW,CAAC,MAAM;AAAA,IAAI;AAAA,EAC3C,MAAM,KAAK,KAAK,WAAW,CAAC;AAAA,EAC5B,IAAI,OAAO,MAAc,OAAO;AAAA,IAAa,OAAO;AAAA,EACpD,IAAI,MAAM;AAAA,EACV,OAAO,KAAK,WAAW,IAAI,GAAG,MAAM;AAAA,IAAI;AAAA,EACxC,IAAI,MAAM;AAAA,IAAG,OAAO;AAAA,EAGpB,OAAO,EAAE,MAAM,IAAI,KAAK,IAAI;AAAA;AAY9B,SAAS,YAAY,CAAC,MAAc,QAAgC;AAAA,EAElE,IAAI,IAAI;AAAA,EACR,OAAO,IAAI,KAAK,KAAK,WAAW,CAAC,MAAM;AAAA,IAAI;AAAA,EAC3C,IAAI,KAAK,WAAW,CAAC,MAAM;AAAA,IAAY,OAAO;AAAA,EAE9C,IAAI,SAAS;AAAA,EACb,OAAO,KAAK,WAAW,IAAI,MAAM,MAAM;AAAA,IAAI;AAAA,EAC3C,IAAI,SAAS,KAAK,SAAS;AAAA,IAAG,OAAO;AAAA,EAErC,MAAM,QAAQ,IAAI;AAAA,EAClB,MAAM,UAAU,KAAK,WAAW,KAAK;AAAA,EAIrC,IAAI,QAAQ,KAAK,UAAU,YAAY,MAAM,YAAY,GAAY;AAAA,IACnE,OAAO;AAAA,EACT;AAAA,EAGA,IAAI,eAAe;AAAA,EACnB,OACE,eAAe,KAAK,WACnB,KAAK,WAAW,YAAY,MAAM,MAAM,KAAK,WAAW,YAAY,MAAM,IAC3E;AAAA,IACA;AAAA,EACF;AAAA,EAEA,IAAI,MAAM,KAAK;AAAA,EAEf,OACE,MAAM,iBACL,KAAK,WAAW,MAAM,CAAC,MAAM,MAAM,KAAK,WAAW,MAAM,CAAC,MAAM,IACjE;AAAA,IACA;AAAA,EACF;AAAA,EAGA,IAAI,UAAU;AAAA,EACd,OAAO,UAAU,gBAAgB,KAAK,WAAW,UAAU,CAAC,MAAM;AAAA,IAAI;
AAAA,EACtE,IACE,UAAU,QACT,YAAY,gBACX,KAAK,WAAW,UAAU,CAAC,MAAM,MACjC,KAAK,WAAW,UAAU,CAAC,MAAM,IACnC;AAAA,IACA,MAAM;AAAA,IACN,OACE,MAAM,iBACL,KAAK,WAAW,MAAM,CAAC,MAAM,MAAM,KAAK,WAAW,MAAM,CAAC,MAAM,IACjE;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,KAAK,MAAM,cAAc,GAAG;AAAA,EAC1C,OAAO,EAAE,OAAO,QAAQ,OAAO,MAAM,OAAO;AAAA;;AC7IvC,SAAS,aAAa,CAAC,UAAqB,YAA+B;AAAA,EAChF,MAAM,MAAiB,CAAC;AAAA,EAExB,MAAM,QAAmB,CAAC;AAAA,EAE1B,WAAW,KAAK,UAAU;AAAA,IAGxB,OAAO,MAAM,SAAS,KAAK,MAAM,MAAM,SAAS,GAAI,SAAS,EAAE,OAAO;AAAA,MACpE,MAAM,UAAU,MAAM,IAAI;AAAA,MAC1B,QAAQ,WAAW,EAAE,OAAO;AAAA,IAC9B;AAAA,IACA,MAAM,SAAS,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAM;AAAA,IAE7D,MAAM,MAAe;AAAA,MACnB,OAAO,EAAE;AAAA,MACT,OAAO,EAAE;AAAA,MACT,YAAY,EAAE;AAAA,MAEd,UAAU;AAAA,MACV;AAAA,IACF;AAAA,IACA,IAAI,KAAK,GAAG;AAAA,IACZ,MAAM,KAAK,GAAG;AAAA,EAChB;AAAA,EAEA,OAAO;AAAA;AAOF,SAAS,MAAM,CAAC,KAAwB;AAAA,EAC7C,MAAM,OAAiB,CAAC;AAAA,EACxB,IAAI,MAAM,IAAI;AAAA,EACd,OAAO,KAAK;AAAA,IACV,KAAK,KAAK,IAAI,KAAK;AAAA,IACnB,MAAM,IAAI;AAAA,EACZ;AAAA,EACA,OAAO,KAAK,QAAQ;AAAA;AAQf,SAAS,UAAU,CAAC,KAAqB;AAAA,EAC9C,IAAI,IAAI,WAAW;AAAA,IAAG,OAAO;AAAA,EAC7B,IAAI,IAAI;AAAA,EACR,SAAS,IAAI,EAAG,IAAI,IAAI,QAAQ,KAAK;AAAA,IACnC,IAAI,IAAI,WAAW,CAAC,MAAM;AAAA,MAAI;AAAA,EAChC;AAAA,EAGA,IAAI,IAAI,WAAW,IAAI,SAAS,CAAC,MAAM;AAAA,IAAI;AAAA,EAC3C,OAAO;AAAA;;ACjDF,SAAS,aAAa,CAAC,OAA0B;AAAA,EACtD,MAAM,UAAU,MAAM,KAAK;AAAA,EAC3B,IAAI,QAAQ,WAAW;AAAA,IAAG,OAAO,CAAC;AAAA,EAYlC,MAAM,cAAwB,CAAC;AAAA,EAC/B,MAAM,MAAY,CAAC,YAAY;AAAA,EAC/B,IAAI,MAAM;AAAA,EACV,IAAI,IAAI;AAAA,EACR,IAAI,UAAU;AAAA,EACd,IAAI,iBAAiB;AAAA,EACrB,OAAO,IAAI,QAAQ,QAAQ;AAAA,IACzB,MAAM,KAAK,QAAQ;AAAA,IACnB,IAAI,OAAO,QAAQ,kBAAkB,UAAU;AAAA,MAC7C,UAAU,CAAC;AAAA,MACX,OAAO;AAAA,MACP,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAAA,IACA,IAAI,CAAC,WAAW,OAAO,KAAK;AAAA,MAC1B,YAAY,KAAK,IAAI,KAAK,CAAC;AAAA,MAC3B,MAAM;AAAA,MACN,iBAAiB;AAAA,MACjB,IAAI,QAAQ,IAAI,OAAO,KAAK;AAAA,QAC1B,IAAI,KAAK,OAAO;AAAA,QAChB,KAAK;AAAA,MACP,EAAO;AAAA,QACL,IAAI,KAAK,YAAY;AAAA,QACrB,KAAK;AAAA;AAAA,MAEP;AAAA,IACF;AAAA,IACA,OAAO;AAAA,IACP,IA
AI,OAAO,OAAO,OAAO;AAAA,MAAM,iBAAiB;AAAA,IAChD;AAAA,EACF;AAAA,EACA,YAAY,KAAK,IAAI,KAAK,CAAC;AAAA,EAE3B,OAAO,YAAY,IAAI,CAAC,GAAG,QAAQ,aAAa,GAAG,IAAI,QAAQ,YAAY,CAAC;AAAA;AAG9E,SAAS,YAAY,CAAC,KAAa,IAAiB;AAAA,EAClD,IAAI,IAAI;AAAA,EACR,IAAI,QAAuB;AAAA,EAK3B,MAAM,aAAa,yBAAyB,KAAK,CAAC;AAAA,EAClD,IAAI,YAAY;AAAA,IACd,QAAQ,WAAW,GAAI;AAAA,IACvB,IAAI,WAAW,MAAM;AAAA,EACvB;AAAA,EAGA,MAAM,aAAa,wBAAwB,KAAK,CAAC;AAAA,EACjD,IAAI,YAAY;AAAA,IACd,MAAM,UAAU,WAAW;AAAA,IAC3B,MAAM,QAAQ,WAAW,MAAM;AAAA,IAC/B,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,OAAO;AAAA,MACP,OAAO,IAAI,OAAO,SAAS,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAGA,IAAI,EAAE,WAAW,GAAG,GAAG;AAAA,IACrB,OAAO,EAAE,IAAI,OAAO,MAAM,SAAS,OAAO,EAAE,MAAM,CAAC,EAAE,KAAK,EAAE;AAAA,EAC9D;AAAA,EAEA,OAAO,EAAE,IAAI,OAAO,MAAM,SAAS,OAAO,EAAE,KAAK,EAAE;AAAA;AAG9C,SAAS,KAAK,CAAC,UAAqB,UAAgC;AAAA,EACzE,IAAI,SAAS,WAAW;AAAA,IAAG,OAAO,CAAC;AAAA,EACnC,MAAM,MAAiB,CAAC;AAAA,EACxB,WAAW,OAAO,UAAU;AAAA,IAC1B,IAAI,QAAQ,KAAK,QAAQ;AAAA,MAAG,IAAI,KAAK,GAAG;AAAA,EAC1C;AAAA,EACA,OAAO;AAAA;AAGT,SAAS,OAAO,CAAC,KAAc,MAA0B;AAAA,EAEvD,MAAM,OAAO,KAAK,KAAK,SAAS;AAAA,EAChC,IAAI,CAAC,QAAQ,CAAC,sBAAsB,MAAM,GAAG;AAAA,IAAG,OAAO;AAAA,EAGvD,IAAI,SAAyB,IAAI;AAAA,EACjC,SAAS,IAAI,KAAK,SAAS,EAAG,KAAK,GAAG,KAAK;AAAA,IAIzC,MAAM,KAAK,KAAK,IAAI,GAAI;AAAA,IACxB,MAAM,MAAM,KAAK;AAAA,IAEjB,IAAI,OAAO,SAAS;AAAA,MAClB,IAAI,CAAC,UAAU,CAAC,sBAAsB,KAAK,MAAM;AAAA,QAAG,OAAO;AAAA,MAC3D,SAAS,OAAO;AAAA,IAClB,EAAO;AAAA,MAEL,IAAI,QAAwB;AAAA,MAC5B,OAAO,QAAQ;AAAA,QACb,IAAI,sBAAsB,KAAK,MAAM,GAAG;AAAA,UACtC,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,QACA,SAAS,OAAO;AAAA,MAClB;AAAA,MACA,IAAI,CAAC;AAAA,QAAO,OAAO;AAAA,MACnB,SAAS,MAAM;AAAA;AAAA,EAEnB;AAAA,EACA,OAAO;AAAA;AAGT,SAAS,qBAAqB,CAAC,KAAc,KAAuB;AAAA,EAClE,IAAI,IAAI,UAAU,QAAQ,IAAI,UAAU,IAAI;AAAA,IAAO,OAAO;AAAA,EAC1D,MAAM,QAAQ,IAAI;AAAA,EAClB,QAAQ,IAAI;AAAA,SACL;AAAA,MACH,OAAO,MAAM,YAAY,MAAM,IAAI,MAAM,YAAY;AAAA,SAClD;AAAA,MACH,OAAO,IAAI,MAAO,KAAK,KAAK;AAAA,SACzB;AAAA,MACH,OAAO,MAAM,YAAY,EAAE,SAAS,IAAI,MAAM,YAAY,CAAC;AAAA;AAAA;;AC5J1D,SAAS,SAAS,CACvB,MACA,KACA,UACA,MACQ;
AAAA,EACR,MAAM,aAAa,WAAW,GAAG;AAAA,EACjC,MAAM,cAAc,SAAS;AAAA,EAC7B,MAAM,cAAc,eAAe,IAAI,OAAO,MAAM;AAAA,EACpD,MAAM,SAAS,gBAAgB,IAAI,YAAY;AAAA,EAE/C,MAAM,MAAgB,CAAC;AAAA,EACvB,IAAI,KAAK,GAAG,SAAS,gBAAgB,eAAe,QAAQ;AAAA,EAE5D,WAAW,OAAO,UAAU;AAAA,IAC1B,IAAI,KAAK,SAAS,QAAQ,IAAI,QAAQ,KAAK;AAAA,MAAO;AAAA,IAClD,MAAM,SAAS,KAAK,OAAO,KAAK,KAAK,OAAO,KAAK,IAAI,GAAG,IAAI,QAAQ,CAAC,CAAC;AAAA,IACtE,MAAM,SAAS,IAAI,OAAO,IAAI,KAAK;AAAA,IACnC,MAAM,QAAQ,IAAI,IAAI,cAAc,IAAI;AAAA,IACxC,IAAI,KAAK,GAAG,SAAS,UAAU,IAAI,UAAU,OAAO;AAAA,EACtD;AAAA,EACA,OAAO,IAAI,KAAK;AAAA,CAAI;AAAA;AAcf,SAAS,aAAa,CAC3B,MACA,UACA,KACA,MACQ;AAAA,EACR,MAAM,QAAQ,IAAI;AAAA,EAClB,IAAI,MAAM,IAAI;AAAA,EAEd,IAAI,KAAK,YAAY,KAAK,aAAa;AAAA,IACrC,MAAM,aAAa,eAAe,KAAK,KAAK,WAAW;AAAA,IACvD,IAAI;AAAA,MAAY,MAAM,WAAW,aAAa;AAAA,EAChD;AAAA,EAEA,IAAI,KAAK,QAAQ;AAAA,IACf,MAAM;AAAA,EACR;AAAA,EAIA,MAAM,aAAa,KAAK,IAAI,KAAK,SAAS,MAAM;AAAA,EAChD,IAAI,OAAO,SAAS,MAAM,QAAQ,GAAG,UAAU,EAAE,KAAK;AAAA,CAAI;AAAA,EAG1D,IAAI,KAAK,YAAY,QAAQ,KAAK,WAAW,GAAG;AAAA,IAC9C,OAAO,aAAa,MAAM,KAAK,QAAQ;AAAA,EACzC;AAAA,EAEA,IAAI,KAAK,QAAQ;AAAA,IACf,OAAO,KAAK,OAAO,IAAI;AAAA,EACzB;AAAA,EAEA,IAAI,KAAK;AAAA,IAAK,OAAO;AAAA,EAErB,MAAM,SAAS,IAAI,OAAO,IAAI,KAAK;AAAA,EACnC,MAAM,SAAS,MAAK,UAAU,SAAS,QAAQ,UAAU,IAAI,SAAS,IAAI,OAAO,CAAC;AAAA,EAClF,MAAM,SAAS,UAAS,IAAI,OAAO,EAAE;AAAA,EACrC,OAAO,GAAG;AAAA,EAAW;AAAA,EAAS;AAAA;AAQzB,SAAS,YAAY,CAAC,MAAc,UAA0B;AAAA,EACnE,IAAI,YAAY;AAAA,IAAG,OAAO;AAAA,EAC1B,MAAM,QAAQ,KAAK,MAAM;AAAA,CAAI;AAAA,EAC7B,IAAI,MAAM,UAAU;AAAA,IAAU,OAAO;AAAA,EACrC,MAAM,OAAO,MAAM,MAAM,GAAG,QAAQ,EAAE,KAAK;AAAA,CAAI;AAAA,EAC/C,MAAM,YAAY,MAAM,SAAS;AAAA,EACjC,OAAO,GAAG;AAAA;AAAA,IAAY;AAAA;AAGxB,SAAS,cAAc,CAAC,KAAc,KAAgC;AAAA,EACpE,WAAW,aAAa,KAAK;AAAA,IAC3B,IAAI,UAAU,WAAW;AAAA,MAAK,OAAO;AAAA,EACvC;AAAA,EACA,OAAO;AAAA;",
|
|
11
|
+
"debugId": "F78549B744E4995264756E2164756E21",
|
|
12
|
+
"names": []
|
|
13
|
+
}
|