@timeax/scaffold 0.0.2 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.vscode/settings.json +12 -0
- package/dist/ast.cjs +438 -0
- package/dist/ast.cjs.map +1 -0
- package/dist/ast.d.cts +152 -0
- package/dist/ast.d.ts +152 -0
- package/dist/ast.mjs +433 -0
- package/dist/ast.mjs.map +1 -0
- package/dist/index.cjs +59 -12
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +68 -1
- package/dist/index.d.ts +68 -1
- package/dist/index.mjs +57 -13
- package/dist/index.mjs.map +1 -1
- package/package.json +9 -3
- package/readme.md +285 -81
- package/src/ast/format.ts +261 -0
- package/src/ast/index.ts +2 -0
- package/src/ast/parser.ts +593 -0
- package/src/core/config-loader.ts +4 -3
- package/src/core/init-scaffold.ts +8 -3
- package/src/core/structure-txt.ts +221 -174
- package/src/index.ts +3 -2
- package/src/schema/config.ts +10 -1
- package/src/schema/index.ts +1 -0
- package/test/format-roundtrip.spec.ts +20 -0
- package/test/format.spec.ts +104 -0
- package/test/parser-diagnostics.spec.ts +86 -0
- package/test/parser-tree.spec.ts +102 -0
- package/tsup.config.ts +61 -43
- package/vitest.config.ts +9 -0
- package/dist/cli.cjs +0 -1141
- package/dist/cli.cjs.map +0 -1
- package/dist/cli.mjs +0 -1130
- package/dist/cli.mjs.map +0 -1
package/dist/ast.cjs
ADDED
@@ -0,0 +1,438 @@
+'use strict';
+
+// src/util/fs-utils.ts
+function toPosixPath(p) {
+  return p.replace(/\\/g, "/");
+}
+
+// src/ast/parser.ts
+function parseStructureAst(text, opts = {}) {
+  const indentStep = opts.indentStep ?? 2;
+  const mode = opts.mode ?? "loose";
+  const diagnostics = [];
+  const lines = [];
+  const rawLines = text.split(/\r?\n/);
+  for (let i = 0; i < rawLines.length; i++) {
+    const raw = rawLines[i];
+    const lineNo = i + 1;
+    const m = raw.match(/^(\s*)(.*)$/);
+    const indentRaw = m ? m[1] : "";
+    const content = m ? m[2] : "";
+    const { indentSpaces, hasTabs } = measureIndent(indentRaw, indentStep);
+    if (hasTabs) {
+      diagnostics.push({
+        line: lineNo,
+        message: "Tabs detected in indentation. Consider using spaces only for consistent levels.",
+        severity: mode === "strict" ? "warning" : "info",
+        code: "indent-tabs"
+      });
+    }
+    const trimmed = content.trim();
+    let kind;
+    if (!trimmed) {
+      kind = "blank";
+    } else if (trimmed.startsWith("#") || trimmed.startsWith("//")) {
+      kind = "comment";
+    } else {
+      kind = "entry";
+    }
+    lines.push({
+      index: i,
+      lineNo,
+      raw,
+      kind,
+      indentSpaces,
+      content
+    });
+  }
+  const rootNodes = [];
+  const stack = [];
+  const depthCtx = {
+    lastIndentSpaces: null,
+    lastDepth: null,
+    lastWasFile: false
+  };
+  for (const line of lines) {
+    if (line.kind !== "entry") continue;
+    const { entry, depth, diags } = parseEntryLine(
+      line,
+      indentStep,
+      mode,
+      depthCtx
+    );
+    diagnostics.push(...diags);
+    if (!entry) {
+      continue;
+    }
+    attachNode(entry, depth, line, rootNodes, stack, diagnostics, mode);
+    depthCtx.lastWasFile = !entry.isDir;
+  }
+  return {
+    rootNodes,
+    lines,
+    diagnostics,
+    options: {
+      indentStep,
+      mode
+    }
+  };
+}
+function measureIndent(rawIndent, indentStep) {
+  let spaces = 0;
+  let hasTabs = false;
+  for (const ch of rawIndent) {
+    if (ch === " ") {
+      spaces += 1;
+    } else if (ch === "\t") {
+      hasTabs = true;
+      spaces += indentStep;
+    }
+  }
+  return { indentSpaces: spaces, hasTabs };
+}
+function computeDepth(line, indentStep, mode, ctx, diagnostics) {
+  let spaces = line.indentSpaces;
+  if (spaces < 0) spaces = 0;
+  let depth;
+  if (ctx.lastIndentSpaces == null || ctx.lastDepth == null) {
+    depth = 0;
+  } else {
+    const prevSpaces = ctx.lastIndentSpaces;
+    const prevDepth = ctx.lastDepth;
+    if (spaces > prevSpaces) {
+      const diff = spaces - prevSpaces;
+      if (ctx.lastWasFile) {
+        diagnostics.push({
+          line: line.lineNo,
+          message: "Entry appears indented under a file; treating it as a sibling of the file instead of a child.",
+          severity: mode === "strict" ? "error" : "warning",
+          code: "child-of-file-loose"
+        });
+        depth = prevDepth;
+      } else {
+        if (diff > indentStep) {
+          diagnostics.push({
+            line: line.lineNo,
+            message: `Indentation jumps from ${prevSpaces} to ${spaces} spaces; treating as one level deeper.`,
+            severity: mode === "strict" ? "error" : "warning",
+            code: "indent-skip-level"
+          });
+        }
+        depth = prevDepth + 1;
+      }
+    } else if (spaces === prevSpaces) {
+      depth = prevDepth;
+    } else {
+      const diff = prevSpaces - spaces;
+      const steps = Math.round(diff / indentStep);
+      if (diff % indentStep !== 0) {
+        diagnostics.push({
+          line: line.lineNo,
+          message: `Indentation decreases from ${prevSpaces} to ${spaces} spaces, which is not a multiple of indent step (${indentStep}).`,
+          severity: mode === "strict" ? "error" : "warning",
+          code: "indent-misaligned"
+        });
+      }
+      depth = Math.max(prevDepth - steps, 0);
+    }
+  }
+  ctx.lastIndentSpaces = spaces;
+  ctx.lastDepth = depth;
+  return depth;
+}
+function parseEntryLine(line, indentStep, mode, ctx) {
+  const diags = [];
+  const depth = computeDepth(line, indentStep, mode, ctx, diags);
+  const { contentWithoutComment } = extractInlineCommentParts(line.content);
+  const trimmed = contentWithoutComment.trim();
+  if (!trimmed) {
+    return { entry: null, depth, diags };
+  }
+  const parts = trimmed.split(/\s+/);
+  const pathToken = parts[0];
+  const annotationTokens = parts.slice(1);
+  if (pathToken.includes(":")) {
+    diags.push({
+      line: line.lineNo,
+      message: 'Path token contains ":" which is reserved for annotations. This is likely a mistake.',
+      severity: mode === "strict" ? "error" : "warning",
+      code: "path-colon"
+    });
+  }
+  const isDir = pathToken.endsWith("/");
+  const segmentName = pathToken;
+  let stub;
+  const include = [];
+  const exclude = [];
+  for (const token of annotationTokens) {
+    if (token.startsWith("@stub:")) {
+      stub = token.slice("@stub:".length);
+    } else if (token.startsWith("@include:")) {
+      const val = token.slice("@include:".length);
+      if (val) {
+        include.push(
+          ...val.split(",").map((s) => s.trim()).filter(Boolean)
+        );
+      }
+    } else if (token.startsWith("@exclude:")) {
+      const val = token.slice("@exclude:".length);
+      if (val) {
+        exclude.push(
+          ...val.split(",").map((s) => s.trim()).filter(Boolean)
+        );
+      }
+    } else if (token.startsWith("@")) {
+      diags.push({
+        line: line.lineNo,
+        message: `Unknown annotation token "${token}".`,
+        severity: "info",
+        code: "unknown-annotation"
+      });
+    }
+  }
+  const entry = {
+    segmentName,
+    isDir,
+    stub,
+    include: include.length ? include : void 0,
+    exclude: exclude.length ? exclude : void 0
+  };
+  return { entry, depth, diags };
+}
+function mapThrough(content) {
+  let cutIndex = -1;
+  const len = content.length;
+  for (let i = 0; i < len; i++) {
+    const ch = content[i];
+    const prev = i > 0 ? content[i - 1] : "";
+    if (ch === "#") {
+      if (i === 0) {
+        continue;
+      }
+      if (prev === " " || prev === "\t") {
+        cutIndex = i;
+        break;
+      }
+    }
+    if (ch === "/" && i + 1 < len && content[i + 1] === "/" && (prev === " " || prev === "\t")) {
+      cutIndex = i;
+      break;
+    }
+  }
+  return cutIndex;
+}
+function extractInlineCommentParts(content) {
+  const cutIndex = mapThrough(content);
+  if (cutIndex === -1) {
+    return {
+      contentWithoutComment: content,
+      inlineComment: null
+    };
+  }
+  return {
+    contentWithoutComment: content.slice(0, cutIndex),
+    inlineComment: content.slice(cutIndex)
+  };
+}
+function attachNode(entry, depth, line, rootNodes, stack, diagnostics, mode) {
+  const lineNo = line.lineNo;
+  while (stack.length > depth) {
+    stack.pop();
+  }
+  let parent = null;
+  if (depth > 0) {
+    const candidate = stack[depth - 1];
+    if (!candidate) {
+      diagnostics.push({
+        line: lineNo,
+        message: `Entry has indent depth ${depth} but no parent at depth ${depth - 1}. Treating as root.`,
+        severity: mode === "strict" ? "error" : "warning",
+        code: "missing-parent"
+      });
+    } else if (candidate.type === "file") {
+      if (mode === "strict") {
+        diagnostics.push({
+          line: lineNo,
+          message: `Cannot attach child under file "${candidate.path}".`,
+          severity: "error",
+          code: "child-of-file"
+        });
+      } else {
+        diagnostics.push({
+          line: lineNo,
+          message: `Entry appears under file "${candidate.path}". Attaching as sibling at depth ${candidate.depth}.`,
+          severity: "warning",
+          code: "child-of-file-loose"
+        });
+        while (stack.length > candidate.depth) {
+          stack.pop();
+        }
+      }
+    } else {
+      parent = candidate;
+    }
+  }
+  const parentPath = parent ? parent.path.replace(/\/$/, "") : "";
+  const normalizedSegment = toPosixPath(entry.segmentName.replace(/\/+$/, ""));
+  const fullPath = parentPath ? `${parentPath}/${normalizedSegment}${entry.isDir ? "/" : ""}` : `${normalizedSegment}${entry.isDir ? "/" : ""}`;
+  const baseNode = {
+    type: entry.isDir ? "dir" : "file",
+    name: entry.segmentName,
+    depth,
+    line: lineNo,
+    path: fullPath,
+    parent,
+    ...entry.stub ? { stub: entry.stub } : {},
+    ...entry.include ? { include: entry.include } : {},
+    ...entry.exclude ? { exclude: entry.exclude } : {}
+  };
+  if (entry.isDir) {
+    const dirNode = {
+      ...baseNode,
+      type: "dir",
+      children: []
+    };
+    if (parent) {
+      parent.children.push(dirNode);
+    } else {
+      rootNodes.push(dirNode);
+    }
+    while (stack.length > depth) {
+      stack.pop();
+    }
+    stack[depth] = dirNode;
+  } else {
+    const fileNode = {
+      ...baseNode,
+      type: "file"
+    };
+    if (parent) {
+      parent.children.push(fileNode);
+    } else {
+      rootNodes.push(fileNode);
+    }
+  }
+}
+
+// src/ast/format.ts
+function formatStructureText(text, options = {}) {
+  const indentStep = options.indentStep ?? 2;
+  const mode = options.mode ?? "loose";
+  const normalizeNewlines = options.normalizeNewlines === void 0 ? true : options.normalizeNewlines;
+  const trimTrailingWhitespace = options.trimTrailingWhitespace === void 0 ? true : options.trimTrailingWhitespace;
+  const normalizeAnnotations = options.normalizeAnnotations === void 0 ? true : options.normalizeAnnotations;
+  const ast = parseStructureAst(text, {
+    indentStep,
+    mode
+  });
+  const rawLines = text.split(/\r?\n/);
+  const lineCount = rawLines.length;
+  if (ast.lines.length !== lineCount) {
+    return {
+      text: basicNormalize(text, { normalizeNewlines, trimTrailingWhitespace }),
+      ast
+    };
+  }
+  const entryLineIndexes = [];
+  const inlineComments = [];
+  for (let i = 0; i < lineCount; i++) {
+    const lineMeta = ast.lines[i];
+    if (lineMeta.kind === "entry") {
+      entryLineIndexes.push(i);
+      const { inlineComment } = extractInlineCommentParts(lineMeta.content);
+      inlineComments.push(inlineComment);
+    }
+  }
+  const flattened = [];
+  flattenAstNodes(ast.rootNodes, 0, flattened);
+  if (flattened.length !== entryLineIndexes.length) {
+    return {
+      text: basicNormalize(text, { normalizeNewlines, trimTrailingWhitespace }),
+      ast
+    };
+  }
+  const canonicalEntryLines = flattened.map(
+    ({ node, level }) => formatAstNodeLine(node, level, indentStep, normalizeAnnotations)
+  );
+  const resultLines = [];
+  let entryIdx = 0;
+  for (let i = 0; i < lineCount; i++) {
+    const lineMeta = ast.lines[i];
+    const originalLine = rawLines[i];
+    if (lineMeta.kind === "entry") {
+      const base = canonicalEntryLines[entryIdx].replace(/[ \t]+$/g, "");
+      const inline = inlineComments[entryIdx];
+      entryIdx++;
+      if (inline) {
+        resultLines.push(base + " " + inline);
+      } else {
+        resultLines.push(base);
+      }
+    } else {
+      let out = originalLine;
+      if (trimTrailingWhitespace) {
+        out = out.replace(/[ \t]+$/g, "");
+      }
+      resultLines.push(out);
+    }
+  }
+  const eol = normalizeNewlines ? detectPreferredEol(text) : getRawEol(text);
+  return {
+    text: resultLines.join(eol),
+    ast
+  };
+}
+function basicNormalize(text, opts) {
+  const lines = text.split(/\r?\n/);
+  const normalizedLines = opts.trimTrailingWhitespace ? lines.map((line) => line.replace(/[ \t]+$/g, "")) : lines;
+  const eol = opts.normalizeNewlines ? detectPreferredEol(text) : getRawEol(text);
+  return normalizedLines.join(eol);
+}
+function detectPreferredEol(text) {
+  const crlfCount = (text.match(/\r\n/g) || []).length;
+  const lfCount = (text.match(/(?<!\r)\n/g) || []).length;
+  if (crlfCount === 0 && lfCount === 0) {
+    return "\n";
+  }
+  if (crlfCount > lfCount) {
+    return "\r\n";
+  }
+  return "\n";
+}
+function getRawEol(text) {
+  return text.includes("\r\n") ? "\r\n" : "\n";
+}
+function flattenAstNodes(nodes, level, out) {
+  for (const node of nodes) {
+    out.push({ node, level });
+    if (node.type === "dir" && node.children && node.children.length) {
+      flattenAstNodes(node.children, level + 1, out);
+    }
+  }
+}
+function formatAstNodeLine(node, level, indentStep, normalizeAnnotations) {
+  const indent = " ".repeat(indentStep * level);
+  const baseName = node.name;
+  if (!normalizeAnnotations) {
+    return indent + baseName;
+  }
+  const tokens = [];
+  if (node.stub) {
+    tokens.push(`@stub:${node.stub}`);
+  }
+  if (node.include && node.include.length > 0) {
+    tokens.push(`@include:${node.include.join(",")}`);
+  }
+  if (node.exclude && node.exclude.length > 0) {
+    tokens.push(`@exclude:${node.exclude.join(",")}`);
+  }
+  const annotations = tokens.length ? " " + tokens.join(" ") : "";
+  return indent + baseName + annotations;
+}
+
+exports.extractInlineCommentParts = extractInlineCommentParts;
+exports.formatStructureText = formatStructureText;
+exports.mapThrough = mapThrough;
+exports.parseStructureAst = parseStructureAst;
+//# sourceMappingURL=ast.cjs.map
+//# sourceMappingURL=ast.cjs.map
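The following is a minimal usage sketch (not taken from the package readme) of the two main functions the new ast bundle exports, parseStructureAst and formatStructureText. It assumes the bundle is reachable through an "./ast" subpath such as "@timeax/scaffold/ast"; the actual import specifier depends on the package.json "exports" changes in this release, and the sample structure text and names below are illustrative only.

// Hedged sketch: the "./ast" subpath specifier is an assumption, not confirmed by this diff.
import { parseStructureAst, formatStructureText } from "@timeax/scaffold/ast";

const structure = [
  "# project layout",
  "src/",
  "  schema/ @include:*.ts @exclude:*.spec.ts",
  "    index.ts @stub:barrel  // barrel file",
  "  index.ts",
  "",
].join("\n");

// Parsing never throws; problems are reported through the diagnostics array instead.
const ast = parseStructureAst(structure, { indentStep: 2, mode: "loose" });
for (const d of ast.diagnostics) {
  console.log(`${d.severity} (line ${d.line}): ${d.message}`);
}
console.log(ast.rootNodes.map((n) => n.path)); // ["src/"]

// Re-print with canonical indentation and annotation order; blank lines, full-line
// comments and inline comments are preserved (the double space before "// barrel file"
// collapses to a single space).
const { text } = formatStructureText(structure, { indentStep: 2 });
console.log(text);

The same calls should work against the CommonJS build shown above via require, provided the subpath is exported for both module formats.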
package/dist/ast.cjs.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"sources":["../src/util/fs-utils.ts","../src/ast/parser.ts","../src/ast/format.ts"],"names":[],"mappings":"…","file":"ast.cjs","sourcesContent":["…"]}
(Single minified source-map line; the full mappings string and the TypeScript sources embedded in sourcesContent are omitted here as unreadable machine-generated output.)