@isentinel/weld 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.mjs ADDED
@@ -0,0 +1,1613 @@
1
+ import { n as validateConfig } from "./schema-Cp_hoJ0u.mjs";
2
+ import { type } from "arktype";
3
+ import { loadConfig } from "c12";
4
+ import process from "node:process";
5
+ import { parseArgs } from "node:util";
6
+ import { loadRojoProject, mapFsPathToDataModel } from "@isentinel/rojo-utils";
7
+ import fs from "node:fs";
8
+ import path from "node:path";
9
+ import { GenMapping, addMapping, setSourceContent, toEncodedMap } from "@jridgewell/gen-mapping";
10
+ import { generateDtsBundle } from "dts-bundle-generator";
11
+ import { createEnv } from "@t3-oss/env-core";
12
+ import { spawnLute, writeTemporaryLuauScript } from "@isentinel/luau-ast";
13
+ import { defuFn } from "defu";
14
+ import color from "tinyrainbow";
15
+ //#region src/codegen/bundle-generator.ts
16
/**
 * Generate bundled output with flattened chunks.
 *
 * Empty chunks are dropped; the entry source is inlined at top level after all
 * chunk bodies, followed by a trailing blank line.
 *
 * @param chunks - Chunks in topological order.
 * @param entrySource - Rewritten entry point source (inlined at top level).
 * @param options - Optional bundle generation options.
 * @returns The complete bundled Luau source.
 */
function generateChunkedBundle(chunks, entrySource, options = {}) {
	const { entryInfo, onMapping } = options;
	const output = bundlePrelude(options);
	const populatedChunks = chunks.filter((chunk) => chunk.modules.length > 0);
	for (const chunk of populatedChunks) {
		appendWrappedChunk({ chunk, lines: output, onMapping });
	}
	// Drop a single trailing newline so the entry does not introduce a blank line.
	const entryBody = entrySource.replace(/\n$/, "");
	const firstEntryLine = output.length + 1;
	output.push(entryBody, "");
	const shouldMapEntry = entryInfo !== undefined && onMapping !== undefined;
	if (shouldMapEntry) {
		emitLineMappings({
			content: entryInfo.originalContent,
			lineCount: countLines(entryBody),
			onMapping,
			sourcePath: entryInfo.sourcePath,
			startLine: firstEntryLine
		});
	}
	return output.join("\n");
}
47
/**
 * Build the shared lines that open every bundle: the optional TS global
 * capture and the `__WELD_MODULES` registry table.
 *
 * @param options - Bundle options; `includeTsGlobal: false` omits the TS line.
 * @returns The prelude lines (mutable; callers append to this array).
 */
function bundlePrelude(options) {
	const lines = options.includeTsGlobal !== false ? ["local TS = _G[script]", ""] : [];
	lines.push("local __WELD_MODULES = {", " cache = {} :: any,", "}", "");
	return lines;
}
53
/**
 * Count the lines in a source string (a string with no newline is one line).
 *
 * @param source - The source text.
 * @returns The 1-based line count.
 */
function countLines(source) {
	let total = 1;
	for (let index = source.indexOf("\n"); index !== -1; index = source.indexOf("\n", index + 1)) {
		total += 1;
	}
	return total;
}
56
/**
 * Invoke the mapping callback once per generated line, pairing generated line
 * numbers (starting at `startLine`) with 1-based original line numbers.
 *
 * @param input - Source content, line count, callback, source path, and start line.
 */
function emitLineMappings(input) {
	const { content, lineCount, onMapping, sourcePath, startLine } = input;
	for (let offset = 0; offset < lineCount; offset += 1) {
		onMapping({
			content,
			generatedLine: startLine + offset,
			originalLine: offset + 1,
			sourcePath
		});
	}
}
65
/**
 * Sum totalLocals across all modules in a chunk. Returns 0 if no module has
 * totalLocals set (debug mode off).
 *
 * @param chunk - The chunk to sum.
 * @returns Total locals across the chunk's modules.
 */
function sumChunkLocals(chunk) {
	return chunk.modules.reduce(
		(total, module_) => total + (module_.totalLocals ?? 0),
		0
	);
}
77
/**
 * Append one module's tab-indented body to the output lines, preceded by a
 * debug header when `totalLocals` is present, and emit line mappings when the
 * module carries both a source path and its original content.
 *
 * @param input - Output lines array, the module to append, and optional mapping callback.
 */
function appendChunkModuleBody(input) {
	const { lines, module_, onMapping } = input;
	if (module_.totalLocals !== undefined) {
		lines.push(`\t-- [${module_.id}] Variables in scope: ${String(module_.totalLocals)}`);
	}
	const firstBodyLine = lines.length + 1;
	const bodyLines = module_.source.split("\n");
	lines.push(...bodyLines.map((sourceLine) => `\t${sourceLine}`));
	const canMap =
		onMapping !== undefined &&
		module_.sourcePath !== undefined &&
		module_.originalContent !== undefined;
	if (canMap) {
		emitLineMappings({
			content: module_.originalContent,
			lineCount: bodyLines.length,
			onMapping,
			sourcePath: module_.sourcePath,
			startLine: firstBodyLine
		});
	}
}
91
/**
 * Append a chunk wrapped in a do-block to isolate its locals.
 *
 * A debug header with the chunk's total local count is emitted only when the
 * sum is positive. After the module bodies, one accessor function per module
 * exposes its export capture via `__WELD_MODULES`.
 *
 * @param input - Output lines array, the chunk to append, and optional source-map callback.
 */
function appendWrappedChunk(input) {
	const { chunk, lines, onMapping } = input;
	const localTotal = sumChunkLocals(chunk);
	if (localTotal > 0) {
		lines.push(`-- Chunk ${String(chunk.id)}: ${String(localTotal)} locals (${String(chunk.modules.length)} modules)`);
	}
	lines.push("do");
	for (const module_ of chunk.modules) {
		appendChunkModuleBody({ lines, module_, onMapping });
	}
	const accessors = chunk.modules.map(
		(module_) => `\tfunction __WELD_MODULES.${module_.id}() return _${module_.id} end`
	);
	lines.push(...accessors, "end", "");
}
109
+ //#endregion
110
+ //#region src/codegen/transforms/rename-locals.ts
111
/**
 * Build a byte-offset-to-char-offset mapping for a source string.
 * Lute reports byte offsets but JS string operations use char offsets.
 * They differ when the source contains multibyte UTF-8 characters.
 *
 * @param source - The source string.
 * @returns A function that converts byte offsets to char offsets.
 */
function buildByteToCharMap(source) {
	const encoder = new TextEncoder();
	const totalBytes = encoder.encode(source).length;
	// One slot per byte plus one for the end-of-string offset.
	const byteToChar = new Int32Array(totalBytes + 1);
	let charCursor = 0;
	let byteCursor = 0;
	for (const codePoint of source) {
		const width = encoder.encode(codePoint).length;
		// Every byte of a multibyte code point maps back to its first char.
		byteToChar.fill(charCursor, byteCursor, byteCursor + width);
		byteCursor += width;
		// Astral code points occupy two UTF-16 units, hence `.length` not 1.
		charCursor += codePoint.length;
	}
	byteToChar[byteCursor] = charCursor;
	return (byteOffset) => byteToChar[byteOffset];
}
134
/**
 * Apply all local renames to the source, working in descending offset order.
 *
 * Descending order keeps earlier offsets valid while later spans are spliced.
 *
 * @param source - Original source.
 * @param options - Rename configuration (`locals`, `prefix`, `toChar`).
 * @returns Source with renamed locals.
 */
function applyRenames(source, options) {
	const { locals, prefix, toChar } = options;
	const occurrences = locals.flatMap((binding) =>
		binding.locations.map((loc) => ({
			name: binding.name,
			end: toChar(loc.end),
			start: toChar(loc.start)
		}))
	);
	occurrences.sort((left, right) => right.start - left.start);
	let rewritten = source;
	for (const { name, start, end } of occurrences) {
		rewritten = `${rewritten.slice(0, start)}${prefix}_${name}${rewritten.slice(end)}`;
	}
	return rewritten;
}
157
/**
 * Rewrite the trailing top-level return to an export capture assignment.
 *
 * Handles both a return after a newline and a source that is nothing but a
 * `return` statement; sources without a top-level return pass through unchanged.
 *
 * @param source - Source with renames applied.
 * @param prefix - Module prefix for the export capture variable.
 * @returns The rewritten source.
 */
function rewriteReturn(source, prefix) {
	const marker = "\nreturn ";
	const markerIndex = source.lastIndexOf(marker);
	if (markerIndex !== -1) {
		const head = source.slice(0, markerIndex + 1);
		const tail = source.slice(markerIndex + marker.length);
		return `${head}local _${prefix} = ${tail}`;
	}
	if (source.startsWith("return ")) {
		return `local _${prefix} = ${source.slice("return ".length)}`;
	}
	return source;
}
171
/** Prefix top-level local variables and rewrite trailing return to export capture. */
const renameLocals = {
	name: "rename-locals",
	apply(source, context) {
		const { locals, moduleId } = context;
		const toChar = buildByteToCharMap(source);
		const renamed = applyRenames(source, { locals, prefix: moduleId, toChar });
		return rewriteReturn(renamed, moduleId);
	}
};
183
+ //#endregion
184
+ //#region src/codegen/transforms/regex-utils.ts
185
/**
 * Escape special regex characters in a string.
 *
 * @param text - The string to escape.
 * @returns The escaped string safe for use in a regex.
 */
function escapeRegex(text) {
	// `$&` inserts the matched character, prefixed with a backslash.
	const specialCharacters = /[.*+?^${}()|[\]\\]/g;
	return text.replace(specialCharacters, "\\$&");
}
194
+ //#endregion
195
+ //#region src/codegen/transforms/rewrite-internal-imports.ts
196
/**
 * Build a regex pattern that matches the call site for a resolved import.
 *
 * `require` kinds match `require(<arg>)` with optional surrounding whitespace;
 * all other kinds match a `TS.import(<args>)` call with comma-separated args.
 *
 * @param resolved - The resolved import metadata, including kind and args.
 * @returns A RegExp matching the corresponding source call.
 */
function buildImportPattern(resolved) {
	if (resolved.kind === "require") {
		/* c8 ignore next -- args[0] is always present for resolved require kinds (filtered upstream) */
		const firstArgument = escapeRegex(resolved.args[0] ?? "");
		return new RegExp(`require\\(\\s*${firstArgument}\\s*\\)`);
	}
	const argumentList = resolved.args
		.map((argument) => escapeRegex(argument))
		.join(",\\s*");
	return new RegExp(`TS\\.import\\(${argumentList}\\)`);
}
211
/**
 * Replace all `TS.import()` calls with module references.
 *
 * Each resolved import replaces at most one call site; intra-chunk imports
 * become direct local references, cross-chunk ones go through `__WELD_MODULES`.
 *
 * @param source - Source with TS boilerplate already stripped.
 * @param resolvedImports - Resolved imports to apply.
 * @returns The rewritten source and count of successful replacements.
 */
function rewriteImports(source, resolvedImports) {
	let rewriteCount = 0;
	let result = source;
	for (const resolved of resolvedImports) {
		const pattern = buildImportPattern(resolved);
		const replacement = resolved.intraChunk === true
			? `_${resolved.moduleId}`
			: `__WELD_MODULES.${resolved.moduleId}()`;
		const next = result.replace(pattern, replacement);
		if (next !== result) rewriteCount += 1;
		result = next;
	}
	return { rewriteCount, source: result };
}
233
/** Replace `TS.import()` calls with `__WELD_MODULES` references or direct local access. */
const rewriteInternalImports = {
	name: "rewrite-internal-imports",
	apply(source, context) {
		const { resolvedImports } = context;
		const { rewriteCount, source: rewritten } = rewriteImports(source, resolvedImports);
		// Every resolved import must have been replaced exactly once; a miss
		// means a call site pattern failed to match and the bundle is broken.
		const expected = resolvedImports.length;
		if (rewriteCount !== expected) {
			throw new Error(`rewrite count mismatch: expected ${String(expected)}, got ${String(rewriteCount)}`);
		}
		return rewritten;
	}
};
242
+ //#endregion
243
+ //#region src/codegen/transforms/strip-boilerplate.ts
244
// Matches the roblox-ts module preamble line, including its trailing newline.
const TS_BOILERPLATE_PATTERN = /local\s+TS\s*=\s*_G\[script]\n?/;
/** Strip the `local TS = _G[script]` boilerplate line. */
const stripBoilerplate = {
	name: "strip-boilerplate",
	apply: (source) => source.replace(TS_BOILERPLATE_PATTERN, "")
};
252
+ //#endregion
253
+ //#region src/codegen/transforms/validate-no-script-refs.ts
254
/**
 * Matches all Luau comments in left-to-right order:
 * - Block comments with any `=` delimiter count: `--[=*[ ... ]=*]`
 * - Single-line comments: `--` to end of line.
 *
 * Order matters: block pattern is tried first at each `--` position.
 * If `--[[` appears inside a `-- single-line comment`, the single-line
 * pattern matches first (consuming to EOL), preventing a false block open.
 */
const COMMENT_PATTERN = /--\[(=*)\[[\s\S]*?]\1]|--[^\n]*/g;
/**
 * Matches only the external passthrough shape
 * `TS.import(script, TS.getModule(<args>)[.sub.path]*)`, tolerating the
 * dotted subpath chain roblox-ts emits for nested module paths like
 * `TS.getModule(script, "@rbxts", "vide").src`. Internal calls like
 * `TS.import(script, script.Parent, "dep")` are intentionally *not* matched
 * so a leftover internal import that was not rewritten still fails validation.
 */
const TS_IMPORT_PATTERN = /TS\.import\(\s*script\s*,\s*TS\.getModule\([^()]*\)(?:\.\w+)*\s*\)/g;
/**
 * Remove comments and recognized external import passthroughs, leaving only
 * the regions where a `script` reference would be a validation error.
 */
function stripValidatedRegions(source) {
	const withoutComments = source.replace(COMMENT_PATTERN, "");
	return withoutComments.replace(TS_IMPORT_PATTERN, "");
}
276
+ //#endregion
277
+ //#region src/codegen/defaults.ts
278
/** Pipeline for non-entry modules: rename locals, strip boilerplate, rewrite imports, validate. */
const MODULE_PIPELINE = [
	renameLocals,
	stripBoilerplate,
	rewriteInternalImports,
	{
		// Final safety net: after stripping and rewriting, any remaining bare
		// `script` reference (outside comments and recognized external
		// TS.import passthroughs) indicates an import that was not rewritten.
		name: "validate-no-script-refs",
		apply(source) {
			if (/\bscript\b/.test(stripValidatedRegions(source))) throw new Error("non-boilerplate script reference found");
			return source;
		}
	}
];
/** Pipeline for entry point modules: no rename (locals stay global), no validate (script refs are legal). */
const ENTRY_PIPELINE = [stripBoilerplate, rewriteInternalImports];
/**
 * Path-mode non-entry pipeline. Skips validateNoScriptRefs because handwritten
 * Luau modules legitimately reference `script`. Skips stripBoilerplate because
 * there is no `local TS = _G[script]` line to remove.
 */
const PATH_MODULE_PIPELINE = [renameLocals, rewriteInternalImports];
/** Path-mode entry pipeline. Path-mode entries only need their imports rewritten. */
const PATH_ENTRY_PIPELINE = [rewriteInternalImports];
301
+ //#endregion
302
+ //#region src/codegen/module-id.ts
303
// Number of letters available per ID position.
const ALPHABET_SIZE = 26;
/**
 * 'a'.charCodeAt(0).
 */
const CHAR_CODE_A = 97;
/**
 * Generate a short alphabetic ID from a zero-based index using base-26
 * encoding with lowercase letters (bijective: a..z, aa, ab, ...).
 *
 * @param index - Zero-based module index.
 * @returns A lowercase alphabetic identifier.
 */
function generateId(index) {
	let remaining = index;
	let identifier = "";
	while (true) {
		identifier = String.fromCharCode(CHAR_CODE_A + (remaining % ALPHABET_SIZE)) + identifier;
		// Subtracting 1 makes the encoding bijective ("z" is followed by "aa").
		remaining = Math.floor(remaining / ALPHABET_SIZE) - 1;
		if (remaining < 0) break;
	}
	return identifier;
}
324
/**
 * Assign short IDs to modules in the given order.
 *
 * @param sortedFilePaths - File paths in deterministic sort order.
 * @returns A map from file path to short alphabetic ID.
 */
function assignModuleIds(sortedFilePaths) {
	const ids = new Map();
	sortedFilePaths.forEach((filePath, index) => {
		ids.set(filePath, generateId(index));
	});
	return ids;
}
335
+ //#endregion
336
+ //#region src/codegen/pipeline.ts
337
/**
 * Run a sequence of transforms over source code.
 *
 * Each transform's output feeds the next; the shared context is passed to all.
 *
 * @param source - Input source code.
 * @param options - Pipeline configuration (`transforms`, `context`).
 * @returns The final transformed source.
 */
function runPipeline(source, options) {
	const { context, transforms } = options;
	let current = source;
	for (const transform of transforms) {
		current = transform.apply(current, context);
	}
	return current;
}
348
+ //#endregion
349
+ //#region src/utils/path.ts
350
/**
 * Convert Windows backslash paths to POSIX forward slashes.
 *
 * @param value - Path to normalize.
 * @returns The path with forward slashes.
 */
function toPosix(value) {
	return value.split("\\").join("/");
}
// Recognized Luau source file extensions, checked in this order.
const LUAU_EXTENSIONS = [".luau", ".lua"];
360
+ //#endregion
361
+ //#region src/codegen/sourcemap-emitter.ts
362
/**
 * Emit a Source Map V3 JSON object from a list of line-to-line mappings.
 *
 * Sources are emitted as paths relative to {@link EmitSourceMapInput.sourcemapDirectory}
 * with forward slashes. Line numbers are 1-based for both ends. Each distinct
 * source file's content is embedded exactly once, on first sight.
 *
 * @param input - The mappings, output path, and sourcemap directory.
 * @returns A Source Map V3 JSON object.
 */
function emitSourceMap(input) {
	const { entries, outputPath, sourcemapDirectory } = input;
	const map = new GenMapping({ file: path.basename(outputPath) });
	const relativeCache = new Map();
	// Resolve (and cache) the relative source path, registering the file's
	// content with the map the first time a source is seen.
	const relativize = (sourcePath, content) => {
		const cached = relativeCache.get(sourcePath);
		if (cached !== undefined) return cached;
		const relativeSource = toPosix(path.relative(sourcemapDirectory, sourcePath));
		relativeCache.set(sourcePath, relativeSource);
		setSourceContent(map, relativeSource, content);
		return relativeSource;
	};
	for (const entry of entries) {
		addMapping(map, {
			generated: { column: 0, line: entry.generatedLine },
			original: { column: 0, line: entry.originalLine },
			source: relativize(entry.sourcePath, entry.content)
		});
	}
	return toEncodedMap(map);
}
395
+ //#endregion
396
+ //#region src/declarations/declaration-bundler.ts
397
/**
 * Map a .luau file path to its co-located .d.ts declaration file path.
 *
 * For `init.luau`, tries `init.d.ts` first, then falls back to `index.d.ts`
 * (roblox-ts renames `index.ts` → `init.luau`, but tsc emits `index.d.ts`).
 *
 * @param luauPath - Absolute posix-normalized path to a .luau file.
 * @returns Absolute path to the corresponding .d.ts file.
 * @throws Error listing every candidate checked when no declaration exists.
 */
function luauToDtsPath(luauPath) {
	const candidates = [toPosix(luauPath.replace(/\.luau$/, ".d.ts"))];
	if (path.basename(luauPath) === "init.luau") {
		candidates.push(toPosix(path.join(path.dirname(luauPath), "index.d.ts")));
	}
	for (const candidate of candidates) {
		if (fs.existsSync(candidate)) return candidate;
	}
	throw new Error(`No declaration file found for ${luauPath} (checked: ${candidates.join(", ")})`);
}
417
/**
 * Bundle .d.ts declaration files for a Luau entry module.
 *
 * Maps the entry .luau to its co-located .d.ts, then uses dts-bundle-generator
 * to produce a single merged declaration file, creating the output directory
 * if needed.
 *
 * @param config - Declaration bundle configuration.
 * @returns The bundle result with output path.
 */
function bundleDeclarations(config) {
	const { entryLuauPath, importedLibraries, outputPath } = config;
	const hasLibraries = Boolean(importedLibraries) && importedLibraries.length > 0;
	const [bundled] = generateDtsBundle([{
		filePath: luauToDtsPath(entryLuauPath),
		libraries: hasLibraries ? { importedLibraries: [...importedLibraries] } : undefined,
		output: { noBanner: true }
	}], {});
	fs.mkdirSync(path.dirname(outputPath), { recursive: true });
	fs.writeFileSync(outputPath, bundled);
	return { outputPath };
}
436
// Validate environment variables at module load. The return value is
// intentionally discarded: createEnv throws on invalid input, and WELD_DEBUG
// is read directly from process.env where needed. Empty strings are treated
// as unset via emptyStringAsUndefined.
createEnv({
	emptyStringAsUndefined: true,
	runtimeEnv: process.env,
	server: { WELD_DEBUG: type("string | undefined") }
});
441
// Explicit "off" spellings for WELD_DEBUG; anything else non-empty is on.
const FALSY_VALUES = new Set(["0", "false"]);
/**
 * Whether debug mode is enabled via the WELD_DEBUG environment variable.
 *
 * Unset or empty means disabled; "0"/"false" mean disabled; any other value
 * enables debug mode.
 *
 * @returns True when WELD_DEBUG is set to a truthy value.
 */
function isDebug() {
	const raw = process.env["WELD_DEBUG"];
	const unset = raw === undefined || raw === "";
	return unset ? false : !FALSY_VALUES.has(raw);
}
452
+ //#endregion
453
+ //#region src/extraction/schema.ts
454
/** Schema for a single require/import entry extracted from a Luau file. */
const requireInfoSchema = type({
	// Readable argument representations as emitted by the Lute extractor.
	args: "string[]",
	// Call shape that produced the entry; external imports go through TS.getModule.
	kind: "'require' | 'ts_import' | 'ts_import_external'"
}).as();
/** Schema for a top-level local binding with source locations. */
const topLevelLocalSchema = type({
	name: "string",
	// Byte-offset ranges of every reference to the binding.
	locations: type({
		end: "number",
		start: "number"
	}).array()
}).as();
/** Schema for per-file extraction data from the Lute analysis. */
const fileAnalysisSchema = type({
	"maxLocalsInScope": "number",
	"path": "string",
	// Optional fields are only present when the extractor ran in the relevant mode.
	"renamedSource?": "string",
	"requires": requireInfoSchema.array(),
	"topLevelLocals?": topLevelLocalSchema.array(),
	"totalLocals": "number"
}).as();
/** Schema for the timing summary emitted as the final NDJSON line. */
const timingSummarySchema = type({
	discoverMs: "number",
	fileCount: "number",
	parseMs: "number",
	walkMs: "number",
	wallMs: "number"
}).as();
484
+ //#endregion
485
+ //#region src/extraction/ndjson-parser.ts
486
// Marker prepended to the single timing-summary NDJSON line.
const TIMING_PREFIX = "__TIMING__";
/**
 * Parse NDJSON stdout from the Lute extraction process.
 *
 * Every non-blank line is a JSON-encoded file analysis, except the line
 * starting with `__TIMING__`, which carries the timing summary.
 *
 * @param stdout - Raw stdout string with one JSON object per line.
 * @returns Parsed extraction result with file analyses and optional timing.
 * @throws Error when a line is invalid JSON or fails schema validation.
 */
function parseNdjsonOutput(stdout) {
	const analyses = [];
	let timing;
	for (const line of stdout.split("\n")) {
		const trimmed = line.trim();
		if (trimmed === "") continue;
		if (trimmed.startsWith(TIMING_PREFIX)) {
			// Slice by the prefix's actual length rather than a hard-coded 10,
			// so the slice stays in sync if TIMING_PREFIX ever changes.
			const payload = trimmed.slice(TIMING_PREFIX.length);
			const timingResult = timingSummarySchema(safeParse(payload, trimmed));
			if (timingResult instanceof type.errors) throw new Error(`Invalid timing data on line: ${trimmed}\n${timingResult.summary}`);
			timing = timingResult;
			continue;
		}
		const analysisResult = fileAnalysisSchema(safeParse(trimmed, trimmed));
		if (analysisResult instanceof type.errors) throw new Error(`Invalid file analysis on line: ${trimmed}\n${analysisResult.summary}`);
		analyses.push(analysisResult);
	}
	return { analyses, timing };
}
515
/**
 * Parse a JSON string, throwing a descriptive error on failure.
 *
 * @param json - JSON string to parse.
 * @param originalLine - Original line for error context.
 * @returns The parsed value.
 * @throws Error naming the offending line when the JSON is malformed.
 */
function safeParse(json, originalLine) {
	let parsed;
	try {
		parsed = JSON.parse(json);
	} catch {
		throw new Error(`Invalid JSON on line: ${originalLine}`);
	}
	return parsed;
}
529
+ //#endregion
530
+ //#region src/extraction/lute-spawner.ts
531
+ /**
532
+ * Spawn the Lute extraction process and return parsed results.
533
+ *
534
+ * @param luauRoot - Root directory containing Luau source files.
535
+ * @param scriptPath - Optional override path to the extract.luau script.
536
+ * @returns Parsed extraction result with file analyses and optional timing.
537
+ */
538
+ function spawnLuteExtractor(luauRoot, scriptPath) {
539
+ const resolvedScript = scriptPath ?? writeTemporaryLuauScript("--- Weld extraction script: discovers Luau files, parses each with Lute's\n--- syntax.parse(), walks the full CST to find require()/TS.import() calls\n--- and count local variables per scope. Outputs one NDJSON line per file\n--- to stdout, followed by a __TIMING__-prefixed timing summary.\n---\n--- Usage: lute run extract.luau -- <luau-root>\n---\n--- CST shape (Lute returns full CST, not stripped AST):\n--- - Values/variables/arguments are wrapped: {node: <actual>}\n--- - Names are token objects: {text: \"name\"}\n--- - Strings: {tag: \"string\", token: {text: \"value\"}}\n--- - Locals: {tag: \"local\", local: {name: {text: \"x\"}}, token: {text: \"x\"}}\n--- - Globals: {tag: \"global\", name: {text: \"script\"}}\n--- - Index chains: {tag: \"indexname\", expression: <base>, index: {text: \"Parent\"}}\n--- - Calls: {tag: \"call\", func: <expr>, arguments: [{node: <arg>}, ...]}\n\nlocal fs = require(\"@std/fs\")\nlocal json = require(\"@std/json\")\nlocal process = require(\"@std/process\")\nlocal syntax = require(\"@std/syntax\")\n\n--------------------------------------------------------------------------------\n-- Argument parsing\n--------------------------------------------------------------------------------\n\nlocal args = process.args\nlocal pastSeparator = false\nlocal userArgs: { string } = {}\nfor _, arg in args do\n if pastSeparator then\n table.insert(userArgs, arg)\n elseif arg == \"--\" then\n pastSeparator = true\n end\nend\n\nlocal luauRoot = userArgs[1]\nif not luauRoot then\n error(\"Usage: lute run extract.luau -- <luau-root>\")\nend\n\nluauRoot = string.gsub(luauRoot, \"\\\\\", \"/\")\nif string.sub(luauRoot, -1) == \"/\" then\n luauRoot = string.sub(luauRoot, 1, -2)\nend\n\n--------------------------------------------------------------------------------\n-- File discovery\n--------------------------------------------------------------------------------\n\nlocal function 
discoverFiles(directory: string, relativeTo: string, results: { string })\n local ok, entries = pcall(fs.listDirectory, directory)\n if not ok then\n return\n end\n\n for _, entry in entries do\n local fullPath = directory .. \"/\" .. entry.name\n if entry.type == \"dir\" then\n -- Skip node_modules, coverage dirs, and dot directories\n if entry.name == \"node_modules\" or entry.name == \".jest-roblox\" then\n continue\n end\n\n if string.sub(entry.name, 1, 1) == \".\" then\n continue\n end\n\n discoverFiles(fullPath, relativeTo, results)\n elseif\n entry.type == \"file\"\n and (string.sub(entry.name, -5) == \".luau\" or string.sub(entry.name, -4) == \".lua\")\n then\n local relative = string.sub(fullPath, #relativeTo + 2)\n table.insert(results, relative)\n end\n end\nend\n\n--------------------------------------------------------------------------------\n-- CST helpers\n--------------------------------------------------------------------------------\n\ntype RequireInfo = {\n kind: \"require\" | \"ts_import\" | \"ts_import_external\",\n args: { string },\n}\n\ntype SourceLocation = {\n start: number,\n [\"end\"]: number,\n}\n\ntype TopLevelLocal = {\n name: string,\n locations: { SourceLocation },\n}\n\n--- Build a table mapping 1-based line numbers to 0-based byte offsets of each\n--- line's start position. 
Used to convert CST line/column positions to byte\n--- offsets.\nlocal function buildLineOffsets(source: string): { number }\n local offsets: { number } = { 0 } -- line 1 starts at byte 0\n local pos = 1\n while pos <= #source do\n local nl = string.find(source, \"\\n\", pos, true)\n if not nl then\n break\n end\n -- Next line starts at the byte after the newline\n table.insert(offsets, nl) -- 0-based offset: nl is 1-based pos of \\n, next char is at nl (0-based)\n pos = nl + 1\n end\n return offsets\nend\n\n--- Convert a CST location (1-based line/column) to a 0-based byte offset range.\nlocal function locationToByteRange(location: any, lineOffsets: { number }): SourceLocation\n local beginLine = location.beginLine :: number\n local beginCol = location.beginColumn :: number\n local endLine = location.endLine :: number\n local endCol = location.endColumn :: number\n local startOffset = lineOffsets[beginLine] + beginCol - 1\n local endOffset = lineOffsets[endLine] + endCol - 1\n return { start = startOffset, [\"end\"] = endOffset }\nend\n\n--- Create a unique key for a declaration binding based on its location.\nlocal function bindingKey(location: any): string\n return `{location.beginLine}:{location.beginColumn}`\nend\n\n--- Unwrap a CST node wrapper ({node: X} -> X).\nlocal function unwrap(wrapper: any): any\n if type(wrapper) == \"table\" and wrapper.node ~= nil then\n return wrapper.node\n end\n\n return wrapper\nend\n\n--- Get the text name from a CST name/token reference.\nlocal function getName(node: any): string?\n if type(node) ~= \"table\" then\n return nil\n end\n\n if node.text ~= nil then\n return node.text :: string\n end\n\n if node.name ~= nil and type(node.name) == \"table\" then\n return (node.name :: any).text\n end\n\n return nil\nend\n\n--- Reconstruct a path expression into a readable string.\nlocal function exprToString(node: any): string?\n if type(node) ~= \"table\" then\n return nil\n end\n\n if node.tag == \"string\" then\n local text: 
string? = if node.token then (node.token :: any).text else node.text\n if text then\n return `\"{text}\"`\n end\n\n return nil\n end\n\n if node.tag == \"global\" then\n return getName(node)\n end\n\n if node.tag == \"local\" then\n if node.token then\n return getName(node.token)\n end\n\n if node[\"local\"] then\n return getName(node[\"local\"])\n end\n\n return nil\n end\n\n if node.tag == \"indexname\" then\n local base = exprToString(node.expression)\n local index = getName(node.index)\n if base and index then\n return `{base}.{index}`\n end\n\n return index\n end\n\n return nil\nend\n\n--- Extract all arguments from a CST call's argument list as readable strings.\nlocal function extractCallArgs(arguments: any): { string }\n local result: { string } = {}\n if type(arguments) ~= \"table\" then\n return result\n end\n\n for _, argWrapper in arguments do\n local arg = unwrap(argWrapper)\n if type(arg) == \"table\" then\n local repr = exprToString(arg)\n if repr then\n table.insert(result, repr)\n end\n end\n end\n\n return result\nend\n\n--- Check if a CST node is a TS.getModule(...) 
call.\nlocal function isGetModuleCall(node: any): boolean\n if type(node) ~= \"table\" or node.tag ~= \"call\" then\n return false\n end\n\n local func = node.func\n if not func or func.tag ~= \"indexname\" then\n return false\n end\n\n local baseName = exprToString(func.expression)\n local indexName = getName(func.index)\n return baseName == \"TS\" and indexName == \"getModule\"\nend\n\n--- Extract TS.getModule arguments and any sub-path chain from a CST node.\n--- For `TS.getModule(script, \"@rbxts\", \"vide\").src`, returns {\"@rbxts\", \"vide\", \"src\"}.\n--- Returns nil if the node does not contain a TS.getModule call.\nlocal function extractGetModuleArgs(node: any): { string }?\n -- Walk down indexname chain to find the base call\n local subPathSegments: { string } = {}\n local current = node\n\n while type(current) == \"table\" and current.tag == \"indexname\" do\n local indexName = getName(current.index)\n if indexName then\n table.insert(subPathSegments, 1, indexName)\n end\n current = current.expression\n end\n\n -- Base must be a TS.getModule(...) call\n if not isGetModuleCall(current) then\n return nil\n end\n\n -- Extract string literal args from getModule (skip first arg: script)\n local result: { string } = {}\n if current.arguments and type(current.arguments) == \"table\" then\n local skipFirst = true\n for _, argWrapper in current.arguments do\n if skipFirst then\n skipFirst = false\n continue\n end\n local arg = unwrap(argWrapper)\n if type(arg) == \"table\" and arg.tag == \"string\" then\n local text: string? 
= if arg.token then (arg.token :: any).text else arg.text\n if text then\n table.insert(result, text)\n end\n end\n end\n end\n\n -- Append sub-path segments\n for _, segment in subPathSegments do\n table.insert(result, segment)\n end\n\n return result\nend\n\n--- Check if a CST call node is require() or TS.import() and record it.\nlocal function checkCall(callNode: any, requires: { RequireInfo })\n if type(callNode) ~= \"table\" or callNode.tag ~= \"call\" then\n return\n end\n\n local func = callNode.func\n if not func then\n return\n end\n\n -- require(...) -- global or local\n if func.tag == \"global\" or func.tag == \"local\" then\n local funcName = exprToString(func)\n if funcName == \"require\" then\n local info: RequireInfo = {\n kind = \"require\",\n args = extractCallArgs(callNode.arguments),\n }\n table.insert(requires, info)\n return\n end\n end\n\n -- TS.import(...) -- indexname where base is \"TS\" and index is \"import\"\n if func.tag == \"indexname\" then\n local baseName = exprToString(func.expression)\n local indexName = getName(func.index)\n\n if baseName == \"TS\" and indexName == \"import\" then\n -- Check if any argument contains TS.getModule (external dep)\n local externalArgs: { string }? 
= nil\n if callNode.arguments and type(callNode.arguments) == \"table\" then\n for _, argWrapper in callNode.arguments do\n local arg = unwrap(argWrapper)\n local moduleArgs = extractGetModuleArgs(arg)\n if moduleArgs then\n externalArgs = moduleArgs\n break\n end\n end\n end\n\n if externalArgs then\n local info: RequireInfo = {\n kind = \"ts_import_external\",\n args = externalArgs,\n }\n table.insert(requires, info)\n else\n local info: RequireInfo = {\n kind = \"ts_import\",\n args = extractCallArgs(callNode.arguments),\n }\n table.insert(requires, info)\n end\n end\n end\nend\n\n--- Table for tracking top-level local references during expression walking.\n--- Maps binding key (line:col) to the TopLevelLocal entry.\ntype TopLevelTracker = {\n --- Map from binding key to TopLevelLocal entry.\n bindings: { [string]: TopLevelLocal },\n --- Precomputed line-start byte offsets.\n lineOffsets: { number },\n}\n\n--- Forward declaration for mutual recursion (walkExpression uses walkBlock)\nlocal walkBlock: (block: any, requires: { RequireInfo }, tracker: TopLevelTracker?, rootBlock: boolean?) 
-> (number, number)\n\n--------------------------------------------------------------------------------\n-- Full expression walker\n--------------------------------------------------------------------------------\n\n--- Walk an expression recursively, checking for calls at every position.\n--- This is the key difference from the spike: we walk ALL expression positions.\n--- When tracker is provided, also records byte offsets for top-level local refs.\nlocal function walkExpression(expr: any, requires: { RequireInfo }, tracker: TopLevelTracker?)\n if type(expr) ~= \"table\" then\n return\n end\n\n -- Track local references when a tracker is active\n if tracker and expr.tag == \"local\" and expr.kind == \"expr\" then\n local binding = expr[\"local\"]\n if type(binding) == \"table\" and binding.location then\n local key = bindingKey(binding.location)\n local entry = tracker.bindings[key]\n if entry then\n local range = locationToByteRange(expr.location, tracker.lineOffsets)\n table.insert(entry.locations, range)\n end\n end\n end\n\n -- Always check if this expression itself is a call\n checkCall(expr, requires)\n\n -- Recurse into sub-expressions based on tag\n local tag = expr.tag\n\n if tag == \"call\" then\n -- Walk the function expression (could be a nested call)\n if expr.func then\n walkExpression(expr.func, requires, tracker)\n end\n\n -- Walk each argument (nested calls in arguments)\n if expr.arguments and type(expr.arguments) == \"table\" then\n for _, argWrapper in expr.arguments do\n walkExpression(unwrap(argWrapper), requires, tracker)\n end\n end\n elseif tag == \"indexname\" or tag == \"index\" then\n if expr.expression then\n walkExpression(expr.expression, requires, tracker)\n end\n\n if tag == \"index\" and expr.index then\n walkExpression(unwrap(expr.index), requires, tracker)\n end\n elseif tag == \"binary\" then\n if expr.lhsOperand then\n walkExpression(unwrap(expr.lhsOperand), requires, tracker)\n end\n\n if expr.rhsOperand then\n 
walkExpression(unwrap(expr.rhsOperand), requires, tracker)\n end\n elseif tag == \"unary\" then\n if expr.operand then\n walkExpression(unwrap(expr.operand), requires, tracker)\n end\n elseif tag == \"group\" then\n if expr.expression then\n walkExpression(unwrap(expr.expression), requires, tracker)\n end\n elseif tag == \"table\" then\n if expr.entries and type(expr.entries) == \"table\" then\n for _, entry in expr.entries do\n local e = unwrap(entry)\n if type(e) == \"table\" then\n -- Table entry value\n if e.value then\n walkExpression(unwrap(e.value), requires, tracker)\n end\n\n -- Table entry key (indexexpr)\n if e.key then\n walkExpression(unwrap(e.key), requires, tracker)\n end\n end\n end\n end\n elseif tag == \"ifelse\" then\n -- Ternary: if cond then a else b\n if expr.condition then\n walkExpression(unwrap(expr.condition), requires, tracker)\n end\n\n if expr.thenExpr then\n walkExpression(unwrap(expr.thenExpr), requires, tracker)\n end\n\n if expr.elseExpr then\n walkExpression(unwrap(expr.elseExpr), requires, tracker)\n end\n\n -- elseif chains in if-expressions\n if expr.elseifs and type(expr.elseifs) == \"table\" then\n for _, elseifEntry in expr.elseifs do\n if type(elseifEntry) == \"table\" then\n if elseifEntry.condition then\n walkExpression(unwrap(elseifEntry.condition), requires, tracker)\n end\n\n if elseifEntry.thenExpr then\n walkExpression(unwrap(elseifEntry.thenExpr), requires, tracker)\n end\n end\n end\n end\n elseif tag == \"function\" then\n -- Anonymous function expression: walk the body for require calls\n -- but mark as non-root so locals inside aren't tracked as top-level.\n if expr.body and type(expr.body) == \"table\" and expr.body.statements then\n walkBlock(expr.body, requires, tracker, false)\n end\n elseif tag == \"cast\" then\n if expr.operand then\n walkExpression(unwrap(expr.operand), requires, tracker)\n end\n end\nend\n\n--------------------------------------------------------------------------------\n-- 
Statement/block walker\n--------------------------------------------------------------------------------\n\n--- Count local declarations in a CST statement.\nlocal function countLocals(stat: any): number\n if stat.tag == \"local\" then\n if stat.variables and type(stat.variables) == \"table\" then\n return #stat.variables\n end\n\n return 1\n elseif stat.tag == \"localfunction\" then\n return 1\n end\n\n return 0\nend\n\n--- Register a top-level local declaration in the tracker.\nlocal function registerTopLevelLocal(\n tracker: TopLevelTracker,\n name: string,\n declLocation: any\n)\n local key = bindingKey(declLocation)\n local range = locationToByteRange(declLocation, tracker.lineOffsets)\n local entry: TopLevelLocal = {\n name = name,\n locations = { range },\n }\n tracker.bindings[key] = entry\nend\n\n--- Walk a block's statements, extracting requires and counting locals.\n--- Uses iterative stack-based traversal for the block level to avoid stack limits.\n--- When tracker is provided, records top-level local declarations from the root\n--- block (first stack entry only) and all references throughout the tree.\nwalkBlock = function(block: any, requires: { RequireInfo }, tracker: TopLevelTracker?, rootBlock: boolean?): (number, number)\n local maxLocals = 0\n local totalLocals = 0\n local isInitialRoot = if rootBlock == nil then true else rootBlock\n\n type StackEntry = { statements: any, locals: number, isRoot: boolean }\n local stack: { StackEntry } = {}\n table.insert(stack, { statements = block.statements or {}, locals = 0, isRoot = isInitialRoot })\n\n while #stack > 0 do\n local entry = table.remove(stack, #stack) :: StackEntry\n local statements = entry.statements\n local scopeLocals = entry.locals\n local isRoot = entry.isRoot\n\n if type(statements) ~= \"table\" then\n continue\n end\n\n for _, stat in statements do\n if type(stat) ~= \"table\" then\n continue\n end\n\n local localCount = countLocals(stat)\n scopeLocals += localCount\n totalLocals += 
localCount\n\n -- Register top-level locals when tracking (root block only)\n if tracker and isRoot then\n if stat.tag == \"local\" and stat.variables then\n for _, varWrapper in stat.variables do\n local v = unwrap(varWrapper)\n if type(v) == \"table\" and v.name and v.location then\n local varName = getName(v.name)\n if varName then\n registerTopLevelLocal(tracker, varName, v.location)\n end\n end\n end\n elseif stat.tag == \"localfunction\" and stat.name then\n local funcName = getName(stat.name.name or stat.name)\n local funcLoc = stat.name.location\n if funcName and funcLoc then\n registerTopLevelLocal(tracker, funcName, funcLoc)\n end\n end\n end\n\n -- Walk values in local/assign statements\n if (stat.tag == \"local\" or stat.tag == \"assign\") and stat.values then\n for _, valueWrapper in stat.values do\n walkExpression(unwrap(valueWrapper), requires, tracker)\n end\n end\n\n -- Walk assignment targets (for destructuring/indexing)\n if stat.tag == \"assign\" and stat.variables then\n for _, varWrapper in stat.variables do\n walkExpression(unwrap(varWrapper), requires, tracker)\n end\n end\n\n -- Expression statements (bare calls)\n if stat.tag == \"expression\" and stat.expression then\n walkExpression(stat.expression, requires, tracker)\n end\n\n -- Return statements (CST uses \"expressions\" not \"values\")\n if stat.tag == \"return\" and stat.expressions then\n for _, valueWrapper in stat.expressions do\n walkExpression(unwrap(valueWrapper), requires, tracker)\n end\n end\n\n -- For-in: walk the expression list\n if stat.tag == \"forin\" and stat.values then\n for _, valueWrapper in stat.values do\n walkExpression(unwrap(valueWrapper), requires, tracker)\n end\n end\n\n -- Numeric for: walk start/end/step\n if stat.tag == \"for\" then\n if stat.from then\n walkExpression(unwrap(stat.from), requires, tracker)\n end\n\n if stat.to then\n walkExpression(unwrap(stat.to), requires, tracker)\n end\n\n if stat.step then\n walkExpression(unwrap(stat.step), 
requires, tracker)\n end\n end\n\n -- While/repeat: walk condition\n if stat.tag == \"while\" and stat.condition then\n walkExpression(unwrap(stat.condition), requires, tracker)\n end\n\n if stat.tag == \"repeat\" and stat.condition then\n walkExpression(unwrap(stat.condition), requires, tracker)\n end\n\n -- If statement conditions\n if stat.tag == \"conditional\" and stat.condition then\n walkExpression(unwrap(stat.condition), requires, tracker)\n end\n\n -- Non-local function statements: track name references.\n -- Handles both `function foo(...)` (direct local) and\n -- `function Foo.bar(...)` / `function Foo:baz(...)` (indexname\n -- where the base expression is a local reference).\n if stat.tag == \"function\" and stat.name and tracker then\n local nameNode = stat.name\n -- Direct local: `function foo(...)`\n if nameNode.tag == \"local\" or (nameNode.token and nameNode[\"local\"]) then\n local nameToken = nameNode.token :: any\n local nameLocal = nameNode[\"local\"] :: any\n if nameToken and nameLocal and type(nameLocal) == \"table\" then\n local declLoc = nameLocal.location\n if nameToken.location and declLoc then\n local key = bindingKey(declLoc)\n local existing = tracker.bindings[key]\n if existing then\n local range = locationToByteRange(nameToken.location, tracker.lineOffsets)\n table.insert(existing.locations, range)\n end\n end\n end\n end\n -- Indexname: `function Foo.bar(...)` — walk the base expression\n if nameNode.tag == \"indexname\" and nameNode.expression then\n walkExpression(nameNode.expression, requires, tracker)\n end\n end\n\n -- Push nested blocks onto the stack (never root)\n if stat.body and type(stat.body) == \"table\" and stat.body.statements then\n table.insert(stack, { statements = stat.body.statements, locals = 0, isRoot = false })\n end\n\n if stat.thenBlock and type(stat.thenBlock) == \"table\" and stat.thenBlock.statements then\n table.insert(stack, { statements = stat.thenBlock.statements, locals = 0, isRoot = false })\n 
end\n\n if stat.elseBlock and type(stat.elseBlock) == \"table\" and stat.elseBlock.statements then\n table.insert(stack, { statements = stat.elseBlock.statements, locals = 0, isRoot = false })\n end\n\n if stat.elseifs and type(stat.elseifs) == \"table\" then\n for _, elseif_ in stat.elseifs do\n if type(elseif_) == \"table\" then\n -- Walk elseif condition\n if elseif_.condition then\n walkExpression(unwrap(elseif_.condition), requires, tracker)\n end\n\n local thenBlock = elseif_.thenBlock\n if type(thenBlock) == \"table\" and thenBlock.statements then\n table.insert(stack, { statements = thenBlock.statements, locals = 0, isRoot = false })\n end\n end\n end\n end\n\n -- Function bodies (localfunction and function statements)\n if stat.func and type(stat.func) == \"table\" then\n local funcBody = stat.func.body\n if type(funcBody) == \"table\" and funcBody.statements then\n table.insert(stack, { statements = funcBody.statements, locals = 0, isRoot = false })\n end\n end\n end\n\n if scopeLocals > maxLocals then\n maxLocals = scopeLocals\n end\n end\n\n return maxLocals, totalLocals\nend\n\n--------------------------------------------------------------------------------\n-- Per-file analysis\n--------------------------------------------------------------------------------\n\nlocal function analyzeSource(source: string, filePath: string): (any, number, number)\n local parseStart = os.clock()\n local parseResult = syntax.parse(source)\n local parseTime = os.clock() - parseStart\n\n local requires: { RequireInfo } = {}\n\n -- Build tracker for top-level local collection\n local lineOffsets = buildLineOffsets(source)\n local tracker: TopLevelTracker = {\n bindings = {},\n lineOffsets = lineOffsets,\n }\n\n local walkStart = os.clock()\n local maxLocals, totalLocals = walkBlock(parseResult.root, requires, tracker)\n local walkTime = os.clock() - walkStart\n\n -- Serialize requires for JSON\n local reqs: json.Array = {} :: any\n for _, req in requires do\n 
table.insert(reqs :: any, {\n kind = req.kind,\n args = req.args :: json.Array,\n })\n end\n\n -- Serialize topLevelLocals for JSON\n local topLocals: json.Array = {} :: any\n for _, entry in tracker.bindings do\n local locs: json.Array = {} :: any\n for _, loc in entry.locations do\n table.insert(locs :: any, {\n start = loc.start,\n [\"end\"] = loc[\"end\"],\n })\n end\n table.insert(topLocals :: any, {\n name = entry.name,\n locations = locs,\n })\n end\n\n local analysis = {\n path = filePath,\n requires = reqs,\n maxLocalsInScope = maxLocals,\n totalLocals = totalLocals,\n topLevelLocals = topLocals,\n }\n\n return analysis, parseTime, walkTime\nend\n\n--------------------------------------------------------------------------------\n-- Main\n--------------------------------------------------------------------------------\n\n-- Discover files\nlocal discoverStart = os.clock()\nlocal files: { string } = {}\ndiscoverFiles(luauRoot, luauRoot, files)\nlocal discoverTime = os.clock() - discoverStart\n\n-- Parse + extract, stream NDJSON\nlocal totalStart = os.clock()\nlocal totalParseMs = 0\nlocal totalWalkMs = 0\n\nfor _, relativePath in files do\n local fullPath = luauRoot .. \"/\" .. relativePath\n local source = fs.readFileToString(fullPath)\n local analysis, parseTime, walkTime = analyzeSource(source, relativePath)\n\n totalParseMs += parseTime * 1000\n totalWalkMs += walkTime * 1000\n\n print(json.serialize(analysis))\nend\n\nlocal totalTime = os.clock() - totalStart\n\n-- Final line: timing summary prefixed so Node can distinguish it\nprint(\"__TIMING__\" .. json.serialize({\n fileCount = #files,\n discoverMs = discoverTime * 1000,\n parseMs = totalParseMs,\n walkMs = totalWalkMs,\n wallMs = totalTime * 1000,\n}))\n", "extract");
540
+ return parseNdjsonOutput(spawnLute({
541
+ args: [luauRoot],
542
+ maxBuffer: 10 * 1024 * 1024,
543
+ scriptPath: resolvedScript,
544
+ timeout: 3e4
545
+ }));
546
+ }
547
+ //#endregion
548
+ //#region src/graph/chunk-assigner.ts
549
// Tuning knobs for the greedy chunk packer.
/** Soft limit on locals per chunk; merges past this need a strictly positive score. */
const DEFAULT_SOFT_CAP = 150;
/** Hard limit on locals per chunk; a merge never pushes a chunk beyond this. */
const DEFAULT_HARD_CAP = 180;
/** Modules imported by at least this many others receive a merge penalty. */
const HIGH_FAN_IN_THRESHOLD = 3;
/** Modules with fewer locals than this receive a merge bonus. */
const SMALL_MODULE_THRESHOLD = 10;
/** Modules with more locals than this receive a merge penalty. */
const LARGE_MODULE_THRESHOLD = 50;
559
/**
 * Assign modules to chunks using greedy DFS packing with merge scoring.
 * Walks the dependency graph in post-order, greedily merging modules into
 * chunks based on fan-in, module size, and local variable budgets.
 *
 * @param options - Chunk assignment configuration (graph, localCounts,
 *   optional softCap/hardCap overrides).
 * @returns The chunk assignment with ordered chunks and module mapping.
 */
function assignChunks(options) {
	const { graph, localCounts } = options;
	// ?? (not destructuring defaults) so an explicit null also falls back.
	const softCap = options.softCap ?? DEFAULT_SOFT_CAP;
	const hardCap = options.hardCap ?? DEFAULT_HARD_CAP;
	const entry = graph.entryPoint;
	// Pack everything except the entry point, in dependency post-order.
	const packState = packModules({
		fanIn: computeFanIn(graph),
		graph,
		hardCap,
		localCounts,
		modules: dfsOrderDfs(graph).filter((candidate) => candidate !== entry),
		softCap
	});
	// The entry point always gets its own trailing chunk.
	const entryChunk = createChunk({
		chunkId: packState.chunks.length,
		moduleLocals: localCounts.get(entry),
		modulePath: entry
	});
	packState.chunks.push(entryChunk);
	packState.moduleToChunk.set(entry, entryChunk.id);
	return {
		chunks: packState.chunks,
		moduleToChunk: packState.moduleToChunk
	};
}
592
/**
 * Create a new chunk seeded with a single module.
 *
 * @param options - Chunk id, the module's path, and its local count.
 * @returns The new chunk; budgets one extra local beyond the module's own count.
 */
function createChunk(options) {
	const { chunkId, moduleLocals, modulePath } = options;
	return {
		id: chunkId,
		modules: [modulePath],
		totalLocals: moduleLocals + 1
	};
}
605
/**
 * Start a new chunk in the pack state and make it current.
 *
 * @param state - Mutable pack state.
 * @param options - Module to seed the chunk with (modulePath, moduleLocals).
 */
function startChunk(state, options) {
	const fresh = createChunk({ chunkId: state.chunks.length, ...options });
	state.chunks.push(fresh);
	state.currentChunk = fresh;
	state.currentChunkModules = new Set([options.modulePath]);
	state.moduleToChunk.set(options.modulePath, fresh.id);
}
621
/**
 * Score bonus (0 or 1) for a module that imports something already
 * placed in the candidate chunk.
 *
 * @param input - The chunk's module set, the graph, and the module path.
 * @returns 1 when any import edge lands inside the chunk, else 0.
 */
function computeDependencyBonus({ chunkModules, graph, modulePath }) {
	const { imports } = graph.nodes.get(modulePath);
	return imports.some((edge) => chunkModules.has(edge.resolvedPath)) ? 1 : 0;
}
626
/**
 * Compute the merge desirability score for placing a module into the
 * current chunk. Positive favors merging, negative favors a new chunk.
 *
 * @param input - Chunk module set, fan-in, graph, module locals, and path.
 * @returns The net score (bonuses minus penalties).
 */
function computeMergeScore(input) {
	const { chunkModules, fanIn, graph, moduleLocals, modulePath } = input;
	const bonuses =
		(fanIn === 1 ? 2 : 0) +
		(moduleLocals < SMALL_MODULE_THRESHOLD ? 1 : 0) +
		computeDependencyBonus({ chunkModules, graph, modulePath });
	const penalties =
		(fanIn >= HIGH_FAN_IN_THRESHOLD ? 2 : 0) +
		(moduleLocals > LARGE_MODULE_THRESHOLD ? 1 : 0);
	return bonuses - penalties;
}
640
/**
 * Try to pack one module into the current chunk or start a new one.
 * Merges when the projected size fits the soft cap with a non-negative
 * score, or fits the hard cap with a strictly positive score.
 *
 * @param state - Mutable pack state.
 * @param input - Module packing parameters.
 */
function packOneModule(state, input) {
	const { fanIn, graph, hardCap, localCounts, modulePath, softCap } = input;
	const moduleLocals = localCounts.get(modulePath);
	const open = state.currentChunk;
	if (open === void 0) {
		startChunk(state, { moduleLocals, modulePath });
		return;
	}
	// +1 mirrors createChunk's per-module overhead budget.
	const projected = open.totalLocals + moduleLocals + 1;
	const score = computeMergeScore({
		chunkModules: state.currentChunkModules,
		fanIn: fanIn.get(modulePath),
		graph,
		moduleLocals,
		modulePath
	});
	const withinSoft = projected <= softCap && score >= 0;
	const withinHard = projected <= hardCap && score > 0;
	if (!withinSoft && !withinHard) {
		startChunk(state, { moduleLocals, modulePath });
		return;
	}
	open.modules.push(modulePath);
	open.totalLocals = projected;
	state.currentChunkModules.add(modulePath);
	state.moduleToChunk.set(modulePath, open.id);
}
674
/**
 * Pack modules into chunks using merge scoring.
 *
 * @param input - Packing configuration (fanIn, graph, caps, localCounts, modules).
 * @returns Mutable pack state with chunks and module-to-chunk mappings.
 */
function packModules(input) {
	const { fanIn, graph, hardCap, localCounts, modules, softCap } = input;
	const state = {
		chunks: [],
		currentChunk: void 0,
		currentChunkModules: new Set(),
		moduleToChunk: new Map()
	};
	for (const modulePath of modules) {
		packOneModule(state, {
			fanIn,
			graph,
			hardCap,
			localCounts,
			modulePath,
			softCap
		});
	}
	return state;
}
698
/**
 * Compute fan-in for each module (how many import edges point at it).
 *
 * @param graph - The dependency graph.
 * @returns Map from file path to fan-in count (0 for unreferenced modules).
 */
function computeFanIn(graph) {
	const counts = new Map();
	for (const modulePath of graph.nodes.keys()) {
		counts.set(modulePath, 0);
	}
	for (const { imports } of graph.nodes.values()) {
		for (const { resolvedPath } of imports) {
			counts.set(resolvedPath, counts.get(resolvedPath) + 1);
		}
	}
	return counts;
}
710
/**
 * Walk the graph depth-first from the entry point and collect modules
 * in post-order: leaves first, entry point last.
 *
 * @param graph - The dependency graph.
 * @returns Module paths in post-order.
 */
function dfsOrderDfs(graph) {
	const seen = new Set();
	const postOrder = [];
	const descend = (modulePath) => {
		if (seen.has(modulePath)) {
			return;
		}
		seen.add(modulePath);
		for (const edge of graph.nodes.get(modulePath).imports) {
			descend(edge.resolvedPath);
		}
		postOrder.push(modulePath);
	};
	descend(graph.entryPoint);
	return postOrder;
}
730
+ //#endregion
731
+ //#region src/graph/dependency-graph.ts
732
/**
 * Build a directed dependency graph from file analyses and module resolution.
 * Discovers reachable modules via DFS, resolves TS.import calls, and produces
 * a topologically sorted ordering.
 *
 * @param options - Graph building configuration.
 * @returns The dependency graph with topologically sorted modules.
 */
function buildDependencyGraph(options) {
	const { analyses, entryFilePath, resolve, resolveRequire } = options;
	const reachable = discoverReachableModules({
		analysisByPath: indexAnalyses(analyses),
		entryFilePath,
		resolve,
		resolveRequire
	});
	return {
		entryPoint: entryFilePath,
		nodes: reachable,
		sorted: topologicalSort(reachable)
	};
}
754
/**
 * Index file analyses by path for O(1) lookup.
 *
 * @param analyses - Array of file analyses to index.
 * @returns Map from file path to its analysis.
 */
function indexAnalyses(analyses) {
	return new Map(analyses.map((entry) => [entry.path, entry]));
}
765
/* c8 ignore start -- only true-branch path is reached in practice; defensive */
/**
 * Remove a single pair of surrounding double quotes, if present.
 *
 * @param text - Raw argument text.
 * @returns The unquoted text, or the input unchanged.
 */
function stripQuotes(text) {
	const isQuoted = text.length >= 2 && text.startsWith("\"") && text.endsWith("\"");
	return isQuoted ? text.slice(1, -1) : text;
}
/* c8 ignore stop */
771
/**
 * Resolve a plain `require(...)` call into an optional ImportEdge.
 *
 * @param request - The extracted require metadata.
 * @param options - File path and resolver functions.
 * @returns A "require" edge, or `undefined` for empty args / passthrough.
 * @throws When no `resolveRequire` callback was supplied.
 */
function resolvePlainRequire(request, options) {
	if (request.args.length === 0) {
		return void 0;
	}
	if (options.resolveRequire === void 0) {
		throw new Error(`plain require() not supported in bundled modules: ${options.filePath}`);
	}
	const outcome = options.resolveRequire(options.filePath, stripQuotes(request.args[0]));
	if (outcome.kind === "passthrough") {
		return void 0;
	}
	return {
		args: request.args,
		kind: "require",
		resolvedPath: outcome.path
	};
}
783
/**
 * Resolve a single require/import request into an optional ImportEdge.
 *
 * @param request - The extracted require/import metadata.
 * @param options - File path and resolver functions.
 * @returns The resolved edge, or `undefined` for passthrough/external imports.
 * @throws When a TS.import is seen without a resolve callback.
 */
function resolveSingleImport(request, options) {
	switch (request.kind) {
		case "ts_import_external":
			// External dependencies are kept out of the bundle graph.
			return void 0;
		case "require":
			return resolvePlainRequire(request, options);
		default:
			break;
	}
	/* c8 ignore next 5 -- defensive: ts_import without resolve is a misconfiguration the dispatch path normally rules out */
	if (options.resolve === void 0) {
		throw new Error(`TS.import resolution invoked without a resolve callback: ${options.filePath}`);
	}
	return {
		args: request.args,
		kind: "ts_import",
		resolvedPath: options.resolve(options.filePath, request.args)
	};
}
802
/**
 * Resolve every require/import in a file's analysis into import edges.
 *
 * @param options - File analysis and resolver functions.
 * @returns Array of resolved import edges (unresolvable requests dropped).
 */
function resolveImports(options) {
	return options.fileAnalysis.requires.flatMap((request) => {
		const edge = resolveSingleImport(request, options);
		return edge === void 0 ? [] : [edge];
	});
}
816
/**
 * DFS visit a single node: resolve imports, detect cycles, recurse.
 * Relies on Set insertion order of `onStack` to reconstruct the cycle path.
 *
 * @param state - Shared DFS traversal state.
 * @param filePath - Path of the module to visit.
 * @throws On circular dependencies or a module with no analysis.
 */
function dfsVisit(state, filePath) {
	if (state.visited.has(filePath)) {
		return;
	}
	if (state.onStack.has(filePath)) {
		// Report only the cycle portion of the current traversal path.
		const trail = [...state.onStack, filePath];
		const cycleStart = trail.indexOf(filePath);
		throw new Error(`Circular dependency detected: ${trail.slice(cycleStart).join(" -> ")}`);
	}
	const analysis = state.analysisByPath.get(filePath);
	if (!analysis) {
		throw new Error(`No analysis found for module: ${filePath}`);
	}
	state.onStack.add(filePath);
	const edges = resolveImports({
		fileAnalysis: analysis,
		filePath,
		resolve: state.resolve,
		resolveRequire: state.resolveRequire
	});
	for (const { resolvedPath } of edges) {
		dfsVisit(state, resolvedPath);
	}
	state.onStack.delete(filePath);
	state.visited.add(filePath);
	state.nodes.set(filePath, {
		filePath,
		imports: edges
	});
}
846
/**
 * DFS from the entry point to discover all reachable modules,
 * detect cycles, and build the adjacency list.
 *
 * @param options - Entry path, indexed analyses, and resolver functions.
 * @returns Map of reachable module nodes.
 */
function discoverReachableModules(options) {
	const traversal = {
		analysisByPath: options.analysisByPath,
		nodes: new Map(),
		onStack: new Set(),
		resolve: options.resolve,
		resolveRequire: options.resolveRequire,
		visited: new Set()
	};
	dfsVisit(traversal, options.entryFilePath);
	return traversal.nodes;
}
865
/**
 * Compute in-degree for each node based on import edges.
 *
 * @param nodes - Adjacency list of reachable modules.
 * @returns Map from file path to its in-degree count (0 when unreferenced).
 */
function computeInDegrees(nodes) {
	const degrees = new Map();
	for (const modulePath of nodes.keys()) {
		degrees.set(modulePath, 0);
	}
	for (const { imports } of nodes.values()) {
		for (const { resolvedPath } of imports) {
			degrees.set(resolvedPath, degrees.get(resolvedPath) + 1);
		}
	}
	return degrees;
}
876
+ }
877
+ /**
878
+ * Drain the Kahn queue: pop zero-in-degree nodes, decrement neighbors.
879
+ *
880
+ * @param options - Kahn's algorithm state.
881
+ * @returns Nodes in dependents-first order.
882
+ */
883
+ function drainKahnQueue(options) {
884
+ const { inDegree, nodes, queue } = options;
885
+ const sorted = [];
886
+ while (queue.length > 0) {
887
+ const path = queue.shift();
888
+ sorted.push(path);
889
+ const node = nodes.get(path);
890
+ for (const edge of node.imports) {
891
+ const updated = inDegree.get(edge.resolvedPath) - 1;
892
+ inDegree.set(edge.resolvedPath, updated);
893
+ if (updated === 0) queue.push(edge.resolvedPath);
894
+ }
895
+ }
896
+ return sorted;
897
+ }
898
/**
 * Topologically sort module nodes using Kahn's algorithm.
 * Returns dependencies before dependents (leaf modules first, entry last).
 *
 * @param nodes - Adjacency list of reachable modules.
 * @returns Topologically sorted file paths with dependencies first.
 */
function topologicalSort(nodes) {
	const inDegree = computeInDegrees(nodes);
	// Seed with every node nothing imports (dependents-first roots).
	const roots = [...inDegree.entries()]
		.filter(([, degree]) => degree === 0)
		.map(([modulePath]) => modulePath);
	const dependentsFirst = drainKahnQueue({
		inDegree,
		nodes,
		queue: roots
	});
	return dependentsFirst.reverse();
}
915
+ //#endregion
916
+ //#region src/resolution/ts-import-parser.ts
917
/** Matches each `.Parent` hop in a TS.import base-path argument. */
const PARENT_PATTERN = /\.Parent/g;
/**
 * Parse the argument list of a `TS.import` call into a structured result.
 *
 * @param args - Raw argument strings extracted from the Luau AST.
 * @returns The parsed import: `.Parent` hop count plus unquoted segments.
 * @throws When fewer than 3 arguments are supplied.
 */
function parseTsImport(args) {
	if (args.length < 3) {
		throw new Error(`TS.import requires at least 3 arguments (script, base, segment), got ${String(args.length)}`);
	}
	// args[0] is the caller's script reference; args[1] encodes upward
	// traversal via repeated `.Parent`; the rest are quoted segment names.
	const parentCount = args[1].match(PARENT_PATTERN)?.length ?? 0;
	const segments = args
		.slice(2)
		.map((raw) => raw.replace(/^"/, "").replace(/"$/, ""));
	return {
		parentCount,
		segments
	};
}
933
+ //#endregion
934
+ //#region src/resolution/module-resolver.ts
935
+ /**
936
+ * Create a module resolver that wires TS.import parsing, Rojo path
937
+ * resolution, and reverse mapping to resolve imports to filesystem paths.
938
+ *
939
+ * @param getRbxPath - Function that maps a file path to its RbxPath.
940
+ * @param reverseMapper - Reverse mapper for looking up filesystem paths.
941
+ * @returns A ModuleResolver instance.
942
+ */
943
+ function createModuleResolver(getRbxPath, reverseMapper) {
944
+ return { resolve(fromFilePath, tsImportArgs) {
945
+ const callerRbxPath = getRbxPath(fromFilePath);
946
+ if (callerRbxPath === void 0) throw new Error(`Cannot resolve RbxPath for caller: ${fromFilePath}`);
947
+ const { parentCount, segments } = parseTsImport([...tsImportArgs]);
948
+ if (parentCount > callerRbxPath.length) throw new Error(`Parent traversal count (${String(parentCount)}) exceeds caller RbxPath depth (${String(callerRbxPath.length)}) for: ${fromFilePath}`);
949
+ const targetRbxPath = [...callerRbxPath.slice(0, callerRbxPath.length - parentCount), ...segments];
950
+ const resolved = reverseMapper.resolve(targetRbxPath);
951
+ if (resolved === void 0) throw new Error(`Cannot resolve target module: ${targetRbxPath.join(".")} (from ${fromFilePath})`);
952
+ return resolved;
953
+ } };
954
+ }
955
+ //#endregion
956
+ //#region src/resolution/path-require-resolver.ts
957
/** Separator used inside require() path strings. */
const PATH_SEPARATOR = "/";
/** Require prefixes emitted unchanged rather than bundled. */
const PASSTHROUGH_PREFIXES = new Set(["@rbx", "@std"]);
/** Basenames from which `@self/...` requires are permitted. */
const INIT_BASENAMES = new Set(["init.lua", "init.luau"]);
960
/**
 * Build a resolver that mirrors Roblox's runtime string-require semantics on
 * the filesystem: `./` and `../` resolve relative to the caller, `@self/x`
 * resolves to a sibling of an init module, and `@std/*` / `@rbx/*` are
 * passed through (left unchanged in emitted output).
 *
 * @returns A function that resolves a single `require(...)` string.
 */
function createPathRequireResolver() {
	// Shared across calls so repeated targets hit the extension cache.
	const resolutionCache = new Map();
	return function resolve(fromFilePath, requireString) {
		const segments = requireString.split(PATH_SEPARATOR);
		const [head] = segments;
		if (PASSTHROUGH_PREFIXES.has(head)) {
			return { kind: "passthrough" };
		}
		validateFirstPart(fromFilePath, head);
		const bareTarget = walkParts(fromFilePath, segments);
		const resolvedFile = resolveFileWithExtension(bareTarget, resolutionCache);
		if (resolvedFile === void 0) {
			throw new Error(`cannot resolve require("${requireString}") from ${fromFilePath}: no .luau or .lua file at ${bareTarget}`);
		}
		return {
			kind: "resolved",
			path: resolvedFile
		};
	};
}
984
/**
 * Reject require-path heads that cannot be bundled: absolute paths,
 * `@game` aliases, `@self` outside init modules, and bare module names.
 *
 * @param fromFilePath - Path of the requiring file (for error messages).
 * @param firstPart - The first `/`-separated segment of the require string.
 * @throws On any unsupported path head.
 */
function validateFirstPart(fromFilePath, firstPart) {
	if (firstPart === "") {
		throw new Error(`paths beginning with '/' are not supported (from ${fromFilePath})`);
	}
	if (firstPart === "@game") {
		throw new Error(`@game-prefixed requires are not bundleable (from ${fromFilePath})`);
	}
	if (firstPart === "@self") {
		if (!INIT_BASENAMES.has(path.basename(fromFilePath))) {
			throw new Error(`@self requires are only valid from init.luau / init.lua (from ${fromFilePath})`);
		}
		return;
	}
	const isRelativeOrAlias = firstPart === "." || firstPart === ".." || firstPart.startsWith("@");
	if (!isRelativeOrAlias) {
		throw new Error(`bare require("${firstPart}") is not supported; use a relative path (from ${fromFilePath})`);
	}
}
990
/**
 * Assert that a `.`/`..`/`@self` segment appears only in the leading
 * run of a require path.
 *
 * @param state - Walk state carrying the `inLeadingDotZone` flag.
 * @param part - The segment being validated (for the error message).
 * @throws When the leading zone has already been exited.
 */
function ensureLeadingZone(state, part) {
	if (state.inLeadingDotZone) {
		return;
	}
	throw new Error(`'${part}' is only allowed at the beginning of a require path`);
}
993
/**
 * Apply one require-path segment to the walk state: `.`/`@self` close the
 * leading zone, `..` climbs one directory, anything else descends into it.
 *
 * @param state - Mutable walk state (currentDirectory, inLeadingDotZone).
 * @param part - The segment to apply; empty segments are ignored.
 */
function processPart(state, part) {
	switch (part) {
		case "":
			// Collapse empty segments from doubled separators.
			return;
		case ".":
		case "@self":
			ensureLeadingZone(state, part);
			state.inLeadingDotZone = false;
			return;
		case "..":
			// Note: `..` keeps the leading zone open so runs like `../../` work.
			ensureLeadingZone(state, part);
			state.currentDirectory = toPosix(path.dirname(state.currentDirectory));
			return;
		default:
			state.currentDirectory = toPosix(path.join(state.currentDirectory, part));
			state.inLeadingDotZone = false;
	}
}
1008
/**
 * Walk all require-path segments starting from the requiring file's
 * directory and return the resulting target path (without extension).
 *
 * @param fromFilePath - File containing the require.
 * @param parts - The split require-path segments.
 * @returns The resolved directory/file path without extension.
 */
function walkParts(fromFilePath, parts) {
  const state = {
    currentDirectory: toPosix(path.dirname(fromFilePath)),
    inLeadingDotZone: true
  };
  parts.forEach((part) => processPart(state, part));
  return state.currentDirectory;
}
1016
/**
 * Find the on-disk file for an extensionless target, trying each Luau
 * extension in order. Results — including misses — are memoized in `cache`.
 *
 * @param targetWithoutExtension - Candidate path without extension.
 * @param cache - Memoization map; stores `undefined` for known misses.
 * @returns The matching file path, or `undefined` when no file exists.
 */
function resolveFileWithExtension(targetWithoutExtension, cache) {
  if (cache.has(targetWithoutExtension)) return cache.get(targetWithoutExtension);
  let found;
  for (const extension of LUAU_EXTENSIONS) {
    const candidate = targetWithoutExtension + extension;
    if (fs.existsSync(candidate)) {
      found = candidate;
      break;
    }
  }
  cache.set(targetWithoutExtension, found);
  return found;
}
1028
+ //#endregion
1029
+ //#region src/resolution/rbxpath-reverse-mapper.ts
1030
/**
 * Serialize an RbxPath array into a string suitable for use as a Map key.
 * Segments are joined with NUL so the key round-trips unambiguously for
 * typical instance names.
 *
 * @param rbxPath - The RbxPath segments to serialize.
 * @returns The serialized string key.
 */
function serializeRbxPath(rbxPath) {
  return [...rbxPath].join("\0");
}
/**
 * Build a reverse map from RbxPath to filesystem path.
 * Calls `getRbxPath` on each discovered file; files with no RbxPath are
 * skipped.
 *
 * @param discoveredFiles - Absolute paths of all discovered files.
 * @param getRbxPath - Function that maps a file path to its RbxPath.
 * @returns A ReverseMapper for looking up filesystem paths by RbxPath.
 */
function createReverseMapper(discoveredFiles, getRbxPath) {
  const byKey = /* @__PURE__ */ new Map();
  for (const filePath of discoveredFiles) {
    const rbxPath = getRbxPath(filePath);
    if (rbxPath !== void 0) byKey.set(serializeRbxPath(rbxPath), filePath);
  }
  return {
    resolve: (rbxPath) => byKey.get(serializeRbxPath(rbxPath))
  };
}
1058
+ //#endregion
1059
+ //#region src/bundler.ts
1060
// Modules whose top-level local count exceeds this emit a bundle warning
// (see collectLocalsWarnings).
const LOCALS_WARNING_THRESHOLD = 200;
1061
/**
 * Bundle a Luau entry point and its dependencies into a single file.
 *
 * @param config - Bundle configuration.
 * @returns The bundle result with output path, module count, timing, and warnings.
 * @throws When `declaration` is enabled but the output is not a `.luau` file.
 */
function bundle(config) {
  const outputPath = toPosix(path.resolve(config.output));
  if (config.declaration === true && !outputPath.endsWith(".luau")) {
    throw new Error(`--declaration requires output path ending in .luau, got: ${outputPath}`);
  }
  const startedAt = performance.now();
  const phases = runBundlePhases({
    config,
    entryPath: toPosix(path.resolve(config.entry)),
    outputPath
  });
  const timing = {
    ...phases.timing,
    totalMs: performance.now() - startedAt
  };
  return {
    declarationOutputPath: phases.declarationOutputPath,
    moduleCount: phases.graph.sorted.length,
    outputPath,
    sourceMapPath: phases.sourceMapPath,
    timing,
    warnings: collectLocalsWarnings(phases.rawAnalyses)
  };
}
1088
/**
 * Write a source map next to the bundle when running in path mode.
 *
 * @param config - Bundle configuration (checked for `requireMode`).
 * @param context - Source map entries plus the bundle output path.
 * @returns The map file path when written, otherwise `undefined`.
 */
function writeSourceMapIfRequested(config, context) {
  const { entries, outputPath } = context;
  if (config.requireMode !== "path" || entries.length === 0) return;
  const sourceMapPath = `${outputPath}.map`;
  const map = emitSourceMap({
    entries,
    outputPath,
    sourcemapDirectory: toPosix(path.dirname(outputPath))
  });
  fs.writeFileSync(sourceMapPath, JSON.stringify(map));
  return sourceMapPath;
}
1099
/**
 * Emit a bundled .d.ts next to the output when `declaration` is enabled.
 *
 * @param config - Bundle configuration (`declaration`, `external`).
 * @param options - Entry and output paths for the bundle.
 * @returns Declaration timing and path, or an empty object when disabled.
 */
function emitDeclarations(config, options) {
  if (config.declaration !== true) return {};
  const startedAt = performance.now();
  // `init.luau` becomes `index.d.ts`; any other `.luau` just swaps extension.
  const dtsOutputPath = options.outputPath
    .replace(/init\.luau$/, "index.d.ts")
    .replace(/\.luau$/, ".d.ts");
  const { outputPath: declarationOutputPath } = bundleDeclarations({
    entryLuauPath: options.entryPath,
    importedLibraries: config.external,
    outputPath: dtsOutputPath
  });
  return {
    declarationMs: performance.now() - startedAt,
    declarationOutputPath
  };
}
1113
/**
 * Write the bundle, then any source map and declaration artifacts.
 *
 * @param input - Codegen result, config, entry path, and output path.
 * @returns Source map path plus declaration timing/path when produced.
 */
function writeBundleArtifacts(input) {
  const { codegen, config, entryPath, outputPath } = input;
  fs.mkdirSync(path.dirname(outputPath), { recursive: true });
  fs.writeFileSync(outputPath, codegen.output);
  const sourceMapPath = writeSourceMapIfRequested(config, {
    entries: codegen.sourceMapEntries,
    outputPath
  });
  const declarations = emitDeclarations(config, { entryPath, outputPath });
  return {
    ...declarations,
    sourceMapPath
  };
}
1129
/**
 * Rebase extractor-relative analysis paths onto the Luau root.
 *
 * @param analyses - Analyses whose `path` is relative to `luauRoot`.
 * @param luauRoot - Absolute root directory.
 * @returns New analysis objects with absolute POSIX paths.
 */
function toAbsoluteAnalyses(analyses, luauRoot) {
  return analyses.map((analysis) => ({
    ...analysis,
    path: toPosix(path.join(luauRoot, analysis.path))
  }));
}
1135
/**
 * Run the Lute extractor over the Luau root and verify the entry file was
 * discovered.
 *
 * @param options - Entry path plus optional explicit Luau root.
 * @returns Absolute and raw analyses plus extraction timing.
 * @throws When the entry file is absent from the extraction data.
 */
function extractAndValidate(options) {
  const { entryPath } = options;
  const hasExplicitRoot = options.luauRoot !== void 0 && options.luauRoot !== "";
  // Default the root to the entry file's directory when not configured.
  const luauRoot = hasExplicitRoot ? toPosix(path.resolve(options.luauRoot)) : path.dirname(entryPath);
  const startedAt = performance.now();
  const extraction = spawnLuteExtractor(luauRoot);
  const extractionMs = performance.now() - startedAt;
  const entryRelativePath = toPosix(path.relative(luauRoot, entryPath));
  const entryFound = extraction.analyses.some((analysis) => analysis.path === entryRelativePath);
  if (!entryFound) throw new Error(`Entry file not found in extraction data: ${entryPath}`);
  return {
    absoluteAnalyses: toAbsoluteAnalyses(extraction.analyses, luauRoot),
    extractionMs,
    rawAnalyses: extraction.analyses
  };
}
1149
/**
 * Build the dependency graph using string-path require resolution.
 *
 * @param analyses - Module analyses with absolute paths.
 * @param entryPath - Absolute entry file path.
 * @returns The dependency graph.
 */
function buildPathModeGraph(analyses, entryPath) {
  const resolveRequire = createPathRequireResolver();
  return buildDependencyGraph({
    analyses,
    entryFilePath: entryPath,
    resolveRequire
  });
}
1156
/**
 * Create a file-path → RbxPath mapper backed by the Rojo project tree.
 *
 * @param project - Loaded Rojo project.
 * @param projectDirectory - POSIX directory containing the project file.
 * @returns A function that maps a file path to its RbxPath, or `undefined`
 *   when the file is outside the project tree.
 */
function createRbxPathResolver(project, projectDirectory) {
  return (filePath) => {
    const relative = toPosix(path.relative(projectDirectory, toPosix(filePath)));
    const dataModelPath = mapFsPathToDataModel(relative, project.tree);
    if (dataModelPath === void 0) return void 0;
    // An empty DataModel path means the project root itself.
    return dataModelPath === "" ? ["game"] : ["game", ...dataModelPath.split("/")];
  };
}
1165
/**
 * Build the dependency graph using roblox-ts style import resolution
 * through a Rojo project.
 *
 * @param options - Analyses, config (with `project`), and entry path.
 * @returns The dependency graph.
 * @throws When no Rojo project is configured.
 */
function buildTsImportGraph(options) {
  const { analyses, config, entryPath } = options;
  if (config.project === void 0 || config.project === "") {
    throw new Error("project is required when requireMode is 'ts-import'");
  }
  const projectDirectory = toPosix(path.dirname(path.resolve(config.project)));
  const getRbxPath = createRbxPathResolver(loadRojoProject(config.project), projectDirectory);
  const reverseMapper = createReverseMapper(analyses.map((analysis) => analysis.path), getRbxPath);
  const resolver = createModuleResolver(getRbxPath, reverseMapper);
  return buildDependencyGraph({
    analyses,
    entryFilePath: entryPath,
    resolve: (from, args) => resolver.resolve(from, args)
  });
}
1176
/**
 * Dispatch dependency-graph construction to the configured require mode.
 *
 * @param config - Bundle configuration.
 * @param analyses - Module analyses with absolute paths.
 * @param entryPath - Absolute entry file path.
 * @returns The dependency graph.
 */
function resolveModules(config, analyses, entryPath) {
  return config.requireMode === "path"
    ? buildPathModeGraph(analyses, entryPath)
    : buildTsImportGraph({ analyses, config, entryPath });
}
1184
/**
 * Index per-module top-level locals by path, excluding locals named "TS"
 * so they do not count toward the locals budget.
 *
 * @param analyses - Module analyses (each may carry `topLevelLocals`).
 * @returns Maps of path → local count and path → filtered locals.
 */
function buildAnalysisLookups(analyses) {
  const localCounts = /* @__PURE__ */ new Map();
  const topLevelLocals = /* @__PURE__ */ new Map();
  for (const analysis of analyses) {
    const userLocals = (analysis.topLevelLocals ?? []).filter(({ name }) => name !== "TS");
    localCounts.set(analysis.path, userLocals.length);
    topLevelLocals.set(analysis.path, userLocals);
  }
  return { localCounts, topLevelLocals };
}
1197
/**
 * Map a module's import edges to resolved-import records for the rewrite
 * pipeline, marking edges whose source and target share a chunk.
 *
 * @param graph - Dependency graph with a `nodes` map.
 * @param options - Chunk mapping, source file path, and module id map.
 * @returns One resolved-import record per import edge.
 */
function buildResolvedImports(graph, options) {
  const { chunkMapping, filePath, moduleIds } = options;
  const sourceChunk = chunkMapping.get(filePath);
  return graph.nodes.get(filePath).imports.map((edge) => {
    const targetChunk = chunkMapping.get(edge.resolvedPath);
    const sameChunk = sourceChunk !== void 0 && sourceChunk === targetChunk;
    return {
      args: [...edge.args],
      // `true` for intra-chunk edges, `undefined` otherwise (kept sparse).
      intraChunk: sameChunk ? true : void 0,
      kind: edge.kind,
      moduleId: moduleIds.get(edge.resolvedPath)
    };
  });
}
1212
/**
 * Run the module rewrite pipeline over raw source with resolved-import
 * context for a non-entry module.
 *
 * @param input - Module identifiers, graph, chunk assignment, and pipeline.
 * @param rawSource - The module's original source text.
 * @returns The rewritten source.
 */
function renameAndRewriteWithRaw(input, rawSource) {
  const { chunkAssignment, graph, moduleId, moduleIds, modulePath, pipeline, topLevelLocals } = input;
  const resolvedImports = buildResolvedImports(graph, {
    chunkMapping: chunkAssignment.moduleToChunk,
    filePath: modulePath,
    moduleIds
  });
  return runPipeline(rawSource, {
    context: {
      isEntryPoint: false,
      locals: topLevelLocals,
      moduleId,
      resolvedImports
    },
    transforms: pipeline
  });
}
1228
/**
 * Read, rewrite, and package one module for inclusion in a chunk.
 *
 * @param input - Module identifiers, graph, chunk assignment, and pipeline.
 * @returns The chunked-module record.
 */
function processChunkedModule(input) {
  const { moduleId, modulePath, totalLocals } = input;
  const rawSource = fs.readFileSync(modulePath, "utf-8");
  return {
    id: moduleId,
    originalContent: rawSource,
    source: renameAndRewriteWithRaw(input, rawSource),
    sourcePath: modulePath,
    // Only surfaced when debug output is enabled.
    totalLocals: isDebug() ? totalLocals : void 0
  };
}
1238
/**
 * Materialize every non-entry chunk by processing each of its modules.
 * The chunk containing the entry point is skipped — the entry is inlined
 * separately at the top level of the bundle.
 *
 * @param options - Chunk assignment, graph, lookups, module ids, pipeline.
 * @returns Chunks ready for bundle generation.
 */
function buildBundleChunks(options) {
  const { chunkAssignment, graph, lookups, moduleIds, pipeline } = options;
  const nonEntryChunks = chunkAssignment.chunks.filter(
    (chunk) => !chunk.modules.includes(graph.entryPoint)
  );
  return nonEntryChunks.map((chunk) => ({
    id: chunk.id,
    modules: chunk.modules.map((modulePath) => processChunkedModule({
      chunkAssignment,
      graph,
      moduleId: moduleIds.get(modulePath),
      moduleIds,
      modulePath,
      pipeline,
      topLevelLocals: lookups.topLevelLocals.get(modulePath),
      totalLocals: lookups.localCounts.get(modulePath)
    }))
  }));
}
1259
/**
 * Read and rewrite the entry point source with entry-specific context.
 *
 * @param options - Entry path, graph, chunk assignment, ids, and pipeline.
 * @returns The original and rewritten entry source.
 */
function rewriteEntrySource(options) {
  const { chunkAssignment, entryPath, graph, moduleIds, pipeline } = options;
  const originalContent = fs.readFileSync(entryPath, "utf-8");
  const resolvedImports = buildResolvedImports(graph, {
    chunkMapping: chunkAssignment.moduleToChunk,
    filePath: entryPath,
    moduleIds
  });
  const rewritten = runPipeline(originalContent, {
    context: {
      isEntryPoint: true,
      locals: [],
      moduleId: "",
      resolvedImports
    },
    transforms: pipeline
  });
  return { originalContent, rewritten };
}
1279
/**
 * Select the entry and module transform pipelines for a require mode.
 *
 * @param requireMode - Either "path" or "ts-import".
 * @returns The entry and module pipelines.
 */
function pickPipelines(requireMode) {
  return requireMode === "path"
    ? { entry: PATH_ENTRY_PIPELINE, module: PATH_MODULE_PIPELINE }
    : { entry: ENTRY_PIPELINE, module: MODULE_PIPELINE };
}
1289
/**
 * Generate the final bundle text, collecting source-map entries emitted
 * during generation.
 *
 * @param input - Chunks, rewritten entry, entry path, and require mode.
 * @returns The bundle output and collected source-map entries.
 */
function assembleBundle(input) {
  const sourceMapEntries = [];
  const output = generateChunkedBundle(input.chunks, input.entry.rewritten, {
    entryInfo: {
      originalContent: input.entry.originalContent,
      sourcePath: input.entryPath
    },
    // Path-mode bundles omit the roblox-ts "TS" global prelude.
    includeTsGlobal: input.requireMode !== "path",
    onMapping: (mapping) => {
      sourceMapEntries.push(mapping);
    }
  });
  return { output, sourceMapEntries };
}
1305
/**
 * Run the full codegen pass: assign module ids and chunks, rewrite all
 * modules and the entry, then assemble the bundle.
 *
 * @param options - Analyses, entry path, graph, and require mode.
 * @returns The assembled bundle output and source-map entries.
 */
function generateOutput(options) {
  const { analyses, entryPath, graph, requireMode } = options;
  // The entry point is inlined, so it never receives a module id.
  const moduleIds = assignModuleIds(graph.sorted.filter((modulePath) => modulePath !== entryPath));
  const lookups = buildAnalysisLookups(analyses);
  const chunkAssignment = assignChunks({
    graph,
    localCounts: lookups.localCounts
  });
  const pipelines = pickPipelines(requireMode);
  const chunks = buildBundleChunks({
    chunkAssignment,
    graph,
    lookups,
    moduleIds,
    pipeline: pipelines.module
  });
  const entry = rewriteEntrySource({
    chunkAssignment,
    entryPath,
    graph,
    moduleIds,
    pipeline: pipelines.entry
  });
  return assembleBundle({ chunks, entry, entryPath, requireMode });
}
1333
/**
 * Time the codegen phase, defaulting the require mode to "ts-import".
 *
 * @param input - Analyses, config, entry path, and graph.
 * @returns The codegen result and elapsed milliseconds.
 */
function runCodegen(input) {
  const startedAt = performance.now();
  const codegen = generateOutput({
    analyses: input.analyses,
    entryPath: input.entryPath,
    graph: input.graph,
    requireMode: input.config.requireMode ?? "ts-import"
  });
  return {
    codegen,
    codegenMs: performance.now() - startedAt
  };
}
1345
/**
 * Execute the bundle phases in order: extraction → resolution → codegen →
 * artifact writing, timing each phase.
 *
 * @param input - Config plus resolved entry and output paths.
 * @returns Graph, raw analyses, artifact paths, and per-phase timings.
 */
function runBundlePhases(input) {
  const { config, entryPath, outputPath } = input;
  const { absoluteAnalyses, extractionMs, rawAnalyses } = extractAndValidate({
    entryPath,
    luauRoot: config.luauRoot
  });
  const resolutionStart = performance.now();
  const graph = resolveModules(config, absoluteAnalyses, entryPath);
  const resolutionMs = performance.now() - resolutionStart;
  const { codegen, codegenMs } = runCodegen({
    analyses: absoluteAnalyses,
    config,
    entryPath,
    graph
  });
  const written = writeBundleArtifacts({
    codegen,
    config,
    entryPath,
    outputPath
  });
  return {
    declarationOutputPath: written.declarationOutputPath,
    graph,
    rawAnalyses,
    sourceMapPath: written.sourceMapPath,
    timing: {
      codegenMs,
      declarationMs: written.declarationMs,
      extractionMs,
      resolutionMs
    }
  };
}
1379
/**
 * Produce a warning line for each module whose total locals exceed the
 * configured threshold.
 *
 * @param analyses - Raw analyses carrying `totalLocals`.
 * @returns Warning strings (empty when nothing exceeds the threshold).
 */
function collectLocalsWarnings(analyses) {
  return analyses
    .filter((analysis) => analysis.totalLocals > LOCALS_WARNING_THRESHOLD)
    .map((analysis) => `${analysis.path}: ${String(analysis.totalLocals)} locals exceeds ${String(LOCALS_WARNING_THRESHOLD)} threshold`);
}
1384
+ //#endregion
1385
+ //#region src/config/loader.ts
1386
/**
 * Load and validate a weld config file using c12 discovery.
 *
 * @param configPath - Explicit path to a config file, or undefined for auto-discovery.
 * @param cwd - Working directory for config discovery.
 * @returns The validated config.
 * @rejects When the config file is not found, invalid, or extends resolution fails.
 */
async function loadConfig$1(configPath, cwd = process.cwd()) {
  const { config, extendWarnings } = await loadRawConfig(configPath, cwd);
  const [firstWarning] = extendWarnings;
  if (firstWarning !== void 0) {
    // c12 reports the unresolvable extends target inside backticks.
    const extendsPath = firstWarning.match(/`([^`]+)`/)?.[1];
    throw new Error(`Failed to resolve extends: "${String(extendsPath)}". If the file exists, try adding the file extension (e.g. ".ts").`);
  }
  return validateConfig(resolveFunctionValues(config));
}
1402
/**
 * Merge config sources with defuFn, dropping nullish/falsy sources first.
 *
 * @param sources - Config layers from most to least specific.
 * @returns The merged config.
 */
function merger(...sources) {
  const defined = sources.filter(Boolean);
  return defuFn(...defined);
}
1405
/**
 * Wrap console.warn so c12 "Cannot extend config" warnings are captured
 * into `extendWarnings` instead of being printed; everything else is
 * forwarded to the original warn.
 *
 * @param originalWarn - The real console.warn to forward to.
 * @param extendWarnings - Array that captured warning messages are pushed to.
 * @returns The replacement warn function.
 */
function interceptExtendWarnings(originalWarn, extendWarnings) {
  return (...args) => {
    const message = args.join(" ");
    if (!message.includes("Cannot extend config")) {
      originalWarn.apply(console, args);
      return;
    }
    extendWarnings.push(message);
  };
}
1415
/**
 * Detect c12's "config cannot be resolved" error shape.
 *
 * @param err - The caught value.
 * @returns True when `err` is an Error whose message mentions resolution failure.
 */
function isNotFoundError(err) {
  if (!(err instanceof Error)) return false;
  return err.message.includes("cannot be resolved");
}
1418
+ async function loadRawConfig(configPath, cwd) {
1419
+ const extendWarnings = [];
1420
+ const originalWarn = console.warn;
1421
+ try {
1422
+ console.warn = interceptExtendWarnings(originalWarn, extendWarnings);
1423
+ return {
1424
+ config: (await loadConfig({
1425
+ name: "weld",
1426
+ configFile: configPath,
1427
+ configFileRequired: configPath !== void 0,
1428
+ cwd,
1429
+ dotenv: false,
1430
+ globalRc: false,
1431
+ merger,
1432
+ omit$Keys: true,
1433
+ packageJson: false,
1434
+ rcFile: false
1435
+ })).config,
1436
+ extendWarnings
1437
+ };
1438
+ } catch (err) {
1439
+ if (configPath !== void 0 && isNotFoundError(err)) throw new Error(`Config file not found: ${configPath}`, { cause: err });
1440
+ throw err;
1441
+ } finally {
1442
+ console.warn = originalWarn;
1443
+ }
1444
+ }
1445
/**
 * Check whether a config value is a defuFn merger function.
 *
 * @param value - The config value to test.
 * @returns True when the value is callable.
 */
function isMergerFunction(value) {
  const isCallable = typeof value === "function";
  return isCallable;
}
/**
 * Replace defuFn merger functions in a config object with their resolved
 * values (each is invoked with `undefined`); plain values pass through.
 *
 * @param config - The raw merged config.
 * @returns A new object with all function values resolved.
 */
function resolveFunctionValues(config) {
  const entries = Object.entries(config).map(([key, value]) => [
    key,
    isMergerFunction(value) ? value(void 0) : value
  ]);
  return Object.fromEntries(entries);
}
1453
+ //#endregion
1454
+ //#region src/utils/logger.ts
1455
// Fixed column width for timing labels in logResult's breakdown table.
const LABEL_WIDTH = 12;
1456
/**
 * Format a duration in milliseconds for display. Values below 1000 are shown
 * as whole milliseconds (e.g. "456ms"). Values at or above 1000 are shown as
 * seconds with one decimal place (e.g. "1.2s").
 *
 * @param ms - Duration in milliseconds.
 * @returns Formatted time string.
 */
function formatTime(ms) {
  return ms >= 1e3 ? `${(ms / 1e3).toFixed(1)}s` : `${Math.round(ms)}ms`;
}
1468
/**
 * Log a warning message to stderr with a yellow warning symbol.
 *
 * @param message - Warning text to display.
 */
function logWarning(message) {
  const prefix = color.yellow("⚠");
  process.stderr.write(`${prefix} ${message}\n`);
}
1476
/**
 * Log an error with formatted output to stderr. Accepts both string messages
 * and Error objects.
 *
 * @param error - The error to display.
 */
function logError(error) {
  const message = error instanceof Error ? error.message : String(error);
  const prefix = color.red("✖");
  process.stderr.write(`${prefix} ${message}\n`);
}
1486
/**
 * Log a successful bundle result to stderr, including module count, output
 * path, and a timing breakdown with right-aligned time values.
 *
 * @param result - Bundle result information.
 */
function logResult(result) {
  const { declarationOutputPath, moduleCount, outputPath, timing } = result;
  const rows = [
    ["Extraction", timing.extractionMs],
    ["Resolution", timing.resolutionMs],
    ["Codegen", timing.codegenMs]
  ];
  if (timing.declarationMs !== void 0) rows.push(["Declaration", timing.declarationMs]);
  // Width is computed over every row plus the total so the column lines up.
  const formattedTimes = [...rows.map(([, ms]) => formatTime(ms)), formatTime(timing.totalMs)];
  const maxTimeWidth = Math.max(...formattedTimes.map((value) => value.length));
  const write = (text) => process.stderr.write(text);
  write(`${color.green("✓")} Bundled ${String(moduleCount)} modules → ${outputPath}\n`);
  if (declarationOutputPath !== void 0) {
    write(`${color.green("✓")} Declaration → ${declarationOutputPath}\n`);
  }
  write("\n");
  for (const [label, ms] of rows) {
    write(` ${label.padEnd(LABEL_WIDTH)} ${formatTime(ms).padStart(maxTimeWidth)}\n`);
  }
  write(color.dim(` ${"─".repeat(LABEL_WIDTH + 1 + maxTimeWidth)}\n`));
  write(` ${"Total".padEnd(LABEL_WIDTH)} ${formatTime(timing.totalMs).padStart(maxTimeWidth)}\n`);
}
1513
+ //#endregion
1514
+ //#region src/cli.ts
1515
/**
 * Entry point for the weld bundler CLI. Parses arguments, merges them with
 * the discovered config file, bundles, and reports the result; any failure
 * is logged and reflected in the process exit code.
 *
 * @param args - CLI arguments to parse.
 */
async function main(args = process.argv.slice(2)) {
  try {
    const cliOptions = parseBundleArgs(args);
    const fileConfig = await loadConfig$1(cliOptions.config);
    const result = bundle(mergeConfig(fileConfig, cliOptions));
    logResult(result);
    result.warnings.forEach((warning) => logWarning(warning));
    process.exitCode = 0;
  } catch (err) {
    logError(err);
    process.exitCode = 1;
  }
}
1532
/**
 * Merge file-config and CLI options into a final bundle config, with CLI
 * values taking precedence, and validate required fields.
 *
 * @param fileConfig - Config loaded from the weld config file.
 * @param cli - Parsed CLI options.
 * @returns The merged bundle configuration.
 * @throws When entry, output, or (in ts-import mode) project is missing.
 */
function mergeConfig(fileConfig, cli) {
  const entry = cli.entry ?? fileConfig.entry;
  const project = cli.project ?? fileConfig.project;
  const output = cli.output ?? fileConfig.output;
  const requireMode = cli.requireMode ?? fileConfig.requireMode ?? "ts-import";
  if (entry === void 0 || entry === "") {
    throw new Error("--entry is required (via CLI or config file)");
  }
  if (requireMode === "ts-import" && (project === void 0 || project === "")) {
    throw new Error("--project is required (via CLI or config file)");
  }
  if (output === void 0 || output === "") {
    throw new Error("--output is required (via CLI or config file)");
  }
  return {
    declaration: cli.declaration ?? fileConfig.declaration ?? false,
    entry,
    // CLI --external wins only when at least one value was passed.
    external: cli.external.length > 0 ? cli.external : fileConfig.external,
    luauRoot: cli.luauRoot ?? fileConfig.luauRoot,
    output,
    // Path mode bundles without a Rojo project, so any project is dropped.
    project: requireMode === "path" ? void 0 : project,
    requireMode
  };
}
1552
/**
 * Validate a raw --require-mode value.
 *
 * @param raw - The raw CLI string, or undefined when the flag was absent.
 * @returns The validated mode, or undefined when absent.
 * @throws When the value is neither 'ts-import' nor 'path'.
 */
function parseRequireMode(raw) {
  if (raw === void 0 || raw === "ts-import" || raw === "path") return raw;
  throw new Error(`--require-mode must be 'ts-import' or 'path' (got: ${raw})`);
}
/**
 * Parse CLI arguments into bundle options.
 *
 * @param args - Raw CLI argument strings.
 * @returns Parsed CLI options.
 * @throws On unknown flags (strict parsing) or an invalid --require-mode.
 */
function parseBundleArgs(args) {
  const { values } = parseArgs({
    args,
    options: {
      "config": { short: "c", type: "string" },
      "declaration": { short: "d", type: "boolean" },
      "entry": { short: "e", type: "string" },
      "external": { multiple: true, type: "string" },
      "luau-root": { type: "string" },
      "output": { short: "o", type: "string" },
      "project": { short: "p", type: "string" },
      "require-mode": { short: "r", type: "string" }
    },
    strict: true
  });
  return {
    config: values.config,
    declaration: values.declaration,
    entry: values.entry,
    external: values.external ?? [],
    luauRoot: values["luau-root"],
    output: values.output,
    project: values.project,
    requireMode: parseRequireMode(values["require-mode"])
  };
}
1610
+ //#endregion
1611
+ export { main };
1612
+
1613
+ //# sourceMappingURL=cli.mjs.map