thinkwell 0.4.0 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/thinkwell +50 -2
- package/dist/cli/build.d.ts +40 -0
- package/dist/cli/build.d.ts.map +1 -0
- package/dist/cli/build.js +954 -0
- package/dist/cli/build.js.map +1 -0
- package/dist/cli/main.js +26 -1
- package/dist/cli/main.js.map +1 -1
- package/dist-pkg/cli-build.cjs +6354 -0
- package/dist-pkg/esbuild-bin/darwin-arm64/esbuild +0 -0
- package/dist-pkg/package.json +100 -0
- package/dist-pkg/pkg-cli.cjs +75952 -0
- package/dist-pkg/pkg-common.cjs +235 -0
- package/dist-pkg/pkg-prelude/bootstrap.js +2260 -0
- package/dist-pkg/pkg-prelude/diagnostic.js +149 -0
- package/package.json +11 -6
|
@@ -0,0 +1,954 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Build command for creating self-contained executables from user scripts.
|
|
3
|
+
*
|
|
4
|
+
* This module provides the `thinkwell build` command that compiles user scripts
|
|
5
|
+
* into standalone binaries using the same pkg-based tooling as the thinkwell CLI.
|
|
6
|
+
*
|
|
7
|
+
* The build process follows a two-stage pipeline:
|
|
8
|
+
* 1. **Pre-bundle with esbuild** - Bundle user script + thinkwell packages into CJS
|
|
9
|
+
* 2. **Compile with pkg** - Create self-contained binary with Node.js runtime
|
|
10
|
+
*/
|
|
11
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, rmSync, copyFileSync, chmodSync, createWriteStream, } from "node:fs";
|
|
12
|
+
import { dirname, resolve, basename, join, isAbsolute } from "node:path";
|
|
13
|
+
import { fileURLToPath } from "node:url";
|
|
14
|
+
import { styleText } from "node:util";
|
|
15
|
+
import { homedir } from "node:os";
|
|
16
|
+
import { createHash } from "node:crypto";
|
|
17
|
+
import { spawn, execSync } from "node:child_process";
|
|
18
|
+
import ora from "ora";
|
|
19
|
+
import * as esbuild from "esbuild";
|
|
20
|
+
// Handle both ESM and CJS contexts for __dirname
// When bundled to CJS, import.meta.url won't work, but global __dirname will
// Fallback chain: ESM url -> CJS global __dirname -> directory of the script
// being executed (process.argv[1]).
const __dirname = typeof import.meta?.url === "string"
    ? dirname(fileURLToPath(import.meta.url))
    : globalThis.__dirname || dirname(process.argv[1]);
// Map user-friendly target names to pkg target names
// NOTE: every pkg target pins the embedded runtime to Node 24; keep in sync
// with PORTABLE_NODE_VERSION and the esbuild `target: "node24"` option below.
const TARGET_MAP = {
    "darwin-arm64": "node24-macos-arm64",
    "darwin-x64": "node24-macos-x64",
    "linux-x64": "node24-linux-x64",
    "linux-arm64": "node24-linux-arm64",
};
|
|
32
|
+
// Detect the current host platform.
// Returns one of the supported "<platform>-<arch>" target keys, or throws
// when the host is not a platform thinkwell can build for.
function detectHostTarget() {
    const hostKey = `${process.platform}-${process.arch}`;
    const supported = ["darwin-arm64", "darwin-x64", "linux-x64", "linux-arm64"];
    if (supported.includes(hostKey)) {
        return hostKey;
    }
    throw new Error(`Unsupported platform: ${process.platform}-${process.arch}. ` +
        `Supported platforms: darwin-arm64, darwin-x64, linux-x64, linux-arm64`);
}
|
|
47
|
+
/**
 * Parse and validate build options from command-line arguments.
 *
 * Recognized flags: -o/--output <path>, -t/--target <name> (repeatable),
 * --include <pattern> (repeatable), -v/--verbose, -q/--quiet, -n/--dry-run.
 * Exactly one positional argument (the entry file) is required.
 *
 * @param {string[]} args - Raw argv slice after the `build` subcommand.
 * @returns {{entry: string, targets: string[], include: string[]}} options
 * @throws On unknown flags, missing flag values, invalid targets, extra
 *         positionals, or a missing entry file.
 */
export function parseBuildArgs(args) {
    const options = {
        entry: "",
        targets: [],
        include: [],
    };
    // Consume the value that must follow a flag, or fail with a clear message.
    const takeValue = (index, flag) => {
        if (index >= args.length) {
            throw new Error(`Missing value for ${flag}`);
        }
        return args[index];
    };
    for (let idx = 0; idx < args.length; idx++) {
        const arg = args[idx];
        switch (arg) {
            case "-o":
            case "--output":
                options.output = takeValue(++idx, "--output");
                break;
            case "-t":
            case "--target": {
                const target = takeValue(++idx, "--target");
                const validTargets = ["darwin-arm64", "darwin-x64", "linux-x64", "linux-arm64", "host"];
                if (!validTargets.includes(target)) {
                    throw new Error(`Invalid target '${target}'. Valid targets: ${validTargets.join(", ")}`);
                }
                options.targets.push(target);
                break;
            }
            case "--include":
                options.include.push(takeValue(++idx, "--include"));
                break;
            case "--verbose":
            case "-v":
                options.verbose = true;
                break;
            case "--quiet":
            case "-q":
                options.quiet = true;
                break;
            case "--dry-run":
            case "-n":
                options.dryRun = true;
                break;
            default:
                if (arg.startsWith("-")) {
                    throw new Error(`Unknown option: ${arg}`);
                }
                // Positional argument - entry file (at most one allowed)
                if (options.entry) {
                    throw new Error(`Unexpected argument: ${arg}`);
                }
                options.entry = arg;
        }
    }
    // Validate entry
    if (!options.entry) {
        throw new Error("No entry file specified");
    }
    // Default target is host
    if (options.targets.length === 0) {
        options.targets = ["host"];
    }
    return options;
}
|
|
116
|
+
/**
 * Initialize the build context with resolved paths and validated inputs.
 *
 * Resolves the entry file (relative paths resolve against cwd), derives the
 * output base name by stripping a script extension, locates the packaged
 * thinkwell dist-pkg directory, resolves "host" targets to the concrete
 * platform, and de-duplicates the target list.
 *
 * @param {object} options - Parsed build options from parseBuildArgs.
 * @returns {object} Build context consumed by the bundle/compile stages.
 * @throws If the entry file or the dist-pkg directory does not exist.
 */
function initBuildContext(options) {
    // Resolve entry path
    const entryPath = isAbsolute(options.entry)
        ? options.entry
        : resolve(process.cwd(), options.entry);
    if (!existsSync(entryPath)) {
        // Only hint about extensions when the user omitted one entirely.
        const suggestion = options.entry.endsWith(".ts") || options.entry.endsWith(".js")
            ? ""
            : "\n  Did you mean to add a .ts or .js extension?";
        throw new Error(`Entry file not found: ${options.entry}${suggestion}\n` +
            `  Working directory: ${process.cwd()}`);
    }
    // Strip any recognized script extension to get the default output name.
    const entryBasename = basename(entryPath).replace(/\.(ts|js|mts|mjs|cts|cjs)$/, "");
    const entryDir = dirname(entryPath);
    // Create build directory in the entry file's directory
    const buildDir = join(entryDir, ".thinkwell-build");
    // Find the thinkwell dist-pkg directory
    // When running from npm install: node_modules/thinkwell/dist-pkg
    // When running from source: packages/thinkwell/dist-pkg
    const thinkwellDistPkg = resolve(__dirname, "../../dist-pkg");
    if (!existsSync(thinkwellDistPkg)) {
        throw new Error(`Thinkwell dist-pkg not found at ${thinkwellDistPkg}.\n` +
            `  This may indicate a corrupted installation.\n` +
            `  Try reinstalling thinkwell: npm install thinkwell`);
    }
    // Resolve "host" targets to actual platform
    const resolvedTargets = options.targets.map((t) => t === "host" ? detectHostTarget() : t);
    // Deduplicate targets (e.g. "-t host -t linux-x64" on a linux-x64 host)
    const uniqueTargets = [...new Set(resolvedTargets)];
    return {
        entryPath,
        entryBasename,
        entryDir,
        buildDir,
        thinkwellDistPkg,
        resolvedTargets: uniqueTargets,
        options,
    };
}
|
|
158
|
+
/**
 * Generate the output path for a given target.
 *
 * With an explicit --output and a single target, the path is used verbatim;
 * with multiple targets a `-<target>` suffix is appended so binaries do not
 * overwrite each other. Without --output, the default is
 * `<entry-basename>-<target>` in the current working directory.
 */
function getOutputPath(ctx, target) {
    const requested = ctx.options.output;
    if (!requested) {
        // Default: <entry-basename>-<target> in current directory
        return resolve(process.cwd(), `${ctx.entryBasename}-${target}`);
    }
    const base = isAbsolute(requested) ? requested : resolve(process.cwd(), requested);
    // Single target: exact path. Multiple targets: per-target suffix.
    return ctx.resolvedTargets.length === 1 ? base : `${base}-${target}`;
}
|
|
182
|
+
/**
 * Generate the wrapper entry point that sets up global.__bundled__.
 *
 * This creates a CJS file that:
 * 1. Loads the pre-bundled thinkwell packages
 * 2. Registers them in global.__bundled__
 * 3. Loads and runs the user's bundled code
 *
 * The returned string is written verbatim to disk and compiled by pkg, so
 * its exact content (including the shebang) is part of the build output.
 *
 * @param {string} userBundlePath - Path to the user's esbuild output; only
 *        its basename is embedded (the wrapper sits in the same directory).
 * @returns {string} CJS source for the wrapper entry point.
 */
function generateWrapperSource(userBundlePath) {
    return `#!/usr/bin/env node
/**
 * Generated wrapper for thinkwell build.
 * This file is auto-generated - do not edit.
 */

// Register bundled thinkwell packages
const thinkwell = require('./thinkwell.cjs');
const acpModule = require('./acp.cjs');
const protocolModule = require('./protocol.cjs');

global.__bundled__ = {
  'thinkwell': thinkwell,
  '@thinkwell/acp': acpModule,
  '@thinkwell/protocol': protocolModule,
};

// Load the user's bundled code
require('./${basename(userBundlePath)}');
`;
}
|
|
212
|
+
/**
 * Stage 1: Bundle user script with esbuild.
 *
 * This bundles the user's entry point along with all its dependencies
 * into a single CJS file. The thinkwell packages are marked as external
 * since they'll be provided via global.__bundled__.
 *
 * @param {object} ctx - Build context from initBuildContext.
 * @returns {Promise<string>} Path to the bundled CJS file in ctx.buildDir.
 * @throws Re-raises esbuild failures, translating the two most common ones
 *         (unresolved dependency, unsupported file type) into actionable
 *         messages.
 */
async function bundleUserScript(ctx) {
    const outputFile = join(ctx.buildDir, `${ctx.entryBasename}-bundle.cjs`);
    if (ctx.options.verbose) {
        console.log(`  Bundling ${ctx.entryPath}...`);
    }
    // Note: When running from a compiled binary, ESBUILD_BINARY_PATH is set
    // by main-pkg.cjs before this module loads.
    try {
        await esbuild.build({
            entryPoints: [ctx.entryPath],
            bundle: true,
            platform: "node",
            format: "cjs",
            outfile: outputFile,
            // External: Node built-ins
            external: ["node:*"],
            // Mark thinkwell packages as external - they're provided via global.__bundled__
            // But actually, we need to transform the imports, so let's bundle them
            // and use a banner to set up the module aliases
            // The banner below shadows `require` inside the bundle so that any
            // runtime require of a thinkwell package resolves to the
            // pre-registered global.__bundled__ entries.
            banner: {
                js: `
// Alias thinkwell packages to global.__bundled__
const __origRequire = require;
require = function(id) {
  if (id === 'thinkwell' || id === 'thinkwell:agent' || id === 'thinkwell:connectors') {
    return global.__bundled__['thinkwell'];
  }
  if (id === '@thinkwell/acp' || id === 'thinkwell:acp') {
    return global.__bundled__['@thinkwell/acp'];
  }
  if (id === '@thinkwell/protocol' || id === 'thinkwell:protocol') {
    return global.__bundled__['@thinkwell/protocol'];
  }
  return __origRequire(id);
};
require.resolve = __origRequire.resolve;
require.cache = __origRequire.cache;
require.extensions = __origRequire.extensions;
require.main = __origRequire.main;
`,
            },
            // Resolve thinkwell imports to bundled versions during bundle time
            plugins: [
                {
                    name: "thinkwell-resolver",
                    setup(build) {
                        // Resolve thinkwell:* imports to the npm package
                        build.onResolve({ filter: /^thinkwell:/ }, (args) => {
                            const moduleName = args.path.replace("thinkwell:", "");
                            const moduleMap = {
                                agent: "thinkwell",
                                acp: "@thinkwell/acp",
                                protocol: "@thinkwell/protocol",
                                connectors: "thinkwell",
                            };
                            const resolved = moduleMap[moduleName];
                            if (resolved) {
                                // Mark as external - will be provided by global.__bundled__ at runtime
                                return { path: resolved, external: true };
                            }
                            // Unknown thinkwell: specifier - let esbuild report it.
                            return null;
                        });
                        // Mark thinkwell packages as external
                        build.onResolve({ filter: /^(thinkwell|@thinkwell\/(acp|protocol))$/ }, (args) => {
                            return { path: args.path, external: true };
                        });
                    },
                },
            ],
            sourcemap: false,
            minify: false,
            keepNames: true,
            target: "node24",
            logLevel: ctx.options.verbose ? "info" : "silent",
        });
    }
    catch (error) {
        // Provide helpful error messages for common failures
        const message = error instanceof Error ? error.message : String(error);
        if (message.includes("Could not resolve")) {
            const match = message.match(/Could not resolve "([^"]+)"/);
            const moduleName = match ? match[1] : "unknown module";
            throw new Error(`Could not resolve dependency "${moduleName}".\n` +
                `  Make sure all dependencies are installed: npm install\n` +
                `  If this is a dev dependency, it may need to be a regular dependency.`);
        }
        if (message.includes("No loader is configured")) {
            throw new Error(`Unsupported file type in import.\n` +
                `  esbuild cannot bundle this file type by default.\n` +
                `  Consider using --include to embed the file as an asset instead.`);
        }
        throw error;
    }
    return outputFile;
}
|
|
314
|
+
/**
 * Copy thinkwell pre-bundled packages to the build directory.
 * Throws if any of the three expected bundles is missing from dist-pkg.
 */
function copyThinkwellBundles(ctx) {
    for (const bundle of ["thinkwell.cjs", "acp.cjs", "protocol.cjs"]) {
        const src = join(ctx.thinkwellDistPkg, bundle);
        if (!existsSync(src)) {
            throw new Error(`Thinkwell bundle not found: ${src}`);
        }
        writeFileSync(join(ctx.buildDir, bundle), readFileSync(src));
        if (ctx.options.verbose) {
            console.log(`  Copied ${bundle}`);
        }
    }
}
|
|
332
|
+
/**
 * Check if running from a pkg-compiled binary.
 * pkg injects a `pkg` property onto `process` inside compiled executables.
 */
function isRunningFromCompiledBinary() {
    // @ts-expect-error process.pkg is set by pkg at runtime
    return process.pkg !== undefined;
}
|
|
339
|
+
// ============================================================================
// Portable Node.js Download (for compiled binary builds)
// ============================================================================
/** Pinned Node.js version for portable runtime */
// Keep in sync with TARGET_MAP's node24 targets and esbuild's target option.
const PORTABLE_NODE_VERSION = "24.1.0";
|
|
344
|
+
/**
 * Get the thinkwell cache directory.
 * Honors the THINKWELL_CACHE_DIR override; defaults to ~/.cache/thinkwell.
 */
function getCacheDir() {
    const override = process.env.THINKWELL_CACHE_DIR;
    if (override) {
        return override;
    }
    return join(homedir(), ".cache", "thinkwell");
}
|
|
348
|
+
/**
 * Get the thinkwell version from package.json (two levels above this module).
 * Returns "unknown" if the file is missing, unreadable, or has no version.
 */
function getThinkwellVersion() {
    try {
        const raw = readFileSync(resolve(__dirname, "../../package.json"), "utf-8");
        return JSON.parse(raw).version || "unknown";
    }
    catch {
        return "unknown";
    }
}
|
|
359
|
+
/**
 * Map process.platform/arch to the Node.js download naming scheme.
 * Any non-darwin platform is treated as linux (Windows is not supported
 * as a build target).
 */
function getNodePlatformArch() {
    return {
        platform: process.platform === "darwin" ? "darwin" : "linux",
        arch: process.arch, // x64 or arm64
    };
}
|
|
367
|
+
/**
 * Download a file from a URL with progress reporting.
 *
 * Streams the response body to destPath, updating the spinner text with
 * MB/percent progress whenever Content-Length is available.
 *
 * @param {string} url - URL to download.
 * @param {string} destPath - Destination file path; parent dirs are created.
 * @param {object} [spinner] - Optional ora spinner for progress updates.
 * @throws If the HTTP response is not ok or has no body.
 */
async function downloadFile(url, destPath, spinner) {
    const response = await fetch(url);
    if (!response.ok) {
        throw new Error(`Failed to download ${url}: ${response.status} ${response.statusText}`);
    }
    const contentLength = response.headers.get("content-length");
    // 0 means "unknown size" and disables percentage reporting below.
    const totalBytes = contentLength ? parseInt(contentLength, 10) : 0;
    // Ensure directory exists
    mkdirSync(dirname(destPath), { recursive: true });
    const fileStream = createWriteStream(destPath);
    const reader = response.body?.getReader();
    if (!reader) {
        throw new Error("No response body");
    }
    let downloadedBytes = 0;
    try {
        while (true) {
            const { done, value } = await reader.read();
            if (done)
                break;
            // NOTE(review): writes are not awaited (no backpressure handling);
            // acceptable for archive-sized downloads but large files may buffer.
            fileStream.write(Buffer.from(value));
            downloadedBytes += value.length;
            if (spinner && totalBytes > 0) {
                const percent = Math.round((downloadedBytes / totalBytes) * 100);
                const downloadedMB = (downloadedBytes / 1024 / 1024).toFixed(1);
                const totalMB = (totalBytes / 1024 / 1024).toFixed(1);
                spinner.text = `Downloading Node.js runtime... ${downloadedMB} MB / ${totalMB} MB (${percent}%)`;
            }
        }
    }
    finally {
        // Always close the stream, even if the read loop throws.
        fileStream.end();
    }
    // Wait for file to be fully written
    await new Promise((resolve, reject) => {
        fileStream.on("finish", resolve);
        fileStream.on("error", reject);
    });
}
|
|
409
|
+
/**
 * Compute the hex-encoded SHA-256 digest of a file's contents.
 * Reads the whole file into memory; intended for archive-sized inputs.
 */
function hashFile(filePath) {
    const hasher = createHash("sha256");
    hasher.update(readFileSync(filePath));
    return hasher.digest("hex");
}
|
|
416
|
+
/**
 * Fetch the expected SHA-256 checksum for a Node.js release artifact from
 * the official SHASUMS256.txt published for that version.
 *
 * @param {string} version - Node.js version without the leading "v",
 *        e.g. "24.1.0".
 * @param {string} filename - Release artifact name, e.g.
 *        "node-v24.1.0-linux-x64.tar.gz".
 * @returns {Promise<string>} Hex-encoded SHA-256 checksum for `filename`.
 * @throws If the checksum file cannot be fetched or the filename is not
 *         listed in it.
 */
async function fetchExpectedChecksum(version, filename) {
    const url = `https://nodejs.org/dist/v${version}/SHASUMS256.txt`;
    const response = await fetch(url);
    if (!response.ok) {
        throw new Error(`Failed to fetch checksums: ${response.status}`);
    }
    const text = await response.text();
    for (const line of text.split("\n")) {
        // Format: "hash  filename"
        const parts = line.trim().split(/\s+/);
        if (parts.length === 2 && parts[1] === filename) {
            return parts[0];
        }
    }
    // Bug fix: this message previously contained the literal text "$(unknown)"
    // instead of interpolating the artifact being looked up.
    throw new Error(`Checksum not found for ${filename}`);
}
|
|
435
|
+
/**
 * Extract a .tar.gz archive into destDir using the system `tar` command.
 * Output is captured (stdio: pipe); a non-zero tar exit raises.
 */
function extractTarGz(archivePath, destDir) {
    const command = `tar -xzf "${archivePath}" -C "${destDir}"`;
    execSync(command, { stdio: "pipe" });
}
|
|
443
|
+
/**
 * Ensure portable Node.js is available in the cache.
 *
 * Downloads from nodejs.org if not cached, verifies the SHA-256 checksum
 * against the official SHASUMS256.txt, extracts the tarball, and caches just
 * the bare `node` binary at the cache root.
 * Returns the path to the node binary.
 *
 * On any failure the whole version cache directory is removed so a retry
 * starts from a clean slate.
 *
 * @param {object} [spinner] - Optional ora spinner for progress messages.
 * @returns {Promise<string>} Absolute path to the cached node binary.
 */
async function ensurePortableNode(spinner) {
    const version = PORTABLE_NODE_VERSION;
    const { platform, arch } = getNodePlatformArch();
    const cacheDir = join(getCacheDir(), "node", `v${version}`);
    const nodeBinary = process.platform === "win32" ? "node.exe" : "node";
    const nodePath = join(cacheDir, nodeBinary);
    // Check if already cached
    if (existsSync(nodePath)) {
        return nodePath;
    }
    const filename = `node-v${version}-${platform}-${arch}.tar.gz`;
    // Bug fix: the download URL previously ended with the literal text
    // "$(unknown)" instead of interpolating the release artifact filename,
    // so every first-time download would 404.
    const url = `https://nodejs.org/dist/v${version}/${filename}`;
    const archivePath = join(cacheDir, filename);
    spinner?.start("Downloading Node.js runtime (first time only)...");
    try {
        // Ensure cache directory exists
        mkdirSync(cacheDir, { recursive: true });
        // Download
        await downloadFile(url, archivePath, spinner);
        // Verify checksum
        spinner?.start("Verifying download integrity...");
        const expectedHash = await fetchExpectedChecksum(version, filename);
        const actualHash = hashFile(archivePath);
        if (actualHash !== expectedHash) {
            // Clean up the corrupted download
            rmSync(archivePath, { force: true });
            throw new Error(`Node.js download verification failed.\n\n` +
                `  Expected: ${expectedHash}\n` +
                `  Actual:   ${actualHash}\n\n` +
                `This may indicate a corrupted download or network interference.\n` +
                `Please retry or report this issue.`);
        }
        // Extract
        spinner?.start("Extracting Node.js...");
        extractTarGz(archivePath, cacheDir);
        // Move node binary to cache root
        // The tarball extracts to node-v{version}-{platform}-{arch}/bin/node
        const extractedDir = join(cacheDir, `node-v${version}-${platform}-${arch}`);
        const extractedBin = join(extractedDir, "bin", nodeBinary);
        copyFileSync(extractedBin, nodePath);
        chmodSync(nodePath, 0o755);
        // Cleanup: remove extracted directory and archive
        rmSync(extractedDir, { recursive: true, force: true });
        rmSync(archivePath, { force: true });
        spinner?.succeed(`Node.js v${version} cached to ${cacheDir}`);
        return nodePath;
    }
    catch (error) {
        // Cleanup on error so a later retry does not see partial state
        rmSync(cacheDir, { recursive: true, force: true });
        const message = error instanceof Error ? error.message : String(error);
        // Provide helpful error messages
        if (message.includes("ETIMEDOUT") || message.includes("ENOTFOUND")) {
            throw new Error(`Failed to download Node.js runtime.\n\n` +
                `  URL: ${url}\n` +
                `  Error: ${message}\n\n` +
                `Check your network connection and try again.\n` +
                `If behind a proxy, set HTTPS_PROXY environment variable.`);
        }
        throw error;
    }
}
|
|
511
|
+
/**
 * Ensure the pkg CLI bundle and its auxiliary files are extracted from the
 * compiled binary's assets.
 *
 * pkg requires several auxiliary files at runtime:
 * - pkg-cli.cjs - The main bundled CLI
 * - package.json - pkg's version info (read as ../package.json from cacheDir)
 * - pkg-prelude/ - JavaScript files injected into compiled binaries
 * - pkg-dictionary/ - Compression dictionaries for bytecode
 * - pkg-common.cjs - Common utilities
 *
 * The cache is keyed by thinkwell version so upgrading re-extracts assets.
 * Only pkg-cli.cjs is mandatory; the auxiliary files are copied best-effort.
 *
 * Returns the path to the extracted pkg-cli.cjs file.
 */
function ensurePkgCli() {
    const version = getThinkwellVersion();
    const pkgCliBaseDir = join(getCacheDir(), "pkg-cli");
    const cacheDir = join(pkgCliBaseDir, version);
    const pkgCliPath = join(cacheDir, "pkg-cli.cjs");
    // Check if already cached (check for main file and a prelude file)
    const preludeCheck = join(cacheDir, "pkg-prelude", "bootstrap.js");
    if (existsSync(pkgCliPath) && existsSync(preludeCheck)) {
        return pkgCliPath;
    }
    // Base path for pkg assets in the compiled binary's snapshot
    const distPkgPath = resolve(__dirname, "../../dist-pkg");
    // Extract main CLI bundle
    const cliSrc = join(distPkgPath, "pkg-cli.cjs");
    if (!existsSync(cliSrc)) {
        throw new Error(`pkg CLI not found in compiled binary assets.\n` +
            `  Expected at: ${cliSrc}\n\n` +
            `This may indicate a build issue. Please report this.`);
    }
    mkdirSync(cacheDir, { recursive: true });
    copyFileSync(cliSrc, pkgCliPath);
    // Extract pkg's package.json (for version info)
    // pkg reads ../package.json relative to __dirname (which is cacheDir)
    // So we place it in the parent directory (pkgCliBaseDir)
    const pkgJsonSrc = join(distPkgPath, "package.json");
    if (existsSync(pkgJsonSrc)) {
        copyFileSync(pkgJsonSrc, join(pkgCliBaseDir, "package.json"));
    }
    // Extract prelude files
    const preludeDir = join(cacheDir, "pkg-prelude");
    mkdirSync(preludeDir, { recursive: true });
    for (const file of ["bootstrap.js", "diagnostic.js"]) {
        const src = join(distPkgPath, "pkg-prelude", file);
        if (existsSync(src)) {
            copyFileSync(src, join(preludeDir, file));
        }
    }
    // Extract common.js
    const commonSrc = join(distPkgPath, "pkg-common.cjs");
    if (existsSync(commonSrc)) {
        copyFileSync(commonSrc, join(cacheDir, "pkg-common.cjs"));
    }
    // Extract dictionary files
    // pkg reads ../dictionary relative to __dirname (which is cacheDir)
    // So we place it in the parent directory (pkgCliBaseDir/dictionary/)
    const dictionaryDir = join(pkgCliBaseDir, "dictionary");
    mkdirSync(dictionaryDir, { recursive: true });
    for (const file of ["v8-7.8.js", "v8-8.4.js", "v8-12.4.js"]) {
        const src = join(distPkgPath, "pkg-dictionary", file);
        if (existsSync(src)) {
            copyFileSync(src, join(dictionaryDir, file));
        }
    }
    return pkgCliPath;
}
|
|
579
|
+
/**
 * Spawn a subprocess and resolve with { exitCode, stdout, stderr }.
 *
 * Never rejects: spawn failures surface as exitCode 1 with the error message
 * in stderr. In verbose mode stdio is inherited, so the captured stdout and
 * stderr strings come back empty.
 */
function spawnAsync(command, args, options = {}) {
    return new Promise((settle) => {
        const child = spawn(command, args, {
            cwd: options.cwd,
            env: options.env || process.env,
            stdio: options.verbose ? "inherit" : "pipe",
        });
        let outBuf = "";
        let errBuf = "";
        if (!options.verbose) {
            child.stdout?.on("data", (chunk) => {
                outBuf += chunk.toString();
            });
            child.stderr?.on("data", (chunk) => {
                errBuf += chunk.toString();
            });
        }
        child.on("close", (code) => {
            settle({
                exitCode: code ?? 1,
                stdout: outBuf,
                stderr: errBuf,
            });
        });
        child.on("error", (err) => {
            settle({
                exitCode: 1,
                stdout: outBuf,
                stderr: err.message,
            });
        });
    });
}
|
|
615
|
+
/**
 * Compile using pkg via subprocess (for compiled binary environment).
 *
 * This function is called when running from a compiled thinkwell binary.
 * It downloads a portable Node.js runtime and uses the bundled pkg CLI
 * to perform the compilation as a subprocess.
 *
 * @param {object} ctx - Build context (buildDir, options, ...).
 * @param {string} wrapperPath - Path to the generated wrapper entry point.
 * @param {string} target - User-facing target name (key of TARGET_MAP).
 * @param {string} outputPath - Destination path for the compiled binary.
 * @param {object} [spinner] - Optional ora spinner for progress.
 * @throws If the pkg subprocess exits non-zero (its output is included).
 */
async function compileWithPkgSubprocess(ctx, wrapperPath, target, outputPath, spinner) {
    // Ensure portable Node.js is available
    const nodePath = await ensurePortableNode(spinner);
    // Extract pkg CLI from snapshot
    const pkgCliPath = ensurePkgCli();
    const pkgTarget = TARGET_MAP[target];
    // Ensure output directory exists
    const outputDir = dirname(outputPath);
    if (!existsSync(outputDir)) {
        mkdirSync(outputDir, { recursive: true });
    }
    // Build pkg CLI arguments
    const args = [
        pkgCliPath,
        wrapperPath,
        "--targets",
        pkgTarget,
        "--output",
        outputPath,
        "--options",
        "experimental-transform-types,disable-warning=ExperimentalWarning",
        "--public",
    ];
    // Add assets if specified
    if (ctx.options.include && ctx.options.include.length > 0) {
        for (const pattern of ctx.options.include) {
            args.push("--assets", pattern);
        }
    }
    spinner?.start(`Compiling for ${target}...`);
    // Run the extracted pkg CLI under the portable Node runtime, from within
    // the build directory so relative asset paths resolve.
    const result = await spawnAsync(nodePath, args, {
        cwd: ctx.buildDir,
        env: {
            ...process.env,
            // Set pkg cache path for pkg-fetch downloads
            PKG_CACHE_PATH: join(getCacheDir(), "pkg-cache"),
        },
        verbose: ctx.options.verbose,
    });
    if (result.exitCode !== 0) {
        const errorOutput = result.stderr || result.stdout;
        throw new Error(`pkg compilation failed for ${target}.\n\n` +
            `Exit code: ${result.exitCode}\n` +
            (errorOutput ? `Output:\n${errorOutput}` : ""));
    }
}
|
|
668
|
+
/**
 * Stage 2: Compile with pkg.
 *
 * Uses @yao-pkg/pkg to create a self-contained binary.
 *
 * When running from a compiled thinkwell binary, this function uses a
 * subprocess approach: downloading a portable Node.js runtime and executing
 * the bundled pkg CLI as a child process. This works around pkg's dynamic
 * import limitations in the virtual filesystem.
 *
 * When running from npm/source, this function uses @yao-pkg/pkg programmatically.
 *
 * @param {object} ctx - Build context (options.include, ...).
 * @param {string} wrapperPath - Path to the generated wrapper entry point.
 * @param {string} target - User-facing target name (key of TARGET_MAP).
 * @param {string} outputPath - Destination path for the compiled binary.
 * @param {object} [spinner] - Optional ora spinner, forwarded to subprocess path.
 */
async function compileWithPkg(ctx, wrapperPath, target, outputPath, spinner) {
    // When running from a compiled binary, use subprocess approach
    if (isRunningFromCompiledBinary()) {
        await compileWithPkgSubprocess(ctx, wrapperPath, target, outputPath, spinner);
        return;
    }
    // Normal path: use pkg programmatically
    // (dynamic import keeps @yao-pkg/pkg out of the compiled-binary bundle)
    const { exec } = await import("@yao-pkg/pkg");
    const pkgTarget = TARGET_MAP[target];
    // Ensure output directory exists
    const outputDir = dirname(outputPath);
    if (!existsSync(outputDir)) {
        mkdirSync(outputDir, { recursive: true });
    }
    // Build pkg configuration
    const pkgConfig = [
        wrapperPath,
        "--targets",
        pkgTarget,
        "--output",
        outputPath,
        "--options",
        "experimental-transform-types,disable-warning=ExperimentalWarning",
        "--public", // Include source instead of bytecode (required for cross-compilation)
    ];
    // Add assets if specified
    if (ctx.options.include && ctx.options.include.length > 0) {
        for (const pattern of ctx.options.include) {
            pkgConfig.push("--assets", pattern);
        }
    }
    await exec(pkgConfig);
}
|
|
713
|
+
// ============================================================================
|
|
714
|
+
// Top-Level Await Detection
|
|
715
|
+
// ============================================================================
|
|
716
|
+
/**
 * Detect top-level await usage in the entry file.
 * Returns an array of 1-indexed line numbers where top-level await is found.
 *
 * This is a lightweight heuristic, not a full parser: it strips comments and
 * string literals, tracks `{`/`}` nesting depth across lines, and flags any
 * `await` token seen at depth 0. Lines such as `// await` URLs inside string
 * literals containing `//` may still confuse the comment stripper (known,
 * pre-existing limitation).
 *
 * Bug fixed: the previous version added an extra depth increment for lines
 * matching `function`/`class`/`=> {` on top of counting the same `{` in the
 * brace tally, but only decremented once per `}`. After the first function in
 * the file, depth stayed permanently positive and every later top-level await
 * was silently missed. Depth is now derived from braces alone, evaluated at
 * the exact character position of each `await`.
 */
function detectTopLevelAwait(filePath) {
    const content = readFileSync(filePath, "utf-8");
    const lines = content.split("\n");
    const awaits = [];
    // Brace nesting depth carried across lines; depth 0 == module top level.
    let depth = 0;
    let inMultiLineComment = false;
    for (let i = 0; i < lines.length; i++) {
        let line = lines[i];
        // Handle continuation/end of a multi-line comment.
        if (inMultiLineComment) {
            const endIdx = line.indexOf("*/");
            if (endIdx === -1) {
                continue;
            }
            line = line.slice(endIdx + 2);
            inMultiLineComment = false;
        }
        // Remove single-line comments.
        const singleLineCommentIdx = line.indexOf("//");
        if (singleLineCommentIdx !== -1) {
            line = line.slice(0, singleLineCommentIdx);
        }
        // Remove (or open) a multi-line comment started on this line.
        const multiLineStart = line.indexOf("/*");
        if (multiLineStart !== -1) {
            const multiLineEnd = line.indexOf("*/", multiLineStart);
            if (multiLineEnd !== -1) {
                line = line.slice(0, multiLineStart) + line.slice(multiLineEnd + 2);
            }
            else {
                line = line.slice(0, multiLineStart);
                inMultiLineComment = true;
            }
        }
        // Strip string literals so braces and `await` inside strings are ignored.
        const stripped = line.replace(/(["'`])(?:(?!\1)[^\\]|\\.)*\1/g, "");
        // Single left-to-right scan: adjust depth per brace, and flag `await`
        // tokens that occur while depth is 0. At most one report per line.
        let flagged = false;
        for (let j = 0; j < stripped.length; j++) {
            const ch = stripped[j];
            if (ch === "{") {
                depth += 1;
            }
            else if (ch === "}") {
                // Clamp at 0 so unbalanced input cannot push depth negative.
                depth = Math.max(0, depth - 1);
            }
            else if (!flagged && depth === 0 && stripped.startsWith("await", j)) {
                // Require word boundaries so identifiers like `awaited` don't match.
                const before = j > 0 ? stripped[j - 1] : "";
                const after = j + 5 < stripped.length ? stripped[j + 5] : "";
                if (!/\w/.test(before) && !/\w/.test(after)) {
                    awaits.push(i + 1); // 1-indexed line numbers
                    flagged = true;
                }
            }
        }
    }
    return awaits;
}
|
|
783
|
+
// ============================================================================
|
|
784
|
+
// Output Helpers
|
|
785
|
+
// ============================================================================
|
|
786
|
+
/** Write a line to stdout unless the build is running in quiet mode. */
function log(ctx, message) {
    if (ctx.options.quiet) {
        return;
    }
    console.log(message);
}
|
|
792
|
+
/** Build an ora spinner that is silenced when quiet mode is enabled. */
function createSpinner(ctx, text) {
    const isSilent = ctx.options.quiet;
    return ora({ text, isSilent });
}
|
|
799
|
+
/**
 * Run a dry-run build that shows what would be built without actually building.
 * Prints the entry point, resolved targets with their output paths, any asset
 * globs, the build pipeline steps, and potential issues (top-level await).
 */
function runDryRun(ctx) {
    const bold = (s) => styleText("bold", s);
    console.log(bold("Dry run mode - no files will be created\n"));
    console.log(bold("Entry point:"));
    console.log(`  ${ctx.entryPath}\n`);
    console.log(bold("Targets:"));
    for (const tgt of ctx.resolvedTargets) {
        console.log(`  ${tgt} → ${getOutputPath(ctx, tgt)}`);
    }
    console.log();
    const includes = ctx.options.include;
    if (includes && includes.length > 0) {
        console.log(bold("Assets to include:"));
        for (const glob of includes) {
            console.log(`  ${glob}`);
        }
        console.log();
    }
    console.log(bold("Build steps:"));
    console.log("  1. Bundle user script with esbuild");
    console.log("  2. Copy thinkwell packages");
    console.log("  3. Generate wrapper entry point");
    console.log(`  4. Compile with pkg for ${ctx.resolvedTargets.length} target(s)`);
    console.log();
    // Surface likely build problems before the user commits to a real build.
    const tlaLines = detectTopLevelAwait(ctx.entryPath);
    if (tlaLines.length > 0) {
        console.log(styleText("yellow", "Warning: Top-level await detected"));
        console.log("  Top-level await is not supported in compiled binaries.");
        console.log(`  Found at line(s): ${tlaLines.join(", ")}`);
        console.log("  Wrap async code in an async main() function instead.\n");
    }
    console.log(styleText("dim", "Run without --dry-run to build."));
}
|
|
835
|
+
/**
 * Main build function.
 *
 * Pipeline: bundle the user script with esbuild, stage the pre-built
 * thinkwell package bundles, generate a CJS wrapper entry point, then
 * compile one self-contained binary per resolved target with pkg. The
 * temporary build directory is removed afterwards unless --verbose is set.
 *
 * Fixed: dry-run mode previously printed the top-level-await warning twice
 * (once here, once inside runDryRun). The dry-run branch now returns before
 * the warning is emitted here; runDryRun owns its own diagnostics.
 *
 * @param {object} options - Parsed CLI options for the build command.
 */
export async function runBuild(options) {
    const ctx = initBuildContext(options);
    // Handle dry-run mode first: runDryRun prints its own top-level-await
    // warning, so bailing out here avoids emitting the warning twice.
    if (options.dryRun) {
        runDryRun(ctx);
        return;
    }
    // Check for top-level await and warn — it is not supported in compiled
    // binaries, so the resulting executable would fail at runtime.
    const topLevelAwaits = detectTopLevelAwait(ctx.entryPath);
    if (topLevelAwaits.length > 0) {
        console.log(styleText("yellow", "Warning: Top-level await detected"));
        console.log("  Top-level await is not supported in compiled binaries.");
        console.log(`  Found at line(s): ${topLevelAwaits.join(", ")}`);
        console.log("  Wrap async code in an async main() function instead.\n");
    }
    log(ctx, `Building ${styleText("bold", ctx.entryBasename)}...\n`);
    // Always start from a clean build directory.
    if (existsSync(ctx.buildDir)) {
        rmSync(ctx.buildDir, { recursive: true });
    }
    mkdirSync(ctx.buildDir, { recursive: true });
    try {
        // Stage 1: Bundle user script with esbuild.
        let spinner = createSpinner(ctx, "Bundling with esbuild...");
        spinner.start();
        const userBundlePath = await bundleUserScript(ctx);
        spinner.succeed("User script bundled");
        // Stage 2: Copy the pre-built thinkwell bundles into the build dir.
        spinner = createSpinner(ctx, "Preparing thinkwell packages...");
        spinner.start();
        copyThinkwellBundles(ctx);
        spinner.succeed("Thinkwell packages ready");
        // Generate the CJS wrapper that pkg uses as its entry point.
        const wrapperPath = join(ctx.buildDir, "wrapper.cjs");
        const wrapperSource = generateWrapperSource(userBundlePath);
        writeFileSync(wrapperPath, wrapperSource);
        if (ctx.options.verbose) {
            log(ctx, "  Generated wrapper entry point");
        }
        // Stage 3: Compile a self-contained binary for each resolved target.
        const outputs = [];
        for (const target of ctx.resolvedTargets) {
            const outputPath = getOutputPath(ctx, target);
            spinner = createSpinner(ctx, `Compiling for ${target}...`);
            spinner.start();
            await compileWithPkg(ctx, wrapperPath, target, outputPath, spinner);
            outputs.push(outputPath);
            spinner.succeed(`Built ${basename(outputPath)}`);
        }
        log(ctx, "");
        log(ctx, styleText("green", "Build complete!"));
        log(ctx, "");
        log(ctx, styleText("bold", "Output:"));
        for (const output of outputs) {
            log(ctx, `  ${output}`);
        }
    }
    finally {
        // Remove intermediate artifacts; keep them for inspection in verbose mode.
        if (!ctx.options.verbose) {
            try {
                rmSync(ctx.buildDir, { recursive: true });
            }
            catch {
                // Ignore cleanup errors — best-effort removal only.
            }
        }
        else {
            log(ctx, `\nBuild artifacts preserved in: ${ctx.buildDir}`);
        }
    }
}
|
|
910
|
+
/**
 * Show help for the build command.
 *
 * Prints usage, options, supported cross-compilation targets, and example
 * invocations to stdout. Mirrors the option parsing in the build command;
 * keep the two in sync when options change.
 */
export function showBuildHelp() {
    console.log(`
thinkwell build - Compile TypeScript scripts into standalone executables

Usage:
  thinkwell build [options] <entry>

Arguments:
  entry                    TypeScript or JavaScript entry point

Options:
  -o, --output <path>      Output file path (default: ./<name>-<target>)
  -t, --target <target>    Target platform (can be specified multiple times)
  --include <glob>         Additional files to embed as assets
  -n, --dry-run            Show what would be built without building
  -q, --quiet              Suppress all output except errors (for CI)
  -v, --verbose            Show detailed build output
  -h, --help               Show this help message

Targets:
  host           Current platform (default)
  darwin-arm64   macOS on Apple Silicon
  darwin-x64     macOS on Intel
  linux-x64      Linux on x64
  linux-arm64    Linux on ARM64

Examples:
  thinkwell build src/agent.ts                               Build for current platform
  thinkwell build src/agent.ts -o dist/my-agent              Specify output path
  thinkwell build src/agent.ts --target linux-x64            Build for Linux
  thinkwell build src/agent.ts -t darwin-arm64 -t linux-x64  Multi-platform
  thinkwell build src/agent.ts --dry-run                     Preview build without executing

The resulting binary is self-contained and includes:
  - Node.js 24 runtime with TypeScript support
  - All thinkwell packages
  - Your bundled application code

Note: Binaries are ~70-90 MB due to the embedded Node.js runtime.
`);
}
|
|
954
|
+
//# sourceMappingURL=build.js.map
|