llmist 0.8.0 → 1.1.0

This diff represents the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -21,8 +21,12 @@ import {
21
21
  init_messages,
22
22
  init_model_shortcuts,
23
23
  init_registry,
24
- resolveModel
25
- } from "./chunk-62M4TDAK.js";
24
+ init_schema_to_json,
25
+ init_schema_validator,
26
+ resolveModel,
27
+ schemaToJSONSchema,
28
+ validateGadgetSchema
29
+ } from "./chunk-VXPZQZF5.js";
26
30
 
27
31
  // src/cli/constants.ts
28
32
  var CLI_NAME = "llmist";
@@ -30,11 +34,11 @@ var CLI_DESCRIPTION = "Command line utilities for llmist agents and direct LLM a
30
34
  var COMMANDS = {
31
35
  complete: "complete",
32
36
  agent: "agent",
33
- models: "models"
37
+ models: "models",
38
+ gadget: "gadget"
34
39
  };
35
40
  var LOG_LEVELS = ["silly", "trace", "debug", "info", "warn", "error", "fatal"];
36
41
  var DEFAULT_MODEL = "openai:gpt-5-nano";
37
- var DEFAULT_PARAMETER_FORMAT = "toml";
38
42
  var OPTION_FLAGS = {
39
43
  model: "-m, --model <identifier>",
40
44
  systemPrompt: "-s, --system <prompt>",
@@ -42,10 +46,11 @@ var OPTION_FLAGS = {
42
46
  maxTokens: "--max-tokens <count>",
43
47
  maxIterations: "-i, --max-iterations <count>",
44
48
  gadgetModule: "-g, --gadget <module>",
45
- parameterFormat: "--parameter-format <format>",
46
49
  logLevel: "--log-level <level>",
47
50
  logFile: "--log-file <path>",
48
51
  logReset: "--log-reset",
52
+ logLlmRequests: "--log-llm-requests [dir]",
53
+ logLlmResponses: "--log-llm-responses [dir]",
49
54
  noBuiltins: "--no-builtins",
50
55
  noBuiltinInteraction: "--no-builtin-interaction",
51
56
  quiet: "-q, --quiet"
@@ -57,10 +62,11 @@ var OPTION_DESCRIPTIONS = {
57
62
  maxTokens: "Maximum number of output tokens requested from the model.",
58
63
  maxIterations: "Maximum number of agent loop iterations before exiting.",
59
64
  gadgetModule: "Path or module specifier for a gadget export. Repeat to register multiple gadgets.",
60
- parameterFormat: "Format for gadget parameter schemas: 'json', 'yaml', 'toml', or 'auto'.",
61
65
  logLevel: "Log level: silly, trace, debug, info, warn, error, fatal.",
62
66
  logFile: "Path to log file. When set, logs are written to file instead of stderr.",
63
67
  logReset: "Reset (truncate) the log file at session start instead of appending.",
68
+ logLlmRequests: "Save raw LLM requests as plain text. Optional dir, defaults to ~/.llmist/logs/requests/",
69
+ logLlmResponses: "Save raw LLM responses as plain text. Optional dir, defaults to ~/.llmist/logs/responses/",
64
70
  noBuiltins: "Disable built-in gadgets (AskUser, TellUser).",
65
71
  noBuiltinInteraction: "Disable interactive gadgets (AskUser) while keeping TellUser.",
66
72
  quiet: "Suppress all output except content (text and TellUser messages)."
@@ -68,12 +74,12 @@ var OPTION_DESCRIPTIONS = {
68
74
  var SUMMARY_PREFIX = "[llmist]";
69
75
 
70
76
  // src/cli/program.ts
71
- import { Command, InvalidArgumentError as InvalidArgumentError3 } from "commander";
77
+ import { Command, InvalidArgumentError as InvalidArgumentError2 } from "commander";
72
78
 
73
79
  // package.json
74
80
  var package_default = {
75
81
  name: "llmist",
76
- version: "0.7.0",
82
+ version: "1.0.0",
77
83
  description: "Universal TypeScript LLM client with streaming-first agent framework. Works with any model - no structured outputs or native tool calling required. Implements its own flexible grammar for function calling.",
78
84
  type: "module",
79
85
  main: "dist/index.cjs",
@@ -375,8 +381,33 @@ async function loadGadgets(specifiers, cwd, importer = (specifier) => import(spe
375
381
  return gadgets;
376
382
  }
377
383
 
378
- // src/cli/option-helpers.ts
379
- import { InvalidArgumentError as InvalidArgumentError2 } from "commander";
384
+ // src/cli/llm-logging.ts
385
+ import { mkdir, writeFile } from "node:fs/promises";
386
+ import { homedir } from "node:os";
387
+ import { join } from "node:path";
388
+ var DEFAULT_LLM_LOG_DIR = join(homedir(), ".llmist", "logs");
389
+ function resolveLogDir(option, subdir) {
390
+ if (option === true) {
391
+ return join(DEFAULT_LLM_LOG_DIR, subdir);
392
+ }
393
+ if (typeof option === "string") {
394
+ return option;
395
+ }
396
+ return void 0;
397
+ }
398
+ function formatLlmRequest(messages) {
399
+ const lines = [];
400
+ for (const msg of messages) {
401
+ lines.push(`=== ${msg.role.toUpperCase()} ===`);
402
+ lines.push(msg.content ?? "");
403
+ lines.push("");
404
+ }
405
+ return lines.join("\n");
406
+ }
407
+ async function writeLogFile(dir, filename, content) {
408
+ await mkdir(dir, { recursive: true });
409
+ await writeFile(join(dir, filename), content, "utf-8");
410
+ }
380
411
 
381
412
  // src/cli/utils.ts
382
413
  init_constants();
@@ -420,9 +451,29 @@ function ensureMarkedConfigured() {
420
451
  }
421
452
  function renderMarkdown(text) {
422
453
  ensureMarkedConfigured();
423
- const rendered = marked.parse(text);
454
+ let rendered = marked.parse(text);
455
+ rendered = rendered.replace(/\*\*(.+?)\*\*/g, (_, content) => chalk.bold(content)).replace(/(?<!\*)\*(\S[^*]*)\*(?!\*)/g, (_, content) => chalk.italic(content));
424
456
  return rendered.trimEnd();
425
457
  }
458
+ function createRainbowSeparator() {
459
+ const colors = [chalk.red, chalk.yellow, chalk.green, chalk.cyan, chalk.blue, chalk.magenta];
460
+ const char = "\u2500";
461
+ const width = process.stdout.columns || 80;
462
+ let result = "";
463
+ for (let i = 0; i < width; i++) {
464
+ result += colors[i % colors.length](char);
465
+ }
466
+ return result;
467
+ }
468
+ function renderMarkdownWithSeparators(text) {
469
+ const rendered = renderMarkdown(text);
470
+ const separator = createRainbowSeparator();
471
+ return `
472
+ ${separator}
473
+ ${rendered}
474
+ ${separator}
475
+ `;
476
+ }
426
477
  function formatTokens(tokens) {
427
478
  return tokens >= 1e3 ? `${(tokens / 1e3).toFixed(1)}k` : `${tokens}`;
428
479
  }
@@ -542,7 +593,7 @@ function formatGadgetSummary(result) {
542
593
  const summaryLine = `${icon} ${gadgetLabel}${paramsLabel} ${chalk.dim("\u2192")} ${outputLabel} ${timeLabel}`;
543
594
  if (result.gadgetName === "TellUser" && result.parameters?.message) {
544
595
  const message = String(result.parameters.message);
545
- const rendered = renderMarkdown(message);
596
+ const rendered = renderMarkdownWithSeparators(message);
546
597
  return `${summaryLine}
547
598
  ${rendered}`;
548
599
  }
@@ -849,7 +900,7 @@ var StreamProgress = class {
849
900
  }
850
901
  this.isRunning = false;
851
902
  if (this.hasRendered) {
852
- this.target.write("\r\x1B[K");
903
+ this.target.write("\r\x1B[K\x1B[0G");
853
904
  this.hasRendered = false;
854
905
  }
855
906
  }
@@ -942,16 +993,6 @@ async function executeAction(action, env) {
942
993
  }
943
994
 
944
995
  // src/cli/option-helpers.ts
945
- var PARAMETER_FORMAT_VALUES = ["json", "yaml", "toml", "auto"];
946
- function parseParameterFormat(value) {
947
- const normalized = value.toLowerCase();
948
- if (!PARAMETER_FORMAT_VALUES.includes(normalized)) {
949
- throw new InvalidArgumentError2(
950
- `Parameter format must be one of: ${PARAMETER_FORMAT_VALUES.join(", ")}`
951
- );
952
- }
953
- return normalized;
954
- }
955
996
  function addCompleteOptions(cmd, defaults) {
956
997
  return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(
957
998
  OPTION_FLAGS.temperature,
@@ -963,7 +1004,7 @@ function addCompleteOptions(cmd, defaults) {
963
1004
  OPTION_DESCRIPTIONS.maxTokens,
964
1005
  createNumericParser({ label: "Max tokens", integer: true, min: 1 }),
965
1006
  defaults?.["max-tokens"]
966
- ).option(OPTION_FLAGS.quiet, OPTION_DESCRIPTIONS.quiet, defaults?.quiet);
1007
+ ).option(OPTION_FLAGS.quiet, OPTION_DESCRIPTIONS.quiet, defaults?.quiet).option(OPTION_FLAGS.logLlmRequests, OPTION_DESCRIPTIONS.logLlmRequests, defaults?.["log-llm-requests"]).option(OPTION_FLAGS.logLlmResponses, OPTION_DESCRIPTIONS.logLlmResponses, defaults?.["log-llm-responses"]);
967
1008
  }
968
1009
  function addAgentOptions(cmd, defaults) {
969
1010
  const gadgetAccumulator = (value, previous = []) => [
@@ -983,16 +1024,11 @@ function addAgentOptions(cmd, defaults) {
983
1024
  defaults?.["max-iterations"]
984
1025
  ).option(OPTION_FLAGS.gadgetModule, OPTION_DESCRIPTIONS.gadgetModule, gadgetAccumulator, [
985
1026
  ...defaultGadgets
986
- ]).option(
987
- OPTION_FLAGS.parameterFormat,
988
- OPTION_DESCRIPTIONS.parameterFormat,
989
- parseParameterFormat,
990
- defaults?.["parameter-format"] ?? DEFAULT_PARAMETER_FORMAT
991
- ).option(OPTION_FLAGS.noBuiltins, OPTION_DESCRIPTIONS.noBuiltins, defaults?.builtins !== false).option(
1027
+ ]).option(OPTION_FLAGS.noBuiltins, OPTION_DESCRIPTIONS.noBuiltins, defaults?.builtins !== false).option(
992
1028
  OPTION_FLAGS.noBuiltinInteraction,
993
1029
  OPTION_DESCRIPTIONS.noBuiltinInteraction,
994
1030
  defaults?.["builtin-interaction"] !== false
995
- ).option(OPTION_FLAGS.quiet, OPTION_DESCRIPTIONS.quiet, defaults?.quiet);
1031
+ ).option(OPTION_FLAGS.quiet, OPTION_DESCRIPTIONS.quiet, defaults?.quiet).option(OPTION_FLAGS.logLlmRequests, OPTION_DESCRIPTIONS.logLlmRequests, defaults?.["log-llm-requests"]).option(OPTION_FLAGS.logLlmResponses, OPTION_DESCRIPTIONS.logLlmResponses, defaults?.["log-llm-responses"]);
996
1032
  }
997
1033
  function configToCompleteOptions(config) {
998
1034
  const result = {};
@@ -1001,6 +1037,8 @@ function configToCompleteOptions(config) {
1001
1037
  if (config.temperature !== void 0) result.temperature = config.temperature;
1002
1038
  if (config["max-tokens"] !== void 0) result.maxTokens = config["max-tokens"];
1003
1039
  if (config.quiet !== void 0) result.quiet = config.quiet;
1040
+ if (config["log-llm-requests"] !== void 0) result.logLlmRequests = config["log-llm-requests"];
1041
+ if (config["log-llm-responses"] !== void 0) result.logLlmResponses = config["log-llm-responses"];
1004
1042
  return result;
1005
1043
  }
1006
1044
  function configToAgentOptions(config) {
@@ -1010,7 +1048,6 @@ function configToAgentOptions(config) {
1010
1048
  if (config.temperature !== void 0) result.temperature = config.temperature;
1011
1049
  if (config["max-iterations"] !== void 0) result.maxIterations = config["max-iterations"];
1012
1050
  if (config.gadget !== void 0) result.gadget = config.gadget;
1013
- if (config["parameter-format"] !== void 0) result.parameterFormat = config["parameter-format"];
1014
1051
  if (config.builtins !== void 0) result.builtins = config.builtins;
1015
1052
  if (config["builtin-interaction"] !== void 0)
1016
1053
  result.builtinInteraction = config["builtin-interaction"];
@@ -1018,7 +1055,11 @@ function configToAgentOptions(config) {
1018
1055
  result.gadgetStartPrefix = config["gadget-start-prefix"];
1019
1056
  if (config["gadget-end-prefix"] !== void 0)
1020
1057
  result.gadgetEndPrefix = config["gadget-end-prefix"];
1058
+ if (config["gadget-arg-prefix"] !== void 0)
1059
+ result.gadgetArgPrefix = config["gadget-arg-prefix"];
1021
1060
  if (config.quiet !== void 0) result.quiet = config.quiet;
1061
+ if (config["log-llm-requests"] !== void 0) result.logLlmRequests = config["log-llm-requests"];
1062
+ if (config["log-llm-responses"] !== void 0) result.logLlmResponses = config["log-llm-responses"];
1022
1063
  return result;
1023
1064
  }
1024
1065
 
@@ -1042,7 +1083,7 @@ function createHumanInputHandler(env, progress) {
1042
1083
  const rl = createInterface({ input: env.stdin, output: env.stdout });
1043
1084
  try {
1044
1085
  const questionLine = question.trim() ? `
1045
- ${renderMarkdown(question.trim())}` : "";
1086
+ ${renderMarkdownWithSeparators(question.trim())}` : "";
1046
1087
  let isFirst = true;
1047
1088
  while (true) {
1048
1089
  const statsPrompt = progress.formatPrompt();
@@ -1085,6 +1126,9 @@ async function executeAgent(promptArg, options, env) {
1085
1126
  const progress = new StreamProgress(env.stderr, stderrTTY, client.modelRegistry);
1086
1127
  let usage;
1087
1128
  let iterations = 0;
1129
+ const llmRequestsDir = resolveLogDir(options.logLlmRequests, "requests");
1130
+ const llmResponsesDir = resolveLogDir(options.logLlmResponses, "responses");
1131
+ let llmCallCounter = 0;
1088
1132
  const countMessagesTokens = async (model, messages) => {
1089
1133
  try {
1090
1134
  return await client.countTokens(model, messages);
@@ -1107,12 +1151,18 @@ async function executeAgent(promptArg, options, env) {
1107
1151
  // onLLMCallStart: Start progress indicator for each LLM call
1108
1152
  // This showcases how to react to agent lifecycle events
1109
1153
  onLLMCallStart: async (context) => {
1154
+ llmCallCounter++;
1110
1155
  const inputTokens = await countMessagesTokens(
1111
1156
  context.options.model,
1112
1157
  context.options.messages
1113
1158
  );
1114
1159
  progress.startCall(context.options.model, inputTokens);
1115
1160
  progress.setInputTokens(inputTokens, false);
1161
+ if (llmRequestsDir) {
1162
+ const filename = `${Date.now()}_call_${llmCallCounter}.request.txt`;
1163
+ const content = formatLlmRequest(context.options.messages);
1164
+ await writeLogFile(llmRequestsDir, filename, content);
1165
+ }
1116
1166
  },
1117
1167
  // onStreamChunk: Real-time updates as LLM generates tokens
1118
1168
  // This enables responsive UIs that show progress during generation
@@ -1175,6 +1225,10 @@ async function executeAgent(promptArg, options, env) {
1175
1225
  `);
1176
1226
  }
1177
1227
  }
1228
+ if (llmResponsesDir) {
1229
+ const filename = `${Date.now()}_call_${llmCallCounter}.response.txt`;
1230
+ await writeLogFile(llmResponsesDir, filename, context.rawResponse);
1231
+ }
1178
1232
  }
1179
1233
  },
1180
1234
  // SHOWCASE: Controller-based approval gating for dangerous gadgets
@@ -1239,13 +1293,15 @@ Command rejected by user with message: "${response}"`
1239
1293
  if (gadgets.length > 0) {
1240
1294
  builder.withGadgets(...gadgets);
1241
1295
  }
1242
- builder.withParameterFormat(options.parameterFormat);
1243
1296
  if (options.gadgetStartPrefix) {
1244
1297
  builder.withGadgetStartPrefix(options.gadgetStartPrefix);
1245
1298
  }
1246
1299
  if (options.gadgetEndPrefix) {
1247
1300
  builder.withGadgetEndPrefix(options.gadgetEndPrefix);
1248
1301
  }
1302
+ if (options.gadgetArgPrefix) {
1303
+ builder.withGadgetArgPrefix(options.gadgetArgPrefix);
1304
+ }
1249
1305
  builder.withSyntheticGadgetCall(
1250
1306
  "TellUser",
1251
1307
  {
@@ -1262,17 +1318,25 @@ Command rejected by user with message: "${response}"`
1262
1318
  resultMapping: (text) => `\u2139\uFE0F ${text}`
1263
1319
  });
1264
1320
  const agent = builder.ask(prompt);
1321
+ let textBuffer = "";
1322
+ const flushTextBuffer = () => {
1323
+ if (textBuffer) {
1324
+ const output = options.quiet ? textBuffer : renderMarkdownWithSeparators(textBuffer);
1325
+ printer.write(output);
1326
+ textBuffer = "";
1327
+ }
1328
+ };
1265
1329
  for await (const event of agent.run()) {
1266
1330
  if (event.type === "text") {
1267
1331
  progress.pause();
1268
- printer.write(event.content);
1332
+ textBuffer += event.content;
1269
1333
  } else if (event.type === "gadget_result") {
1334
+ flushTextBuffer();
1270
1335
  progress.pause();
1271
1336
  if (options.quiet) {
1272
1337
  if (event.result.gadgetName === "TellUser" && event.result.parameters?.message) {
1273
1338
  const message = String(event.result.parameters.message);
1274
- const rendered = renderMarkdown(message);
1275
- env.stdout.write(`${rendered}
1339
+ env.stdout.write(`${message}
1276
1340
  `);
1277
1341
  }
1278
1342
  } else {
@@ -1282,6 +1346,7 @@ Command rejected by user with message: "${response}"`
1282
1346
  }
1283
1347
  }
1284
1348
  }
1349
+ flushTextBuffer();
1285
1350
  progress.complete();
1286
1351
  printer.ensureNewline();
1287
1352
  if (!options.quiet && iterations > 1) {
@@ -1320,9 +1385,18 @@ async function executeComplete(promptArg, options, env) {
1320
1385
  builder.addSystem(options.system);
1321
1386
  }
1322
1387
  builder.addUser(prompt);
1388
+ const messages = builder.build();
1389
+ const llmRequestsDir = resolveLogDir(options.logLlmRequests, "requests");
1390
+ const llmResponsesDir = resolveLogDir(options.logLlmResponses, "responses");
1391
+ const timestamp = Date.now();
1392
+ if (llmRequestsDir) {
1393
+ const filename = `${timestamp}_complete.request.txt`;
1394
+ const content = formatLlmRequest(messages);
1395
+ await writeLogFile(llmRequestsDir, filename, content);
1396
+ }
1323
1397
  const stream = client.stream({
1324
1398
  model,
1325
- messages: builder.build(),
1399
+ messages,
1326
1400
  temperature: options.temperature,
1327
1401
  maxTokens: options.maxTokens
1328
1402
  });
@@ -1333,7 +1407,7 @@ async function executeComplete(promptArg, options, env) {
1333
1407
  progress.startCall(model, estimatedInputTokens);
1334
1408
  let finishReason;
1335
1409
  let usage;
1336
- let totalChars = 0;
1410
+ let accumulatedResponse = "";
1337
1411
  for await (const chunk of stream) {
1338
1412
  if (chunk.usage) {
1339
1413
  usage = chunk.usage;
@@ -1346,8 +1420,8 @@ async function executeComplete(promptArg, options, env) {
1346
1420
  }
1347
1421
  if (chunk.text) {
1348
1422
  progress.pause();
1349
- totalChars += chunk.text.length;
1350
- progress.update(totalChars);
1423
+ accumulatedResponse += chunk.text;
1424
+ progress.update(accumulatedResponse.length);
1351
1425
  printer.write(chunk.text);
1352
1426
  }
1353
1427
  if (chunk.finishReason !== void 0) {
@@ -1357,6 +1431,10 @@ async function executeComplete(promptArg, options, env) {
1357
1431
  progress.endCall(usage);
1358
1432
  progress.complete();
1359
1433
  printer.ensureNewline();
1434
+ if (llmResponsesDir) {
1435
+ const filename = `${timestamp}_complete.response.txt`;
1436
+ await writeLogFile(llmResponsesDir, filename, accumulatedResponse);
1437
+ }
1360
1438
  if (stderrTTY && !options.quiet) {
1361
1439
  const summary = renderSummary({ finishReason, usage, cost: progress.getTotalCost() });
1362
1440
  if (summary) {
@@ -1375,8 +1453,8 @@ function registerCompleteCommand(program, env, config) {
1375
1453
 
1376
1454
  // src/cli/config.ts
1377
1455
  import { existsSync, readFileSync } from "node:fs";
1378
- import { homedir } from "node:os";
1379
- import { join } from "node:path";
1456
+ import { homedir as homedir2 } from "node:os";
1457
+ import { join as join2 } from "node:path";
1380
1458
  import { load as parseToml } from "js-toml";
1381
1459
 
1382
1460
  // src/cli/templates.ts
@@ -1471,6 +1549,8 @@ var COMPLETE_CONFIG_KEYS = /* @__PURE__ */ new Set([
1471
1549
  "log-level",
1472
1550
  "log-file",
1473
1551
  "log-reset",
1552
+ "log-llm-requests",
1553
+ "log-llm-responses",
1474
1554
  "type"
1475
1555
  // Allowed for inheritance compatibility, ignored for built-in commands
1476
1556
  ]);
@@ -1480,16 +1560,18 @@ var AGENT_CONFIG_KEYS = /* @__PURE__ */ new Set([
1480
1560
  "temperature",
1481
1561
  "max-iterations",
1482
1562
  "gadget",
1483
- "parameter-format",
1484
1563
  "builtins",
1485
1564
  "builtin-interaction",
1486
1565
  "gadget-start-prefix",
1487
1566
  "gadget-end-prefix",
1567
+ "gadget-arg-prefix",
1488
1568
  "quiet",
1489
1569
  "inherits",
1490
1570
  "log-level",
1491
1571
  "log-file",
1492
1572
  "log-reset",
1573
+ "log-llm-requests",
1574
+ "log-llm-responses",
1493
1575
  "type"
1494
1576
  // Allowed for inheritance compatibility, ignored for built-in commands
1495
1577
  ]);
@@ -1499,9 +1581,8 @@ var CUSTOM_CONFIG_KEYS = /* @__PURE__ */ new Set([
1499
1581
  "type",
1500
1582
  "description"
1501
1583
  ]);
1502
- var VALID_PARAMETER_FORMATS = ["json", "yaml", "toml", "auto"];
1503
1584
  function getConfigPath() {
1504
- return join(homedir(), ".llmist", "cli.toml");
1585
+ return join2(homedir2(), ".llmist", "cli.toml");
1505
1586
  }
1506
1587
  var ConfigError = class extends Error {
1507
1588
  constructor(message, path2) {
@@ -1635,6 +1716,20 @@ function validateCompleteConfig(raw, section) {
1635
1716
  if ("quiet" in rawObj) {
1636
1717
  result.quiet = validateBoolean(rawObj.quiet, "quiet", section);
1637
1718
  }
1719
+ if ("log-llm-requests" in rawObj) {
1720
+ result["log-llm-requests"] = validateStringOrBoolean(
1721
+ rawObj["log-llm-requests"],
1722
+ "log-llm-requests",
1723
+ section
1724
+ );
1725
+ }
1726
+ if ("log-llm-responses" in rawObj) {
1727
+ result["log-llm-responses"] = validateStringOrBoolean(
1728
+ rawObj["log-llm-responses"],
1729
+ "log-llm-responses",
1730
+ section
1731
+ );
1732
+ }
1638
1733
  return result;
1639
1734
  }
1640
1735
  function validateAgentConfig(raw, section) {
@@ -1660,15 +1755,6 @@ function validateAgentConfig(raw, section) {
1660
1755
  if ("gadget" in rawObj) {
1661
1756
  result.gadget = validateStringArray(rawObj.gadget, "gadget", section);
1662
1757
  }
1663
- if ("parameter-format" in rawObj) {
1664
- const format = validateString(rawObj["parameter-format"], "parameter-format", section);
1665
- if (!VALID_PARAMETER_FORMATS.includes(format)) {
1666
- throw new ConfigError(
1667
- `[${section}].parameter-format must be one of: ${VALID_PARAMETER_FORMATS.join(", ")}`
1668
- );
1669
- }
1670
- result["parameter-format"] = format;
1671
- }
1672
1758
  if ("builtins" in rawObj) {
1673
1759
  result.builtins = validateBoolean(rawObj.builtins, "builtins", section);
1674
1760
  }
@@ -1693,11 +1779,38 @@ function validateAgentConfig(raw, section) {
1693
1779
  section
1694
1780
  );
1695
1781
  }
1782
+ if ("gadget-arg-prefix" in rawObj) {
1783
+ result["gadget-arg-prefix"] = validateString(
1784
+ rawObj["gadget-arg-prefix"],
1785
+ "gadget-arg-prefix",
1786
+ section
1787
+ );
1788
+ }
1696
1789
  if ("quiet" in rawObj) {
1697
1790
  result.quiet = validateBoolean(rawObj.quiet, "quiet", section);
1698
1791
  }
1792
+ if ("log-llm-requests" in rawObj) {
1793
+ result["log-llm-requests"] = validateStringOrBoolean(
1794
+ rawObj["log-llm-requests"],
1795
+ "log-llm-requests",
1796
+ section
1797
+ );
1798
+ }
1799
+ if ("log-llm-responses" in rawObj) {
1800
+ result["log-llm-responses"] = validateStringOrBoolean(
1801
+ rawObj["log-llm-responses"],
1802
+ "log-llm-responses",
1803
+ section
1804
+ );
1805
+ }
1699
1806
  return result;
1700
1807
  }
1808
+ function validateStringOrBoolean(value, field, section) {
1809
+ if (typeof value === "string" || typeof value === "boolean") {
1810
+ return value;
1811
+ }
1812
+ throw new ConfigError(`[${section}].${field} must be a string or boolean`);
1813
+ }
1701
1814
  function validateCustomConfig(raw, section) {
1702
1815
  if (typeof raw !== "object" || raw === null) {
1703
1816
  throw new ConfigError(`[${section}] must be a table`);
@@ -1732,15 +1845,6 @@ function validateCustomConfig(raw, section) {
1732
1845
  if ("gadget" in rawObj) {
1733
1846
  result.gadget = validateStringArray(rawObj.gadget, "gadget", section);
1734
1847
  }
1735
- if ("parameter-format" in rawObj) {
1736
- const format = validateString(rawObj["parameter-format"], "parameter-format", section);
1737
- if (!VALID_PARAMETER_FORMATS.includes(format)) {
1738
- throw new ConfigError(
1739
- `[${section}].parameter-format must be one of: ${VALID_PARAMETER_FORMATS.join(", ")}`
1740
- );
1741
- }
1742
- result["parameter-format"] = format;
1743
- }
1744
1848
  if ("builtins" in rawObj) {
1745
1849
  result.builtins = validateBoolean(rawObj.builtins, "builtins", section);
1746
1850
  }
@@ -1765,6 +1869,13 @@ function validateCustomConfig(raw, section) {
1765
1869
  section
1766
1870
  );
1767
1871
  }
1872
+ if ("gadget-arg-prefix" in rawObj) {
1873
+ result["gadget-arg-prefix"] = validateString(
1874
+ rawObj["gadget-arg-prefix"],
1875
+ "gadget-arg-prefix",
1876
+ section
1877
+ );
1878
+ }
1768
1879
  if ("max-tokens" in rawObj) {
1769
1880
  result["max-tokens"] = validateNumber(rawObj["max-tokens"], "max-tokens", section, {
1770
1881
  integer: true,
@@ -1955,8 +2066,439 @@ function resolveInheritance(config, configPath) {
1955
2066
  return resolved;
1956
2067
  }
1957
2068
 
1958
- // src/cli/models-command.ts
2069
+ // src/cli/gadget-command.ts
2070
+ init_schema_to_json();
2071
+ init_schema_validator();
2072
+ import chalk5 from "chalk";
2073
+
2074
+ // src/cli/gadget-prompts.ts
2075
+ init_schema_to_json();
2076
+ import { createInterface as createInterface2 } from "node:readline/promises";
1959
2077
  import chalk4 from "chalk";
2078
+ async function promptForParameters(schema, ctx) {
2079
+ if (!schema) {
2080
+ return {};
2081
+ }
2082
+ const jsonSchema = schemaToJSONSchema(schema, { target: "draft-7" });
2083
+ if (!jsonSchema.properties || Object.keys(jsonSchema.properties).length === 0) {
2084
+ return {};
2085
+ }
2086
+ const rl = createInterface2({ input: ctx.stdin, output: ctx.stdout });
2087
+ const params = {};
2088
+ try {
2089
+ for (const [key, prop] of Object.entries(jsonSchema.properties)) {
2090
+ const value = await promptForField(rl, key, prop, jsonSchema.required ?? []);
2091
+ if (value !== void 0) {
2092
+ params[key] = value;
2093
+ }
2094
+ }
2095
+ } finally {
2096
+ rl.close();
2097
+ }
2098
+ const result = schema.safeParse(params);
2099
+ if (!result.success) {
2100
+ const issues = result.error.issues.map((i) => ` ${i.path.join(".")}: ${i.message}`).join("\n");
2101
+ throw new Error(`Invalid parameters:
2102
+ ${issues}`);
2103
+ }
2104
+ return result.data;
2105
+ }
2106
+ async function promptForField(rl, key, prop, required) {
2107
+ const isRequired = required.includes(key);
2108
+ const typeHint = formatTypeHint(prop);
2109
+ const defaultHint = prop.default !== void 0 ? chalk4.dim(` [default: ${JSON.stringify(prop.default)}]`) : "";
2110
+ const requiredMarker = isRequired ? chalk4.red("*") : "";
2111
+ let prompt = `
2112
+ ${chalk4.cyan.bold(key)}${requiredMarker}`;
2113
+ if (prop.description) {
2114
+ prompt += chalk4.dim(` - ${prop.description}`);
2115
+ }
2116
+ prompt += `
2117
+ ${typeHint}${defaultHint}
2118
+ ${chalk4.green(">")} `;
2119
+ const answer = await rl.question(prompt);
2120
+ const trimmed = answer.trim();
2121
+ if (!trimmed) {
2122
+ if (prop.default !== void 0) {
2123
+ return void 0;
2124
+ }
2125
+ if (!isRequired) {
2126
+ return void 0;
2127
+ }
2128
+ throw new Error(`Parameter '${key}' is required.`);
2129
+ }
2130
+ return parseValue(trimmed, prop, key);
2131
+ }
2132
+ function formatTypeHint(prop) {
2133
+ if (prop.enum) {
2134
+ return chalk4.yellow(`(${prop.enum.join(" | ")})`);
2135
+ }
2136
+ if (prop.type === "array") {
2137
+ const items = prop.items;
2138
+ if (items?.enum) {
2139
+ return chalk4.yellow(`(${items.enum.join(" | ")})[] comma-separated`);
2140
+ }
2141
+ const itemType = items?.type ?? "any";
2142
+ return chalk4.yellow(`(${itemType}[]) comma-separated`);
2143
+ }
2144
+ if (prop.type === "object" && prop.properties) {
2145
+ return chalk4.yellow("(object) enter as JSON");
2146
+ }
2147
+ return chalk4.yellow(`(${prop.type ?? "any"})`);
2148
+ }
2149
+ function parseValue(input, prop, key) {
2150
+ const type = prop.type;
2151
+ if (type === "number" || type === "integer") {
2152
+ const num = Number(input);
2153
+ if (Number.isNaN(num)) {
2154
+ throw new Error(`Invalid number for '${key}': ${input}`);
2155
+ }
2156
+ if (type === "integer" && !Number.isInteger(num)) {
2157
+ throw new Error(`Expected integer for '${key}', got: ${input}`);
2158
+ }
2159
+ return num;
2160
+ }
2161
+ if (type === "boolean") {
2162
+ const lower = input.toLowerCase();
2163
+ if (["true", "yes", "1", "y"].includes(lower)) return true;
2164
+ if (["false", "no", "0", "n"].includes(lower)) return false;
2165
+ throw new Error(`Invalid boolean for '${key}': ${input} (use true/false, yes/no, 1/0)`);
2166
+ }
2167
+ if (type === "array") {
2168
+ const items = input.split(",").map((s) => s.trim()).filter(Boolean);
2169
+ const itemType = prop.items?.type;
2170
+ if (itemType === "number" || itemType === "integer") {
2171
+ return items.map((item) => {
2172
+ const num = Number(item);
2173
+ if (Number.isNaN(num)) throw new Error(`Invalid number in '${key}' array: ${item}`);
2174
+ return num;
2175
+ });
2176
+ }
2177
+ if (itemType === "boolean") {
2178
+ return items.map((item) => {
2179
+ const lower = item.toLowerCase();
2180
+ if (["true", "yes", "1", "y"].includes(lower)) return true;
2181
+ if (["false", "no", "0", "n"].includes(lower)) return false;
2182
+ throw new Error(`Invalid boolean in '${key}' array: ${item}`);
2183
+ });
2184
+ }
2185
+ return items;
2186
+ }
2187
+ if (type === "object") {
2188
+ try {
2189
+ return JSON.parse(input);
2190
+ } catch {
2191
+ throw new Error(`Invalid JSON for '${key}': ${input}`);
2192
+ }
2193
+ }
2194
+ return input;
2195
+ }
2196
+ async function readStdinJson(stdin) {
2197
+ const chunks = [];
2198
+ for await (const chunk of stdin) {
2199
+ if (typeof chunk === "string") {
2200
+ chunks.push(chunk);
2201
+ } else {
2202
+ chunks.push(chunk.toString("utf8"));
2203
+ }
2204
+ }
2205
+ const content = chunks.join("").trim();
2206
+ if (!content) {
2207
+ return {};
2208
+ }
2209
+ try {
2210
+ const parsed = JSON.parse(content);
2211
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
2212
+ throw new Error("Stdin must contain a JSON object, not an array or primitive.");
2213
+ }
2214
+ return parsed;
2215
+ } catch (error) {
2216
+ if (error instanceof SyntaxError) {
2217
+ throw new Error(`Invalid JSON from stdin: ${error.message}`);
2218
+ }
2219
+ throw error;
2220
+ }
2221
+ }
2222
+
2223
+ // src/cli/gadget-command.ts
2224
+ async function selectGadget(file, nameOption, cwd) {
2225
+ const gadgets = await loadGadgets([file], cwd);
2226
+ if (gadgets.length === 0) {
2227
+ throw new Error(
2228
+ `No gadgets found in '${file}'.
2229
+ Ensure the file exports a Gadget class or instance.`
2230
+ );
2231
+ }
2232
+ if (gadgets.length === 1) {
2233
+ const gadget = gadgets[0];
2234
+ const name = gadget.name ?? gadget.constructor.name;
2235
+ return { gadget, name };
2236
+ }
2237
+ const names = gadgets.map((g) => g.name ?? g.constructor.name);
2238
+ if (!nameOption) {
2239
+ throw new Error(
2240
+ `File '${file}' exports ${gadgets.length} gadgets.
2241
+ Use --name to select one:
2242
+ ` + names.map((n) => ` - ${n}`).join("\n")
2243
+ );
2244
+ }
2245
+ const found = gadgets.find((g) => (g.name ?? g.constructor.name) === nameOption);
2246
+ if (!found) {
2247
+ throw new Error(
2248
+ `Gadget '${nameOption}' not found in '${file}'.
2249
+ Available gadgets:
2250
+ ` + names.map((n) => ` - ${n}`).join("\n")
2251
+ );
2252
+ }
2253
+ return { gadget: found, name: nameOption };
2254
+ }
2255
+ async function executeGadgetRun(file, options, env) {
2256
+ const cwd = process.cwd();
2257
+ const { gadget, name } = await selectGadget(file, options.name, cwd);
2258
+ env.stderr.write(chalk5.cyan.bold(`
2259
+ \u{1F527} Running gadget: ${name}
2260
+ `));
2261
+ let params;
2262
+ if (env.isTTY) {
2263
+ params = await promptForParameters(gadget.parameterSchema, {
2264
+ stdin: env.stdin,
2265
+ stdout: env.stderr
2266
+ // Prompts go to stderr to keep stdout clean
2267
+ });
2268
+ } else {
2269
+ env.stderr.write(chalk5.dim("Reading parameters from stdin...\n"));
2270
+ const stdinParams = await readStdinJson(env.stdin);
2271
+ if (gadget.parameterSchema) {
2272
+ const result2 = gadget.parameterSchema.safeParse(stdinParams);
2273
+ if (!result2.success) {
2274
+ const issues = result2.error.issues.map((i) => ` ${i.path.join(".")}: ${i.message}`).join("\n");
2275
+ throw new Error(`Invalid parameters:
2276
+ ${issues}`);
2277
+ }
2278
+ params = result2.data;
2279
+ } else {
2280
+ params = stdinParams;
2281
+ }
2282
+ }
2283
+ env.stderr.write(chalk5.dim("\nExecuting...\n"));
2284
+ const startTime = Date.now();
2285
+ let result;
2286
+ try {
2287
+ if (gadget.timeoutMs && gadget.timeoutMs > 0) {
2288
+ result = await Promise.race([
2289
+ Promise.resolve(gadget.execute(params)),
2290
+ new Promise(
2291
+ (_, reject) => setTimeout(
2292
+ () => reject(new Error(`Gadget timed out after ${gadget.timeoutMs}ms`)),
2293
+ gadget.timeoutMs
2294
+ )
2295
+ )
2296
+ ]);
2297
+ } else {
2298
+ result = await Promise.resolve(gadget.execute(params));
2299
+ }
2300
+ } catch (error) {
2301
+ const message = error instanceof Error ? error.message : String(error);
2302
+ throw new Error(`Execution failed: ${message}`);
2303
+ }
2304
+ const elapsed = Date.now() - startTime;
2305
+ env.stderr.write(chalk5.green(`
2306
+ \u2713 Completed in ${elapsed}ms
2307
+
2308
+ `));
2309
+ formatOutput(result, options, env.stdout);
2310
+ }
2311
+ function formatOutput(result, options, stdout) {
2312
+ if (options.raw) {
2313
+ stdout.write(result);
2314
+ if (!result.endsWith("\n")) stdout.write("\n");
2315
+ return;
2316
+ }
2317
+ if (options.json || looksLikeJson(result)) {
2318
+ try {
2319
+ const parsed = JSON.parse(result);
2320
+ stdout.write(JSON.stringify(parsed, null, 2) + "\n");
2321
+ return;
2322
+ } catch {
2323
+ }
2324
+ }
2325
+ stdout.write(result);
2326
+ if (!result.endsWith("\n")) stdout.write("\n");
2327
+ }
2328
+ function looksLikeJson(str) {
2329
+ const trimmed = str.trim();
2330
+ return trimmed.startsWith("{") && trimmed.endsWith("}") || trimmed.startsWith("[") && trimmed.endsWith("]");
2331
+ }
2332
+ async function executeGadgetInfo(file, options, env) {
2333
+ const cwd = process.cwd();
2334
+ const { gadget, name } = await selectGadget(file, options.name, cwd);
2335
+ if (options.json) {
2336
+ const info = buildGadgetInfo(gadget, name);
2337
+ env.stdout.write(JSON.stringify(info, null, 2) + "\n");
2338
+ return;
2339
+ }
2340
+ env.stdout.write("\n");
2341
+ env.stdout.write(chalk5.cyan.bold(`${name}
2342
+ `));
2343
+ env.stdout.write(chalk5.cyan("\u2550".repeat(name.length)) + "\n\n");
2344
+ env.stdout.write(chalk5.bold("Description:\n"));
2345
+ env.stdout.write(` ${gadget.description}
2346
+
2347
+ `);
2348
+ if (gadget.parameterSchema) {
2349
+ env.stdout.write(chalk5.bold("Parameters:\n"));
2350
+ const jsonSchema = schemaToJSONSchema(gadget.parameterSchema, { target: "draft-7" });
2351
+ env.stdout.write(formatSchemaAsText(jsonSchema, " ") + "\n\n");
2352
+ } else {
2353
+ env.stdout.write(chalk5.dim("No parameters required.\n\n"));
2354
+ }
2355
+ if (gadget.timeoutMs) {
2356
+ env.stdout.write(chalk5.bold("Timeout:\n"));
2357
+ env.stdout.write(` ${gadget.timeoutMs}ms
2358
+
2359
+ `);
2360
+ }
2361
+ if (gadget.examples && gadget.examples.length > 0) {
2362
+ env.stdout.write(chalk5.bold("Examples:\n"));
2363
+ for (const example of gadget.examples) {
2364
+ if (example.comment) {
2365
+ env.stdout.write(chalk5.dim(` # ${example.comment}
2366
+ `));
2367
+ }
2368
+ env.stdout.write(` Input: ${chalk5.cyan(JSON.stringify(example.params))}
2369
+ `);
2370
+ if (example.output !== void 0) {
2371
+ env.stdout.write(` Output: ${chalk5.green(example.output)}
2372
+ `);
2373
+ }
2374
+ env.stdout.write("\n");
2375
+ }
2376
+ }
2377
+ }
2378
+ function buildGadgetInfo(gadget, name) {
2379
+ const info = {
2380
+ name,
2381
+ description: gadget.description
2382
+ };
2383
+ if (gadget.parameterSchema) {
2384
+ info.schema = schemaToJSONSchema(gadget.parameterSchema, { target: "draft-7" });
2385
+ }
2386
+ if (gadget.timeoutMs) {
2387
+ info.timeoutMs = gadget.timeoutMs;
2388
+ }
2389
+ if (gadget.examples && gadget.examples.length > 0) {
2390
+ info.examples = gadget.examples;
2391
+ }
2392
+ return info;
2393
+ }
2394
+ function formatSchemaAsText(schema, indent = "") {
2395
+ const lines = [];
2396
+ const properties = schema.properties || {};
2397
+ const required = schema.required || [];
2398
+ for (const [key, prop] of Object.entries(properties)) {
2399
+ const type = prop.type;
2400
+ const description = prop.description;
2401
+ const isRequired = required.includes(key);
2402
+ const enumValues = prop.enum;
2403
+ const defaultValue = prop.default;
2404
+ let line = `${indent}${chalk5.cyan(key)}`;
2405
+ if (isRequired) {
2406
+ line += chalk5.red("*");
2407
+ }
2408
+ if (type === "array") {
2409
+ const items = prop.items;
2410
+ const itemType = items?.type || "any";
2411
+ line += chalk5.dim(` (${itemType}[])`);
2412
+ } else if (type === "object" && prop.properties) {
2413
+ line += chalk5.dim(" (object)");
2414
+ } else {
2415
+ line += chalk5.dim(` (${type})`);
2416
+ }
2417
+ if (defaultValue !== void 0) {
2418
+ line += chalk5.dim(` [default: ${JSON.stringify(defaultValue)}]`);
2419
+ }
2420
+ if (description) {
2421
+ line += `: ${description}`;
2422
+ }
2423
+ if (enumValues) {
2424
+ line += chalk5.yellow(` - one of: ${enumValues.join(", ")}`);
2425
+ }
2426
+ lines.push(line);
2427
+ if (type === "object" && prop.properties) {
2428
+ lines.push(formatSchemaAsText(prop, indent + " "));
2429
+ }
2430
+ }
2431
+ return lines.join("\n");
2432
+ }
2433
+ async function executeGadgetValidate(file, env) {
2434
+ const cwd = process.cwd();
2435
+ try {
2436
+ const gadgets = await loadGadgets([file], cwd);
2437
+ if (gadgets.length === 0) {
2438
+ throw new Error(
2439
+ "No gadgets exported from file.\nA valid gadget must have:\n - execute() method\n - description property\n - parameterSchema (optional)"
2440
+ );
2441
+ }
2442
+ const issues = [];
2443
+ for (const gadget of gadgets) {
2444
+ const name = gadget.name ?? gadget.constructor.name;
2445
+ if (!gadget.description) {
2446
+ issues.push(`${name}: Missing 'description' property.`);
2447
+ }
2448
+ if (gadget.parameterSchema) {
2449
+ try {
2450
+ validateGadgetSchema(gadget.parameterSchema, name);
2451
+ } catch (schemaError) {
2452
+ const message = schemaError instanceof Error ? schemaError.message : String(schemaError);
2453
+ issues.push(`${name}: ${message}`);
2454
+ }
2455
+ }
2456
+ if (typeof gadget.execute !== "function") {
2457
+ issues.push(`${name}: Missing 'execute()' method.`);
2458
+ }
2459
+ }
2460
+ if (issues.length > 0) {
2461
+ throw new Error(`Validation issues:
2462
+ ${issues.map((i) => ` - ${i}`).join("\n")}`);
2463
+ }
2464
+ env.stdout.write(chalk5.green.bold("\n\u2713 Valid\n\n"));
2465
+ env.stdout.write(chalk5.bold("Gadgets found:\n"));
2466
+ for (const gadget of gadgets) {
2467
+ const name = gadget.name ?? gadget.constructor.name;
2468
+ const schemaInfo = gadget.parameterSchema ? chalk5.cyan("(with schema)") : chalk5.dim("(no schema)");
2469
+ env.stdout.write(` ${chalk5.bold(name)} ${schemaInfo}
2470
+ `);
2471
+ env.stdout.write(chalk5.dim(` ${gadget.description}
2472
+ `));
2473
+ }
2474
+ env.stdout.write("\n");
2475
+ } catch (error) {
2476
+ const message = error instanceof Error ? error.message : String(error);
2477
+ env.stdout.write(chalk5.red.bold(`
2478
+ \u2717 Invalid
2479
+
2480
+ `));
2481
+ env.stdout.write(`${message}
2482
+
2483
+ `);
2484
+ env.setExitCode(1);
2485
+ }
2486
+ }
2487
+ function registerGadgetCommand(program, env) {
2488
+ const gadgetCmd = program.command("gadget").description("Test and inspect gadgets outside the agent loop.");
2489
+ gadgetCmd.command("run <file>").description("Execute a gadget with interactive prompts or stdin JSON.").option("--name <gadget>", "Select gadget by name (required if file exports multiple)").option("--json", "Format output as pretty-printed JSON").option("--raw", "Output result as raw string without formatting").action(
2490
+ (file, options) => executeAction(() => executeGadgetRun(file, options, env), env)
2491
+ );
2492
+ gadgetCmd.command("info <file>").description("Display gadget description, schema, and examples.").option("--name <gadget>", "Select gadget by name (required if file exports multiple)").option("--json", "Output as JSON instead of formatted text").action(
2493
+ (file, options) => executeAction(() => executeGadgetInfo(file, options, env), env)
2494
+ );
2495
+ gadgetCmd.command("validate <file>").description("Check if file exports valid gadget(s).").action(
2496
+ (file) => executeAction(() => executeGadgetValidate(file, env), env)
2497
+ );
2498
+ }
2499
+
2500
+ // src/cli/models-command.ts
2501
+ import chalk6 from "chalk";
1960
2502
  init_model_shortcuts();
1961
2503
  async function handleModelsCommand(options, env) {
1962
2504
  const client = env.createClient();
@@ -1976,13 +2518,13 @@ function renderTable(models, verbose, stream) {
1976
2518
  }
1977
2519
  grouped.get(provider).push(model);
1978
2520
  }
1979
- stream.write(chalk4.bold.cyan("\nAvailable Models\n"));
1980
- stream.write(chalk4.cyan("=".repeat(80)) + "\n\n");
2521
+ stream.write(chalk6.bold.cyan("\nAvailable Models\n"));
2522
+ stream.write(chalk6.cyan("=".repeat(80)) + "\n\n");
1981
2523
  const providers = Array.from(grouped.keys()).sort();
1982
2524
  for (const provider of providers) {
1983
2525
  const providerModels = grouped.get(provider);
1984
2526
  const providerName = provider.charAt(0).toUpperCase() + provider.slice(1);
1985
- stream.write(chalk4.bold.yellow(`${providerName} Models
2527
+ stream.write(chalk6.bold.yellow(`${providerName} Models
1986
2528
  `));
1987
2529
  if (verbose) {
1988
2530
  renderVerboseTable(providerModels, stream);
@@ -1991,11 +2533,11 @@ function renderTable(models, verbose, stream) {
1991
2533
  }
1992
2534
  stream.write("\n");
1993
2535
  }
1994
- stream.write(chalk4.bold.magenta("Model Shortcuts\n"));
1995
- stream.write(chalk4.dim("\u2500".repeat(80)) + "\n");
2536
+ stream.write(chalk6.bold.magenta("Model Shortcuts\n"));
2537
+ stream.write(chalk6.dim("\u2500".repeat(80)) + "\n");
1996
2538
  const shortcuts = Object.entries(MODEL_ALIASES).sort((a, b) => a[0].localeCompare(b[0]));
1997
2539
  for (const [shortcut, fullName] of shortcuts) {
1998
- stream.write(chalk4.cyan(` ${shortcut.padEnd(15)}`) + chalk4.dim(" \u2192 ") + chalk4.white(fullName) + "\n");
2540
+ stream.write(chalk6.cyan(` ${shortcut.padEnd(15)}`) + chalk6.dim(" \u2192 ") + chalk6.white(fullName) + "\n");
1999
2541
  }
2000
2542
  stream.write("\n");
2001
2543
  }
@@ -2005,45 +2547,45 @@ function renderCompactTable(models, stream) {
2005
2547
  const contextWidth = 13;
2006
2548
  const inputWidth = 10;
2007
2549
  const outputWidth = 10;
2008
- stream.write(chalk4.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
2550
+ stream.write(chalk6.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
2009
2551
  stream.write(
2010
- chalk4.bold(
2552
+ chalk6.bold(
2011
2553
  "Model ID".padEnd(idWidth) + " " + "Display Name".padEnd(nameWidth) + " " + "Context".padEnd(contextWidth) + " " + "Input".padEnd(inputWidth) + " " + "Output".padEnd(outputWidth)
2012
2554
  ) + "\n"
2013
2555
  );
2014
- stream.write(chalk4.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
2556
+ stream.write(chalk6.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
2015
2557
  for (const model of models) {
2016
2558
  const contextFormatted = formatTokens2(model.contextWindow);
2017
2559
  const inputPrice = `$${model.pricing.input.toFixed(2)}`;
2018
2560
  const outputPrice = `$${model.pricing.output.toFixed(2)}`;
2019
2561
  stream.write(
2020
- chalk4.green(model.modelId.padEnd(idWidth)) + " " + chalk4.white(model.displayName.padEnd(nameWidth)) + " " + chalk4.yellow(contextFormatted.padEnd(contextWidth)) + " " + chalk4.cyan(inputPrice.padEnd(inputWidth)) + " " + chalk4.cyan(outputPrice.padEnd(outputWidth)) + "\n"
2562
+ chalk6.green(model.modelId.padEnd(idWidth)) + " " + chalk6.white(model.displayName.padEnd(nameWidth)) + " " + chalk6.yellow(contextFormatted.padEnd(contextWidth)) + " " + chalk6.cyan(inputPrice.padEnd(inputWidth)) + " " + chalk6.cyan(outputPrice.padEnd(outputWidth)) + "\n"
2021
2563
  );
2022
2564
  }
2023
- stream.write(chalk4.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
2024
- stream.write(chalk4.dim(` * Prices are per 1M tokens
2565
+ stream.write(chalk6.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
2566
+ stream.write(chalk6.dim(` * Prices are per 1M tokens
2025
2567
  `));
2026
2568
  }
2027
2569
  function renderVerboseTable(models, stream) {
2028
2570
  for (const model of models) {
2029
- stream.write(chalk4.bold.green(`
2571
+ stream.write(chalk6.bold.green(`
2030
2572
  ${model.modelId}
2031
2573
  `));
2032
- stream.write(chalk4.dim(" " + "\u2500".repeat(60)) + "\n");
2033
- stream.write(` ${chalk4.dim("Name:")} ${chalk4.white(model.displayName)}
2574
+ stream.write(chalk6.dim(" " + "\u2500".repeat(60)) + "\n");
2575
+ stream.write(` ${chalk6.dim("Name:")} ${chalk6.white(model.displayName)}
2034
2576
  `);
2035
- stream.write(` ${chalk4.dim("Context:")} ${chalk4.yellow(formatTokens2(model.contextWindow))}
2577
+ stream.write(` ${chalk6.dim("Context:")} ${chalk6.yellow(formatTokens2(model.contextWindow))}
2036
2578
  `);
2037
- stream.write(` ${chalk4.dim("Max Output:")} ${chalk4.yellow(formatTokens2(model.maxOutputTokens))}
2579
+ stream.write(` ${chalk6.dim("Max Output:")} ${chalk6.yellow(formatTokens2(model.maxOutputTokens))}
2038
2580
  `);
2039
- stream.write(` ${chalk4.dim("Pricing:")} ${chalk4.cyan(`$${model.pricing.input.toFixed(2)} input`)} ${chalk4.dim("/")} ${chalk4.cyan(`$${model.pricing.output.toFixed(2)} output`)} ${chalk4.dim("(per 1M tokens)")}
2581
+ stream.write(` ${chalk6.dim("Pricing:")} ${chalk6.cyan(`$${model.pricing.input.toFixed(2)} input`)} ${chalk6.dim("/")} ${chalk6.cyan(`$${model.pricing.output.toFixed(2)} output`)} ${chalk6.dim("(per 1M tokens)")}
2040
2582
  `);
2041
2583
  if (model.pricing.cachedInput !== void 0) {
2042
- stream.write(` ${chalk4.dim("Cached Input:")} ${chalk4.cyan(`$${model.pricing.cachedInput.toFixed(2)} per 1M tokens`)}
2584
+ stream.write(` ${chalk6.dim("Cached Input:")} ${chalk6.cyan(`$${model.pricing.cachedInput.toFixed(2)} per 1M tokens`)}
2043
2585
  `);
2044
2586
  }
2045
2587
  if (model.knowledgeCutoff) {
2046
- stream.write(` ${chalk4.dim("Knowledge:")} ${model.knowledgeCutoff}
2588
+ stream.write(` ${chalk6.dim("Knowledge:")} ${model.knowledgeCutoff}
2047
2589
  `);
2048
2590
  }
2049
2591
  const features = [];
@@ -2054,20 +2596,20 @@ function renderVerboseTable(models, stream) {
2054
2596
  if (model.features.structuredOutputs) features.push("structured-outputs");
2055
2597
  if (model.features.fineTuning) features.push("fine-tuning");
2056
2598
  if (features.length > 0) {
2057
- stream.write(` ${chalk4.dim("Features:")} ${chalk4.blue(features.join(", "))}
2599
+ stream.write(` ${chalk6.dim("Features:")} ${chalk6.blue(features.join(", "))}
2058
2600
  `);
2059
2601
  }
2060
2602
  if (model.metadata) {
2061
2603
  if (model.metadata.family) {
2062
- stream.write(` ${chalk4.dim("Family:")} ${model.metadata.family}
2604
+ stream.write(` ${chalk6.dim("Family:")} ${model.metadata.family}
2063
2605
  `);
2064
2606
  }
2065
2607
  if (model.metadata.releaseDate) {
2066
- stream.write(` ${chalk4.dim("Released:")} ${model.metadata.releaseDate}
2608
+ stream.write(` ${chalk6.dim("Released:")} ${model.metadata.releaseDate}
2067
2609
  `);
2068
2610
  }
2069
2611
  if (model.metadata.notes) {
2070
- stream.write(` ${chalk4.dim("Notes:")} ${chalk4.italic(model.metadata.notes)}
2612
+ stream.write(` ${chalk6.dim("Notes:")} ${chalk6.italic(model.metadata.notes)}
2071
2613
  `);
2072
2614
  }
2073
2615
  }
@@ -2119,7 +2661,7 @@ function registerModelsCommand(program, env) {
2119
2661
  init_client();
2120
2662
  init_logger();
2121
2663
  import readline from "node:readline";
2122
- import chalk5 from "chalk";
2664
+ import chalk7 from "chalk";
2123
2665
  var LOG_LEVEL_MAP = {
2124
2666
  silly: 0,
2125
2667
  trace: 1,
@@ -2166,14 +2708,14 @@ function createPromptFunction(stdin, stdout) {
2166
2708
  output: stdout
2167
2709
  });
2168
2710
  stdout.write("\n");
2169
- stdout.write(`${chalk5.cyan("\u2500".repeat(60))}
2711
+ stdout.write(`${chalk7.cyan("\u2500".repeat(60))}
2170
2712
  `);
2171
- stdout.write(chalk5.cyan.bold("\u{1F916} Agent asks:\n"));
2713
+ stdout.write(chalk7.cyan.bold("\u{1F916} Agent asks:\n"));
2172
2714
  stdout.write(`${question}
2173
2715
  `);
2174
- stdout.write(`${chalk5.cyan("\u2500".repeat(60))}
2716
+ stdout.write(`${chalk7.cyan("\u2500".repeat(60))}
2175
2717
  `);
2176
- rl.question(chalk5.green.bold("You: "), (answer) => {
2718
+ rl.question(chalk7.green.bold("You: "), (answer) => {
2177
2719
  rl.close();
2178
2720
  resolve(answer);
2179
2721
  });
@@ -2250,7 +2792,7 @@ function registerCustomCommand(program, name, config, env) {
2250
2792
  function parseLogLevel(value) {
2251
2793
  const normalized = value.toLowerCase();
2252
2794
  if (!LOG_LEVELS.includes(normalized)) {
2253
- throw new InvalidArgumentError3(`Log level must be one of: ${LOG_LEVELS.join(", ")}`);
2795
+ throw new InvalidArgumentError2(`Log level must be one of: ${LOG_LEVELS.join(", ")}`);
2254
2796
  }
2255
2797
  return normalized;
2256
2798
  }
@@ -2263,6 +2805,7 @@ function createProgram(env, config) {
2263
2805
  registerCompleteCommand(program, env, config?.complete);
2264
2806
  registerAgentCommand(program, env, config?.agent);
2265
2807
  registerModelsCommand(program, env);
2808
+ registerGadgetCommand(program, env);
2266
2809
  if (config) {
2267
2810
  const customNames = getCustomCommandNames(config);
2268
2811
  for (const name of customNames) {