llmist 0.4.0 → 0.5.0

This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/cli.cjs CHANGED
@@ -368,6 +368,7 @@ var init_prompt_config = __esm({
368
368
  criticalUsage: "INVOKE gadgets using the markers - do not describe what you want to do.",
369
369
  formatDescriptionYaml: "Parameters in YAML format (one per line)",
370
370
  formatDescriptionJson: "Parameters in JSON format (valid JSON object)",
371
+ formatDescriptionToml: "Parameters in TOML format (key = value pairs, use triple-quotes for multiline)",
371
372
  rules: () => [
372
373
  "Output ONLY plain text with the exact markers - never use function/tool calling",
373
374
  "You can invoke multiple gadgets in a single response",
@@ -375,6 +376,7 @@ var init_prompt_config = __esm({
375
376
  ],
376
377
  schemaLabelJson: "\n\nInput Schema (JSON):",
377
378
  schemaLabelYaml: "\n\nInput Schema (YAML):",
379
+ schemaLabelToml: "\n\nInput Schema (TOML):",
378
380
  customExamples: null
379
381
  };
380
382
  }
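The new formatDescriptionToml default can be overridden through the same promptConfig object the message builder below accepts. A minimal sketch, assuming you construct that builder yourself (the override text is illustrative):

  const builder = new LLMMessageBuilder({
    formatDescriptionToml: "Parameters as TOML key = value pairs, one per line",
  });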
@@ -395,6 +397,15 @@ var init_messages = __esm({
395
397
  constructor(promptConfig) {
396
398
  this.promptConfig = promptConfig ?? {};
397
399
  }
400
+ /**
401
+ * Set custom prefixes for gadget markers.
402
+ * Used to configure history builder to match system prompt markers.
403
+ */
404
+ withPrefixes(startPrefix, endPrefix) {
405
+ this.startPrefix = startPrefix;
406
+ this.endPrefix = endPrefix;
407
+ return this;
408
+ }
398
409
  addSystem(content, metadata) {
399
410
  this.messages.push({ role: "system", content, metadata });
400
411
  return this;
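withPrefixes() is chainable and exists so the history builder can be configured with the same markers the system prompt advertises. A minimal usage sketch (the marker strings are placeholders):

  const history = new LLMMessageBuilder()
    .withPrefixes("<<GADGET_START>>", "<<GADGET_END>>") // must match the system prompt markers
    .addSystem("You are a helpful assistant.");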
@@ -432,7 +443,14 @@ var init_messages = __esm({
432
443
  for (const gadget of gadgets) {
433
444
  const gadgetName = gadget.name ?? gadget.constructor.name;
434
445
  const instruction = gadget.getInstruction(parameterFormat);
435
- const schemaMarker = parameterFormat === "yaml" ? "\n\nInput Schema (YAML):" : "\n\nInput Schema (JSON):";
446
+ const schemaMarkers = {
447
+ yaml: "\n\nInput Schema (YAML):",
448
+ json: "\n\nInput Schema (JSON):",
449
+ toml: "\n\nInput Schema (TOML):",
450
+ auto: "\n\nInput Schema (JSON):"
451
+ // auto defaults to JSON schema display
452
+ };
453
+ const schemaMarker = schemaMarkers[parameterFormat];
436
454
  const schemaIndex = instruction.indexOf(schemaMarker);
437
455
  const description = (schemaIndex !== -1 ? instruction.substring(0, schemaIndex) : instruction).trim();
438
456
  const schema = schemaIndex !== -1 ? instruction.substring(schemaIndex + schemaMarker.length).trim() : "";
@@ -452,15 +470,26 @@ ${schema}`);
452
470
  }
453
471
  buildUsageSection(parameterFormat, context) {
454
472
  const parts = [];
455
- const formatDescription = parameterFormat === "yaml" ? resolvePromptTemplate(
456
- this.promptConfig.formatDescriptionYaml,
457
- DEFAULT_PROMPTS.formatDescriptionYaml,
458
- context
459
- ) : resolvePromptTemplate(
460
- this.promptConfig.formatDescriptionJson,
461
- DEFAULT_PROMPTS.formatDescriptionJson,
462
- context
463
- );
473
+ const formatDescriptionMap = {
474
+ yaml: {
475
+ config: this.promptConfig.formatDescriptionYaml,
476
+ defaultValue: DEFAULT_PROMPTS.formatDescriptionYaml
477
+ },
478
+ json: {
479
+ config: this.promptConfig.formatDescriptionJson,
480
+ defaultValue: DEFAULT_PROMPTS.formatDescriptionJson
481
+ },
482
+ toml: {
483
+ config: this.promptConfig.formatDescriptionToml,
484
+ defaultValue: DEFAULT_PROMPTS.formatDescriptionToml
485
+ },
486
+ auto: {
487
+ config: this.promptConfig.formatDescriptionJson,
488
+ defaultValue: DEFAULT_PROMPTS.formatDescriptionJson
489
+ }
490
+ };
491
+ const { config, defaultValue } = formatDescriptionMap[parameterFormat];
492
+ const formatDescription = resolvePromptTemplate(config, defaultValue, context);
464
493
  parts.push("\n\nHOW TO INVOKE GADGETS");
465
494
  parts.push("\n=====================\n");
466
495
  const criticalUsage = resolvePromptTemplate(
@@ -488,38 +517,110 @@ CRITICAL: ${criticalUsage}
488
517
  return this.promptConfig.customExamples(context);
489
518
  }
490
519
  const parts = [];
491
- const singleExample = parameterFormat === "yaml" ? `${this.startPrefix}translate
520
+ const singleExamples = {
521
+ yaml: `${this.startPrefix}translate
492
522
  from: English
493
523
  to: Polish
494
- content: Paris is the capital of France.
495
- ${this.endPrefix}` : `${this.startPrefix}translate
496
- {"from": "English", "to": "Polish", "content": "Paris is the capital of France."}
497
- ${this.endPrefix}`;
524
+ content: "Paris is the capital of France: a beautiful city."
525
+ ${this.endPrefix}`,
526
+ json: `${this.startPrefix}translate
527
+ {"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
528
+ ${this.endPrefix}`,
529
+ toml: `${this.startPrefix}translate
530
+ from = "English"
531
+ to = "Polish"
532
+ content = "Paris is the capital of France: a beautiful city."
533
+ ${this.endPrefix}`,
534
+ auto: `${this.startPrefix}translate
535
+ {"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
536
+ ${this.endPrefix}`
537
+ };
498
538
  parts.push(`
499
539
 
500
540
  EXAMPLE (Single Gadget):
501
541
 
502
- ${singleExample}`);
503
- const multipleExample = parameterFormat === "yaml" ? `${this.startPrefix}translate
542
+ ${singleExamples[parameterFormat]}`);
543
+ const multipleExamples = {
544
+ yaml: `${this.startPrefix}translate
504
545
  from: English
505
546
  to: Polish
506
- content: Paris is the capital of France.
547
+ content: "Paris is the capital of France: a beautiful city."
507
548
  ${this.endPrefix}
508
549
  ${this.startPrefix}analyze
509
550
  type: economic_analysis
510
551
  matter: "Polish Economy"
511
- question: Polish arms exports 2025.
512
- ${this.endPrefix}` : `${this.startPrefix}translate
513
- {"from": "English", "to": "Polish", "content": "Paris is the capital of France."}
552
+ question: |
553
+ Analyze the following:
554
+ - Polish arms exports 2025
555
+ - Economic implications
556
+ ${this.endPrefix}`,
557
+ json: `${this.startPrefix}translate
558
+ {"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
559
+ ${this.endPrefix}
560
+ ${this.startPrefix}analyze
561
+ {"type": "economic_analysis", "matter": "Polish Economy", "question": "Analyze the following: Polish arms exports 2025, economic implications"}
562
+ ${this.endPrefix}`,
563
+ toml: `${this.startPrefix}translate
564
+ from = "English"
565
+ to = "Polish"
566
+ content = "Paris is the capital of France: a beautiful city."
567
+ ${this.endPrefix}
568
+ ${this.startPrefix}analyze
569
+ type = "economic_analysis"
570
+ matter = "Polish Economy"
571
+ question = """
572
+ Analyze the following:
573
+ - Polish arms exports 2025
574
+ - Economic implications
575
+ """
576
+ ${this.endPrefix}`,
577
+ auto: `${this.startPrefix}translate
578
+ {"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
514
579
  ${this.endPrefix}
515
580
  ${this.startPrefix}analyze
516
- {"type": "economic_analysis", "matter": "Polish Economy", "question": "Polish arms exports 2025."}
517
- ${this.endPrefix}`;
581
+ {"type": "economic_analysis", "matter": "Polish Economy", "question": "Analyze the following: Polish arms exports 2025, economic implications"}
582
+ ${this.endPrefix}`
583
+ };
518
584
  parts.push(`
519
585
 
520
586
  EXAMPLE (Multiple Gadgets):
521
587
 
522
- ${multipleExample}`);
588
+ ${multipleExamples[parameterFormat]}`);
589
+ if (parameterFormat === "yaml") {
590
+ parts.push(`
591
+
592
+ YAML MULTILINE SYNTAX:
593
+ For string values with special characters (colons, dashes, quotes) or multiple lines,
594
+ use the pipe (|) syntax. ALL content lines MUST be indented with 2 spaces:
595
+
596
+ CORRECT - all lines indented:
597
+ question: |
598
+ Which option do you prefer?
599
+ - Option A: fast processing
600
+ - Option B: thorough analysis
601
+ Please choose one.
602
+
603
+ WRONG - inconsistent indentation breaks YAML:
604
+ question: |
605
+ Which option do you prefer?
606
+ - Option A: fast
607
+ Please choose one. <-- ERROR: not indented, breaks out of the block`);
608
+ } else if (parameterFormat === "toml") {
609
+ parts.push(`
610
+
611
+ TOML MULTILINE SYNTAX:
612
+ For string values with multiple lines or special characters, use triple-quotes ("""):
613
+
614
+ filePath = "README.md"
615
+ content = """
616
+ # Project Title
617
+
618
+ This content can contain:
619
+ - Markdown lists
620
+ - Special characters: # : -
621
+ - Multiple paragraphs
622
+ """`);
623
+ }
523
624
  return parts.join("");
524
625
  }
525
626
  buildRulesSection(context) {
@@ -563,6 +664,16 @@ ${this.endPrefix}`
563
664
  return `${key}: ${JSON.stringify(value)}`;
564
665
  }).join("\n");
565
666
  }
667
+ if (format === "toml") {
668
+ return Object.entries(parameters).map(([key, value]) => {
669
+ if (typeof value === "string" && value.includes("\n")) {
670
+ return `${key} = """
671
+ ${value}
672
+ """`;
673
+ }
674
+ return `${key} = ${JSON.stringify(value)}`;
675
+ }).join("\n");
676
+ }
566
677
  return JSON.stringify(parameters);
567
678
  }
568
679
  build() {
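For reference, the new TOML branch above serializes a parameter object roughly as follows (a sketch derived from the mapping shown: single-line values go through JSON.stringify, multi-line strings get triple quotes):

  // input parameters
  { filePath: "README.md", content: "# Title\nBody text" }
  // serialized for the history entry
  filePath = "README.md"
  content = """
  # Title
  Body text
  """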
@@ -664,11 +775,14 @@ var init_conversation_manager = __esm({
664
775
  initialMessages;
665
776
  historyBuilder;
666
777
  parameterFormat;
667
- constructor(baseMessages, initialMessages, parameterFormat = "json") {
778
+ constructor(baseMessages, initialMessages, options = {}) {
668
779
  this.baseMessages = baseMessages;
669
780
  this.initialMessages = initialMessages;
670
- this.parameterFormat = parameterFormat;
781
+ this.parameterFormat = options.parameterFormat ?? "json";
671
782
  this.historyBuilder = new LLMMessageBuilder();
783
+ if (options.startPrefix && options.endPrefix) {
784
+ this.historyBuilder.withPrefixes(options.startPrefix, options.endPrefix);
785
+ }
672
786
  }
673
787
  addUserMessage(content) {
674
788
  this.historyBuilder.addUser(content);
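A sketch of the new options-object contract (ConversationManager is internal to the bundle; the marker strings are placeholders, and both prefixes must be provided for them to take effect):

  new ConversationManager(baseMessages, initialMessages, {
    parameterFormat: "toml",          // defaults to "json" when omitted
    startPrefix: "<<GADGET_START>>",
    endPrefix: "<<GADGET_END>>",
  });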
@@ -1139,11 +1253,109 @@ var init_executor = __esm({
1139
1253
  });
1140
1254
 
1141
1255
  // src/gadgets/parser.ts
1142
- var yaml, globalInvocationCounter, StreamParser;
1256
+ function preprocessYaml(yamlStr) {
1257
+ const lines = yamlStr.split("\n");
1258
+ const result = [];
1259
+ let i = 0;
1260
+ while (i < lines.length) {
1261
+ const line = lines[i];
1262
+ const match = line.match(/^(\s*)([\w-]+):\s+(.+)$/);
1263
+ if (match) {
1264
+ const [, indent, key, value] = match;
1265
+ if (value === "|" || value === ">" || value === "|-" || value === ">-") {
1266
+ result.push(line);
1267
+ i++;
1268
+ const keyIndentLen2 = indent.length;
1269
+ const blockLines = [];
1270
+ let minContentIndent = Infinity;
1271
+ while (i < lines.length) {
1272
+ const blockLine = lines[i];
1273
+ const blockIndentMatch = blockLine.match(/^(\s*)/);
1274
+ const blockIndentLen = blockIndentMatch ? blockIndentMatch[1].length : 0;
1275
+ if (blockLine.trim() === "") {
1276
+ blockLines.push({ content: "", originalIndent: 0 });
1277
+ i++;
1278
+ continue;
1279
+ }
1280
+ if (blockIndentLen > keyIndentLen2) {
1281
+ const content = blockLine.substring(blockIndentLen);
1282
+ blockLines.push({ content, originalIndent: blockIndentLen });
1283
+ if (content.trim().length > 0) {
1284
+ minContentIndent = Math.min(minContentIndent, blockIndentLen);
1285
+ }
1286
+ i++;
1287
+ } else {
1288
+ break;
1289
+ }
1290
+ }
1291
+ const targetIndent = keyIndentLen2 + 2;
1292
+ for (const blockLine of blockLines) {
1293
+ if (blockLine.content === "") {
1294
+ result.push("");
1295
+ } else {
1296
+ result.push(" ".repeat(targetIndent) + blockLine.content);
1297
+ }
1298
+ }
1299
+ continue;
1300
+ }
1301
+ if (value.startsWith('"') || value.startsWith("'") || value === "true" || value === "false" || /^-?\d+(\.\d+)?$/.test(value)) {
1302
+ result.push(line);
1303
+ i++;
1304
+ continue;
1305
+ }
1306
+ const keyIndentLen = indent.length;
1307
+ const continuationLines = [];
1308
+ let j = i + 1;
1309
+ while (j < lines.length) {
1310
+ const nextLine = lines[j];
1311
+ if (nextLine.trim() === "") {
1312
+ continuationLines.push(nextLine);
1313
+ j++;
1314
+ continue;
1315
+ }
1316
+ const nextIndentMatch = nextLine.match(/^(\s*)/);
1317
+ const nextIndentLen = nextIndentMatch ? nextIndentMatch[1].length : 0;
1318
+ if (nextIndentLen > keyIndentLen) {
1319
+ continuationLines.push(nextLine);
1320
+ j++;
1321
+ } else {
1322
+ break;
1323
+ }
1324
+ }
1325
+ if (continuationLines.length > 0 && continuationLines.some((l) => l.trim().length > 0)) {
1326
+ result.push(`${indent}${key}: |`);
1327
+ result.push(`${indent} ${value}`);
1328
+ for (const contLine of continuationLines) {
1329
+ if (contLine.trim() === "") {
1330
+ result.push("");
1331
+ } else {
1332
+ const contIndentMatch = contLine.match(/^(\s*)/);
1333
+ const contIndent = contIndentMatch ? contIndentMatch[1] : "";
1334
+ const contContent = contLine.substring(contIndent.length);
1335
+ result.push(`${indent} ${contContent}`);
1336
+ }
1337
+ }
1338
+ i = j;
1339
+ continue;
1340
+ }
1341
+ if (value.includes(": ") || value.endsWith(":")) {
1342
+ const escaped = value.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
1343
+ result.push(`${indent}${key}: "${escaped}"`);
1344
+ i++;
1345
+ continue;
1346
+ }
1347
+ }
1348
+ result.push(line);
1349
+ i++;
1350
+ }
1351
+ return result.join("\n");
1352
+ }
1353
+ var yaml, import_js_toml, globalInvocationCounter, StreamParser;
1143
1354
  var init_parser = __esm({
1144
1355
  "src/gadgets/parser.ts"() {
1145
1356
  "use strict";
1146
1357
  yaml = __toESM(require("js-yaml"), 1);
1358
+ import_js_toml = require("js-toml");
1147
1359
  init_constants();
1148
1360
  globalInvocationCounter = 0;
1149
1361
  StreamParser = class {
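preprocessYaml() exists to repair the YAML that models commonly emit before it reaches yaml.load(). A small sketch based on the branches above (the function is module-private, so this is illustrative only):

  // Bare colons inside unquoted values are quoted:
  preprocessYaml('matter: Polish Economy: 2025 outlook');
  // -> 'matter: "Polish Economy: 2025 outlook"'
  // In addition, unevenly indented block-scalar content is re-indented to two spaces
  // past its key, and unquoted multi-line values are rewritten as | block scalars.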
@@ -1165,6 +1377,17 @@ var init_parser = __esm({
1165
1377
  this.lastReportedTextLength = index;
1166
1378
  return segment.trim().length > 0 ? segment : void 0;
1167
1379
  }
1380
+ /**
1381
+ * Parse gadget name, handling both old format (name:invocationId) and new format (just name).
1382
+ * For new format, generates a unique invocation ID.
1383
+ */
1384
+ parseGadgetName(gadgetName) {
1385
+ if (gadgetName.includes(":")) {
1386
+ const parts = gadgetName.split(":");
1387
+ return { actualName: parts[0], invocationId: parts[1] };
1388
+ }
1389
+ return { actualName: gadgetName, invocationId: `gadget_${++globalInvocationCounter}` };
1390
+ }
1168
1391
  /**
1169
1392
  * Parse parameter string according to configured format
1170
1393
  */
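A sketch of the two inputs parseGadgetName() accepts (parser stands for a StreamParser instance; auto-generated IDs come from the module-level counter, so the numeric suffix will vary):

  parser.parseGadgetName("translate:abc123");
  // -> { actualName: "translate", invocationId: "abc123" }
  parser.parseGadgetName("translate");
  // -> { actualName: "translate", invocationId: "gadget_1" }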
@@ -1178,20 +1401,31 @@ var init_parser = __esm({
1178
1401
  }
1179
1402
  if (this.parameterFormat === "yaml") {
1180
1403
  try {
1181
- return { parameters: yaml.load(raw) };
1404
+ return { parameters: yaml.load(preprocessYaml(raw)) };
1182
1405
  } catch (error) {
1183
1406
  return { parseError: error instanceof Error ? error.message : "Failed to parse YAML" };
1184
1407
  }
1185
1408
  }
1409
+ if (this.parameterFormat === "toml") {
1410
+ try {
1411
+ return { parameters: (0, import_js_toml.load)(raw) };
1412
+ } catch (error) {
1413
+ return { parseError: error instanceof Error ? error.message : "Failed to parse TOML" };
1414
+ }
1415
+ }
1186
1416
  try {
1187
1417
  return { parameters: JSON.parse(raw) };
1188
1418
  } catch {
1189
1419
  try {
1190
- return { parameters: yaml.load(raw) };
1191
- } catch (error) {
1192
- return {
1193
- parseError: error instanceof Error ? error.message : "Failed to parse as JSON or YAML"
1194
- };
1420
+ return { parameters: (0, import_js_toml.load)(raw) };
1421
+ } catch {
1422
+ try {
1423
+ return { parameters: yaml.load(preprocessYaml(raw)) };
1424
+ } catch (error) {
1425
+ return {
1426
+ parseError: error instanceof Error ? error.message : "Failed to parse as JSON, TOML, or YAML"
1427
+ };
1428
+ }
1195
1429
  }
1196
1430
  }
1197
1431
  }
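The fallback chain above means the default path now tries JSON, then TOML, then preprocessed YAML. A rough sketch of inputs landing in each branch (return values abbreviated):

  parser.parseParameters('{"from": "English"}'); // JSON.parse succeeds
  parser.parseParameters('from = "English"');    // JSON fails, js-toml load() succeeds
  parser.parseParameters('from: English');       // JSON and TOML fail, yaml.load() succeeds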
@@ -1210,16 +1444,7 @@ var init_parser = __esm({
1210
1444
  const metadataEndIndex = this.buffer.indexOf("\n", metadataStartIndex);
1211
1445
  if (metadataEndIndex === -1) break;
1212
1446
  const gadgetName = this.buffer.substring(metadataStartIndex, metadataEndIndex).trim();
1213
- let invocationId;
1214
- let actualGadgetName;
1215
- if (gadgetName.includes(":")) {
1216
- const parts = gadgetName.split(":");
1217
- actualGadgetName = parts[0];
1218
- invocationId = parts[1];
1219
- } else {
1220
- actualGadgetName = gadgetName;
1221
- invocationId = `gadget_${++globalInvocationCounter}`;
1222
- }
1447
+ const { actualName: actualGadgetName, invocationId } = this.parseGadgetName(gadgetName);
1223
1448
  const contentStartIndex = metadataEndIndex + 1;
1224
1449
  let partEndIndex;
1225
1450
  let endMarkerLength = 0;
@@ -1229,23 +1454,29 @@ var init_parser = __esm({
1229
1454
  if (partEndIndex === -1) break;
1230
1455
  endMarkerLength = oldEndMarker.length;
1231
1456
  } else {
1232
- partEndIndex = contentStartIndex;
1457
+ const nextStartPos = this.buffer.indexOf(this.startPrefix, contentStartIndex);
1458
+ let validEndPos = -1;
1459
+ let searchPos = contentStartIndex;
1233
1460
  while (true) {
1234
- const endPos = this.buffer.indexOf(this.endPrefix, partEndIndex);
1235
- if (endPos === -1) {
1236
- partEndIndex = -1;
1237
- break;
1238
- }
1461
+ const endPos = this.buffer.indexOf(this.endPrefix, searchPos);
1462
+ if (endPos === -1) break;
1239
1463
  const afterEnd = this.buffer.substring(endPos + this.endPrefix.length);
1240
1464
  if (afterEnd.startsWith("\n") || afterEnd.startsWith("\r") || afterEnd.startsWith(this.startPrefix) || afterEnd.length === 0) {
1241
- partEndIndex = endPos;
1242
- endMarkerLength = this.endPrefix.length;
1465
+ validEndPos = endPos;
1243
1466
  break;
1244
1467
  } else {
1245
- partEndIndex = endPos + this.endPrefix.length;
1468
+ searchPos = endPos + this.endPrefix.length;
1246
1469
  }
1247
1470
  }
1248
- if (partEndIndex === -1) break;
1471
+ if (nextStartPos !== -1 && (validEndPos === -1 || nextStartPos < validEndPos)) {
1472
+ partEndIndex = nextStartPos;
1473
+ endMarkerLength = 0;
1474
+ } else if (validEndPos !== -1) {
1475
+ partEndIndex = validEndPos;
1476
+ endMarkerLength = this.endPrefix.length;
1477
+ } else {
1478
+ break;
1479
+ }
1249
1480
  }
1250
1481
  const parametersRaw = this.buffer.substring(contentStartIndex, partEndIndex).trim();
1251
1482
  const { parameters, parseError } = this.parseParameters(parametersRaw);
@@ -1268,8 +1499,35 @@ var init_parser = __esm({
1268
1499
  this.lastReportedTextLength = 0;
1269
1500
  }
1270
1501
  }
1271
- // Finalize parsing and return remaining text
1502
+ // Finalize parsing and return remaining text or incomplete gadgets
1272
1503
  *finalize() {
1504
+ const startIndex = this.buffer.indexOf(this.startPrefix, this.lastReportedTextLength);
1505
+ if (startIndex !== -1) {
1506
+ const textBefore = this.takeTextUntil(startIndex);
1507
+ if (textBefore !== void 0) {
1508
+ yield { type: "text", content: textBefore };
1509
+ }
1510
+ const metadataStartIndex = startIndex + this.startPrefix.length;
1511
+ const metadataEndIndex = this.buffer.indexOf("\n", metadataStartIndex);
1512
+ if (metadataEndIndex !== -1) {
1513
+ const gadgetName = this.buffer.substring(metadataStartIndex, metadataEndIndex).trim();
1514
+ const { actualName: actualGadgetName, invocationId } = this.parseGadgetName(gadgetName);
1515
+ const contentStartIndex = metadataEndIndex + 1;
1516
+ const parametersRaw = this.buffer.substring(contentStartIndex).trim();
1517
+ const { parameters, parseError } = this.parseParameters(parametersRaw);
1518
+ yield {
1519
+ type: "gadget_call",
1520
+ call: {
1521
+ gadgetName: actualGadgetName,
1522
+ invocationId,
1523
+ parametersYaml: parametersRaw,
1524
+ parameters,
1525
+ parseError
1526
+ }
1527
+ };
1528
+ return;
1529
+ }
1530
+ }
1273
1531
  const remainingText = this.takeTextUntil(this.buffer.length);
1274
1532
  if (remainingText !== void 0) {
1275
1533
  yield { type: "text", content: remainingText };
@@ -1750,11 +2008,11 @@ var init_agent = __esm({
1750
2008
  role: message.role,
1751
2009
  content: message.content
1752
2010
  }));
1753
- this.conversation = new ConversationManager(
1754
- baseMessages,
1755
- initialMessages,
1756
- this.parameterFormat
1757
- );
2011
+ this.conversation = new ConversationManager(baseMessages, initialMessages, {
2012
+ parameterFormat: this.parameterFormat,
2013
+ startPrefix: options.gadgetStartPrefix,
2014
+ endPrefix: options.gadgetEndPrefix
2015
+ });
1758
2016
  this.userPromptProvided = !!options.userPrompt;
1759
2017
  if (options.userPrompt) {
1760
2018
  this.conversation.addUserMessage(options.userPrompt);
@@ -2745,10 +3003,11 @@ var init_gemini = __esm({
2745
3003
  return GEMINI_MODELS;
2746
3004
  }
2747
3005
  buildRequestPayload(options, descriptor, _spec, messages) {
2748
- const { systemInstruction, contents } = this.extractSystemAndContents(messages);
3006
+ const contents = this.convertMessagesToContents(messages);
2749
3007
  const generationConfig = this.buildGenerationConfig(options);
2750
3008
  const config = {
2751
- ...systemInstruction ? { systemInstruction: systemInstruction.parts.map((p) => p.text).join("\n") } : {},
3009
+ // Note: systemInstruction removed - it doesn't work with countTokens()
3010
+ // System messages are now included in contents as user+model exchanges
2752
3011
  ...generationConfig ? { ...generationConfig } : {},
2753
3012
  // Explicitly disable function calling to prevent UNEXPECTED_TOOL_CALL errors
2754
3013
  toolConfig: {
@@ -2769,31 +3028,37 @@ var init_gemini = __esm({
2769
3028
  const streamResponse = await client.models.generateContentStream(payload);
2770
3029
  return streamResponse;
2771
3030
  }
2772
- extractSystemAndContents(messages) {
2773
- const firstSystemIndex = messages.findIndex((message) => message.role === "system");
2774
- if (firstSystemIndex === -1) {
2775
- return {
2776
- systemInstruction: null,
2777
- contents: this.mergeConsecutiveMessages(messages)
2778
- };
3031
+ /**
3032
+ * Convert LLM messages to Gemini contents format.
3033
+ *
3034
+ * For Gemini, we convert system messages to user+model exchanges instead of
3035
+ * using systemInstruction, because:
3036
+ * 1. systemInstruction doesn't work with countTokens() API
3037
+ * 2. This approach gives perfect token counting accuracy (0% error)
3038
+ * 3. The model receives and follows system instructions identically
3039
+ *
3040
+ * System message: "You are a helpful assistant"
3041
+ * Becomes:
3042
+ * - User: "You are a helpful assistant"
3043
+ * - Model: "Understood."
3044
+ */
3045
+ convertMessagesToContents(messages) {
3046
+ const expandedMessages = [];
3047
+ for (const message of messages) {
3048
+ if (message.role === "system") {
3049
+ expandedMessages.push({
3050
+ role: "user",
3051
+ content: message.content
3052
+ });
3053
+ expandedMessages.push({
3054
+ role: "assistant",
3055
+ content: "Understood."
3056
+ });
3057
+ } else {
3058
+ expandedMessages.push(message);
3059
+ }
2779
3060
  }
2780
- let systemBlockEnd = firstSystemIndex;
2781
- while (systemBlockEnd < messages.length && messages[systemBlockEnd].role === "system") {
2782
- systemBlockEnd++;
2783
- }
2784
- const systemMessages = messages.slice(firstSystemIndex, systemBlockEnd);
2785
- const nonSystemMessages = [
2786
- ...messages.slice(0, firstSystemIndex),
2787
- ...messages.slice(systemBlockEnd)
2788
- ];
2789
- const systemInstruction = {
2790
- role: "system",
2791
- parts: systemMessages.map((message) => ({ text: message.content }))
2792
- };
2793
- return {
2794
- systemInstruction,
2795
- contents: this.mergeConsecutiveMessages(nonSystemMessages)
2796
- };
3061
+ return this.mergeConsecutiveMessages(expandedMessages);
2797
3062
  }
2798
3063
  mergeConsecutiveMessages(messages) {
2799
3064
  if (messages.length === 0) {
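A sketch of the conversion described in the comment above, using the role/content message shape seen elsewhere in this bundle; the expanded list (before it is mapped into the SDK's content format) looks like this:

  // input
  [{ role: "system", content: "You are a helpful assistant" },
   { role: "user", content: "Hi" }]
  // after convertMessagesToContents(); merging consecutive roles is a no-op here
  [{ role: "user", content: "You are a helpful assistant" },
   { role: "assistant", content: "Understood." },
   { role: "user", content: "Hi" }]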
@@ -2882,8 +3147,8 @@ var init_gemini = __esm({
2882
3147
  *
2883
3148
  * This method provides accurate token estimation for Gemini models by:
2884
3149
  * - Using the SDK's countTokens() method
2885
- * - Properly extracting and handling system instructions
2886
- * - Transforming messages to Gemini's expected format
3150
+ * - Converting system messages to user+model exchanges (same as in generation)
3151
+ * - This gives perfect token counting accuracy (0% error vs actual usage)
2887
3152
  *
2888
3153
  * @param messages - The messages to count tokens for
2889
3154
  * @param descriptor - Model descriptor containing the model name
@@ -2902,16 +3167,14 @@ var init_gemini = __esm({
2902
3167
  */
2903
3168
  async countTokens(messages, descriptor, _spec) {
2904
3169
  const client = this.client;
2905
- const { systemInstruction, contents } = this.extractSystemAndContents(messages);
2906
- const request = {
2907
- model: descriptor.name,
2908
- contents: this.convertContentsForNewSDK(contents)
2909
- };
2910
- if (systemInstruction) {
2911
- request.systemInstruction = systemInstruction.parts.map((p) => p.text).join("\n");
2912
- }
3170
+ const contents = this.convertMessagesToContents(messages);
2913
3171
  try {
2914
- const response = await client.models.countTokens(request);
3172
+ const response = await client.models.countTokens({
3173
+ model: descriptor.name,
3174
+ contents: this.convertContentsForNewSDK(contents)
3175
+ // Note: systemInstruction not used - it's not supported by countTokens()
3176
+ // and would cause a 2100% token counting error
3177
+ });
2915
3178
  return response.totalTokens ?? 0;
2916
3179
  } catch (error) {
2917
3180
  console.warn(
@@ -4270,7 +4533,7 @@ var COMMANDS = {
4270
4533
  };
4271
4534
  var LOG_LEVELS = ["silly", "trace", "debug", "info", "warn", "error", "fatal"];
4272
4535
  var DEFAULT_MODEL = "openai:gpt-5-nano";
4273
- var DEFAULT_PARAMETER_FORMAT = "json";
4536
+ var DEFAULT_PARAMETER_FORMAT = "toml";
4274
4537
  var OPTION_FLAGS = {
4275
4538
  model: "-m, --model <identifier>",
4276
4539
  systemPrompt: "-s, --system <prompt>",
@@ -4291,7 +4554,7 @@ var OPTION_DESCRIPTIONS = {
4291
4554
  maxTokens: "Maximum number of output tokens requested from the model.",
4292
4555
  maxIterations: "Maximum number of agent loop iterations before exiting.",
4293
4556
  gadgetModule: "Path or module specifier for a gadget export. Repeat to register multiple gadgets.",
4294
- parameterFormat: "Format for gadget parameter schemas: 'json', 'yaml', or 'auto'.",
4557
+ parameterFormat: "Format for gadget parameter schemas: 'json', 'yaml', 'toml', or 'auto'.",
4295
4558
  logLevel: "Log level: silly, trace, debug, info, warn, error, fatal.",
4296
4559
  logFile: "Path to log file. When set, logs are written to file instead of stderr.",
4297
4560
  noBuiltins: "Disable built-in gadgets (AskUser, TellUser).",
@@ -4305,7 +4568,7 @@ var import_commander3 = require("commander");
4305
4568
  // package.json
4306
4569
  var package_default = {
4307
4570
  name: "llmist",
4308
- version: "0.3.1",
4571
+ version: "0.4.1",
4309
4572
  description: "Universal TypeScript LLM client with streaming-first agent framework. Works with any model - no structured outputs or native tool calling required. Implements its own flexible grammar for function calling.",
4310
4573
  type: "module",
4311
4574
  main: "dist/index.cjs",
@@ -4389,7 +4652,10 @@ var package_default = {
4389
4652
  "@google/genai": "^1.27.0",
4390
4653
  chalk: "^5.6.2",
4391
4654
  commander: "^12.1.0",
4655
+ "js-toml": "^1.0.2",
4392
4656
  "js-yaml": "^4.1.0",
4657
+ marked: "^17.0.1",
4658
+ "marked-terminal": "^7.3.0",
4393
4659
  openai: "^6.0.0",
4394
4660
  tiktoken: "^1.0.22",
4395
4661
  tslog: "^4.10.2",
@@ -4402,6 +4668,7 @@ var package_default = {
4402
4668
  "@semantic-release/changelog": "^6.0.3",
4403
4669
  "@semantic-release/git": "^10.0.1",
4404
4670
  "@types/js-yaml": "^4.0.9",
4671
+ "@types/marked-terminal": "^6.1.1",
4405
4672
  "@types/node": "^20.12.7",
4406
4673
  "bun-types": "^1.3.2",
4407
4674
  dotenv: "^17.2.3",
@@ -4414,7 +4681,7 @@ var package_default = {
4414
4681
 
4415
4682
  // src/cli/agent-command.ts
4416
4683
  var import_promises = require("readline/promises");
4417
- var import_commander2 = require("commander");
4684
+ var import_chalk3 = __toESM(require("chalk"), 1);
4418
4685
  init_builder();
4419
4686
  init_registry();
4420
4687
  init_constants2();
@@ -4504,6 +4771,83 @@ function mergeDescriptions(schema, jsonSchema) {
4504
4771
 
4505
4772
  // src/gadgets/gadget.ts
4506
4773
  init_schema_validator();
4774
+ function formatYamlValue(value, indent = "") {
4775
+ if (typeof value === "string") {
4776
+ const lines = value.split("\n");
4777
+ if (lines.length === 1 && !value.includes(":") && !value.startsWith("-")) {
4778
+ return value;
4779
+ }
4780
+ const indentedLines = lines.map((line) => `${indent} ${line}`).join("\n");
4781
+ return `|
4782
+ ${indentedLines}`;
4783
+ }
4784
+ if (typeof value === "number" || typeof value === "boolean") {
4785
+ return String(value);
4786
+ }
4787
+ if (value === null || value === void 0) {
4788
+ return "null";
4789
+ }
4790
+ if (Array.isArray(value)) {
4791
+ if (value.length === 0) return "[]";
4792
+ const items = value.map((item) => `${indent}- ${formatYamlValue(item, indent + " ")}`);
4793
+ return "\n" + items.join("\n");
4794
+ }
4795
+ if (typeof value === "object") {
4796
+ const entries = Object.entries(value);
4797
+ if (entries.length === 0) return "{}";
4798
+ const lines = entries.map(([k, v]) => {
4799
+ const formattedValue = formatYamlValue(v, indent + " ");
4800
+ if (formattedValue.startsWith("\n") || formattedValue.startsWith("|")) {
4801
+ return `${indent}${k}: ${formattedValue}`;
4802
+ }
4803
+ return `${indent}${k}: ${formattedValue}`;
4804
+ });
4805
+ return "\n" + lines.join("\n");
4806
+ }
4807
+ return yaml2.dump(value).trimEnd();
4808
+ }
4809
+ function formatParamsAsYaml(params) {
4810
+ const lines = [];
4811
+ for (const [key, value] of Object.entries(params)) {
4812
+ const formattedValue = formatYamlValue(value, "");
4813
+ if (formattedValue.startsWith("\n")) {
4814
+ lines.push(`${key}:${formattedValue}`);
4815
+ } else {
4816
+ lines.push(`${key}: ${formattedValue}`);
4817
+ }
4818
+ }
4819
+ return lines.join("\n");
4820
+ }
4821
+ function formatTomlValue(value) {
4822
+ if (typeof value === "string") {
4823
+ if (value.includes("\n")) {
4824
+ return `"""
4825
+ ${value}
4826
+ """`;
4827
+ }
4828
+ return JSON.stringify(value);
4829
+ }
4830
+ if (typeof value === "number" || typeof value === "boolean") {
4831
+ return String(value);
4832
+ }
4833
+ if (value === null || value === void 0) {
4834
+ return '""';
4835
+ }
4836
+ if (Array.isArray(value)) {
4837
+ return JSON.stringify(value);
4838
+ }
4839
+ if (typeof value === "object") {
4840
+ return JSON.stringify(value);
4841
+ }
4842
+ return JSON.stringify(value);
4843
+ }
4844
+ function formatParamsAsToml(params) {
4845
+ const lines = [];
4846
+ for (const [key, value] of Object.entries(params)) {
4847
+ lines.push(`${key} = ${formatTomlValue(value)}`);
4848
+ }
4849
+ return lines.join("\n");
4850
+ }
4507
4851
  var BaseGadget = class {
4508
4852
  /**
4509
4853
  * The name of the gadget. Used for identification when LLM calls it.
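For reference, a sketch of what the new TOML example formatter yields for a mixed parameter object (these helpers are module-private; the output follows the branches above):

  formatParamsAsToml({ done: true, type: "success", message: "Line one\nLine two" });
  // done = true
  // type = "success"
  // message = """
  // Line one
  // Line two
  // """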
@@ -4523,6 +4867,14 @@ var BaseGadget = class {
4523
4867
  * Set to 0 or undefined to disable timeout for this gadget.
4524
4868
  */
4525
4869
  timeoutMs;
4870
+ /**
4871
+ * Optional usage examples to help LLMs understand proper invocation.
4872
+ * Examples are rendered in getInstruction() alongside the schema.
4873
+ *
4874
+ * Note: Uses broader `unknown` type to allow typed examples from subclasses
4875
+ * while maintaining runtime compatibility.
4876
+ */
4877
+ examples;
4526
4878
  /**
4527
4879
  * Auto-generated instruction text for the LLM.
4528
4880
  * Combines name, description, and parameter schema into a formatted instruction.
@@ -4535,7 +4887,7 @@ var BaseGadget = class {
4535
4887
  * Generate instruction text for the LLM with format-specific schema.
4536
4888
  * Combines name, description, and parameter schema into a formatted instruction.
4537
4889
  *
4538
- * @param format - Format for the schema representation ('json' | 'yaml' | 'auto')
4890
+ * @param format - Format for the schema representation ('json' | 'yaml' | 'toml' | 'auto')
4539
4891
  * @returns Formatted instruction string
4540
4892
  */
4541
4893
  getInstruction(format = "json") {
@@ -4550,12 +4902,38 @@ var BaseGadget = class {
4550
4902
  if (format === "json" || format === "auto") {
4551
4903
  parts.push("\n\nInput Schema (JSON):");
4552
4904
  parts.push(JSON.stringify(jsonSchema, null, 2));
4905
+ } else if (format === "toml") {
4906
+ parts.push("\n\nInput Schema (TOML):");
4907
+ parts.push(JSON.stringify(jsonSchema, null, 2));
4553
4908
  } else {
4554
4909
  const yamlSchema = yaml2.dump(jsonSchema).trimEnd();
4555
4910
  parts.push("\n\nInput Schema (YAML):");
4556
4911
  parts.push(yamlSchema);
4557
4912
  }
4558
4913
  }
4914
+ if (this.examples && this.examples.length > 0) {
4915
+ parts.push("\n\nExamples:");
4916
+ this.examples.forEach((example, index) => {
4917
+ if (index > 0) {
4918
+ parts.push("");
4919
+ }
4920
+ if (example.comment) {
4921
+ parts.push(`# ${example.comment}`);
4922
+ }
4923
+ parts.push("Input:");
4924
+ if (format === "json" || format === "auto") {
4925
+ parts.push(JSON.stringify(example.params, null, 2));
4926
+ } else if (format === "toml") {
4927
+ parts.push(formatParamsAsToml(example.params));
4928
+ } else {
4929
+ parts.push(formatParamsAsYaml(example.params));
4930
+ }
4931
+ if (example.output !== void 0) {
4932
+ parts.push("Output:");
4933
+ parts.push(example.output);
4934
+ }
4935
+ });
4936
+ }
4559
4937
  return parts.join("\n");
4560
4938
  }
4561
4939
  };
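As a rough illustration, a gadget that defines one example would have its instruction end roughly like this when format is "toml" (spacing follows the parts.join("\n") above; the example content is borrowed from the AskUser gadget further down):

  Examples:
  # Ask for clarification about the task
  Input:
  question = "Which file would you like me to modify?"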
@@ -4567,6 +4945,7 @@ function createGadget(config) {
4567
4945
  description = config.description;
4568
4946
  parameterSchema = config.schema;
4569
4947
  timeoutMs = config.timeoutMs;
4948
+ examples = config.examples;
4570
4949
  execute(params) {
4571
4950
  return config.execute(params);
4572
4951
  }
@@ -4580,8 +4959,20 @@ var askUser = createGadget({
4580
4959
  name: "AskUser",
4581
4960
  description: "Ask the user a question when you need more information or clarification. The user's response will be provided back to you.",
4582
4961
  schema: import_zod.z.object({
4583
- question: import_zod.z.string().describe("The question to ask the user")
4962
+ question: import_zod.z.string().describe("The question to ask the user in plain-text or Markdown")
4584
4963
  }),
4964
+ examples: [
4965
+ {
4966
+ comment: "Ask for clarification about the task",
4967
+ params: { question: "Which file would you like me to modify?" }
4968
+ },
4969
+ {
4970
+ comment: "Ask user to choose between options",
4971
+ params: {
4972
+ question: "I found multiple matches. Which one should I use?\n- src/utils/helper.ts\n- src/lib/helper.ts"
4973
+ }
4974
+ }
4975
+ ],
4585
4976
  execute: ({ question }) => {
4586
4977
  throw new HumanInputException(question);
4587
4978
  }
@@ -4590,10 +4981,28 @@ var tellUser = createGadget({
4590
4981
  name: "TellUser",
4591
4982
  description: "Tell the user something important. Set done=true when your work is complete and you want to end the conversation.",
4592
4983
  schema: import_zod.z.object({
4593
- message: import_zod.z.string().describe("The message to display to the user"),
4594
- done: import_zod.z.boolean().describe("Set to true to end the conversation, false to continue"),
4984
+ message: import_zod.z.string().describe("The message to display to the user in Markdown"),
4985
+ done: import_zod.z.boolean().default(false).describe("Set to true to end the conversation, false to continue"),
4595
4986
  type: import_zod.z.enum(["info", "success", "warning", "error"]).default("info").describe("Message type: info, success, warning, or error")
4596
4987
  }),
4988
+ examples: [
4989
+ {
4990
+ comment: "Report successful completion and end the conversation",
4991
+ params: {
4992
+ message: "I've completed the refactoring. All tests pass.",
4993
+ done: true,
4994
+ type: "success"
4995
+ }
4996
+ },
4997
+ {
4998
+ comment: "Warn the user about something without ending",
4999
+ params: {
5000
+ message: "Found 3 files with potential issues. Continuing analysis...",
5001
+ done: false,
5002
+ type: "warning"
5003
+ }
5004
+ }
5005
+ ],
4597
5006
  execute: ({ message, done, type }) => {
4598
5007
  const prefixes = {
4599
5008
  info: "\u2139\uFE0F ",
@@ -4706,6 +5115,9 @@ async function loadGadgets(specifiers, cwd, importer = (specifier) => import(spe
4706
5115
  return gadgets;
4707
5116
  }
4708
5117
 
5118
+ // src/cli/option-helpers.ts
5119
+ var import_commander2 = require("commander");
5120
+
4709
5121
  // src/cli/utils.ts
4710
5122
  var import_chalk2 = __toESM(require("chalk"), 1);
4711
5123
  var import_commander = require("commander");
@@ -4713,6 +5125,44 @@ init_constants2();
4713
5125
 
4714
5126
  // src/cli/ui/formatters.ts
4715
5127
  var import_chalk = __toESM(require("chalk"), 1);
5128
+ var import_marked = require("marked");
5129
+ var import_marked_terminal = require("marked-terminal");
5130
+ var markedConfigured = false;
5131
+ function ensureMarkedConfigured() {
5132
+ if (!markedConfigured) {
5133
+ import_chalk.default.level = process.env.NO_COLOR ? 0 : 3;
5134
+ import_marked.marked.use(
5135
+ (0, import_marked_terminal.markedTerminal)({
5136
+ // Text styling
5137
+ strong: import_chalk.default.bold,
5138
+ em: import_chalk.default.italic,
5139
+ del: import_chalk.default.dim.gray.strikethrough,
5140
+ // Code styling
5141
+ code: import_chalk.default.yellow,
5142
+ codespan: import_chalk.default.yellow,
5143
+ // Headings
5144
+ heading: import_chalk.default.green.bold,
5145
+ firstHeading: import_chalk.default.magenta.underline.bold,
5146
+ // Links
5147
+ link: import_chalk.default.blue,
5148
+ href: import_chalk.default.blue.underline,
5149
+ // Block elements
5150
+ blockquote: import_chalk.default.gray.italic,
5151
+ // List formatting - reduce indentation and add bullet styling
5152
+ tab: 2,
5153
+ // Reduce from default 4 to 2 spaces
5154
+ listitem: import_chalk.default.reset
5155
+ // Keep items readable (no dim)
5156
+ })
5157
+ );
5158
+ markedConfigured = true;
5159
+ }
5160
+ }
5161
+ function renderMarkdown(text) {
5162
+ ensureMarkedConfigured();
5163
+ const rendered = import_marked.marked.parse(text);
5164
+ return rendered.trimEnd();
5165
+ }
4716
5166
  function formatTokens(tokens) {
4717
5167
  return tokens >= 1e3 ? `${(tokens / 1e3).toFixed(1)}k` : `${tokens}`;
4718
5168
  }
@@ -4731,7 +5181,14 @@ function formatCost(cost) {
4731
5181
  function renderSummary(metadata) {
4732
5182
  const parts = [];
4733
5183
  if (metadata.iterations !== void 0) {
4734
- parts.push(import_chalk.default.cyan(`#${metadata.iterations}`));
5184
+ const iterPart = import_chalk.default.cyan(`#${metadata.iterations}`);
5185
+ if (metadata.model) {
5186
+ parts.push(`${iterPart} ${import_chalk.default.magenta(metadata.model)}`);
5187
+ } else {
5188
+ parts.push(iterPart);
5189
+ }
5190
+ } else if (metadata.model) {
5191
+ parts.push(import_chalk.default.magenta(metadata.model));
4735
5192
  }
4736
5193
  if (metadata.usage) {
4737
5194
  const { inputTokens, outputTokens } = metadata.usage;
@@ -4752,22 +5209,128 @@ function renderSummary(metadata) {
4752
5209
  }
4753
5210
  return parts.join(import_chalk.default.dim(" | "));
4754
5211
  }
5212
+ function renderOverallSummary(metadata) {
5213
+ const parts = [];
5214
+ if (metadata.totalTokens !== void 0 && metadata.totalTokens > 0) {
5215
+ parts.push(import_chalk.default.dim("total:") + import_chalk.default.magenta(` ${formatTokens(metadata.totalTokens)}`));
5216
+ }
5217
+ if (metadata.iterations !== void 0 && metadata.iterations > 0) {
5218
+ parts.push(import_chalk.default.cyan(`#${metadata.iterations}`));
5219
+ }
5220
+ if (metadata.elapsedSeconds !== void 0 && metadata.elapsedSeconds > 0) {
5221
+ parts.push(import_chalk.default.dim(`${metadata.elapsedSeconds}s`));
5222
+ }
5223
+ if (metadata.cost !== void 0 && metadata.cost > 0) {
5224
+ parts.push(import_chalk.default.cyan(`$${formatCost(metadata.cost)}`));
5225
+ }
5226
+ if (parts.length === 0) {
5227
+ return null;
5228
+ }
5229
+ return parts.join(import_chalk.default.dim(" | "));
5230
+ }
5231
+ function formatParametersInline(params) {
5232
+ if (!params || Object.keys(params).length === 0) {
5233
+ return "";
5234
+ }
5235
+ return Object.entries(params).map(([key, value]) => {
5236
+ let formatted;
5237
+ if (typeof value === "string") {
5238
+ formatted = value.length > 30 ? `${value.slice(0, 30)}\u2026` : value;
5239
+ } else if (typeof value === "boolean" || typeof value === "number") {
5240
+ formatted = String(value);
5241
+ } else {
5242
+ const json = JSON.stringify(value);
5243
+ formatted = json.length > 30 ? `${json.slice(0, 30)}\u2026` : json;
5244
+ }
5245
+ return `${import_chalk.default.dim(key)}${import_chalk.default.dim("=")}${import_chalk.default.cyan(formatted)}`;
5246
+ }).join(import_chalk.default.dim(", "));
5247
+ }
5248
+ function formatBytes(bytes) {
5249
+ if (bytes < 1024) {
5250
+ return `${bytes} bytes`;
5251
+ }
5252
+ if (bytes < 1024 * 1024) {
5253
+ return `${(bytes / 1024).toFixed(1)} KB`;
5254
+ }
5255
+ return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
5256
+ }
4755
5257
  function formatGadgetSummary(result) {
4756
5258
  const gadgetLabel = import_chalk.default.magenta.bold(result.gadgetName);
4757
5259
  const timeLabel = import_chalk.default.dim(`${Math.round(result.executionTimeMs)}ms`);
5260
+ const paramsStr = formatParametersInline(result.parameters);
5261
+ const paramsLabel = paramsStr ? `${import_chalk.default.dim("(")}${paramsStr}${import_chalk.default.dim(")")}` : "";
4758
5262
  if (result.error) {
4759
- return `${import_chalk.default.red("\u2717")} ${gadgetLabel} ${import_chalk.default.red("error:")} ${result.error} ${timeLabel}`;
5263
+ const errorMsg = result.error.length > 50 ? `${result.error.slice(0, 50)}\u2026` : result.error;
5264
+ return `${import_chalk.default.red("\u2717")} ${gadgetLabel}${paramsLabel} ${import_chalk.default.red("error:")} ${errorMsg} ${timeLabel}`;
5265
+ }
5266
+ let outputLabel;
5267
+ if (result.tokenCount !== void 0 && result.tokenCount > 0) {
5268
+ outputLabel = import_chalk.default.green(`${formatTokens(result.tokenCount)} tokens`);
5269
+ } else if (result.result) {
5270
+ const outputBytes = Buffer.byteLength(result.result, "utf-8");
5271
+ outputLabel = outputBytes > 0 ? import_chalk.default.green(formatBytes(outputBytes)) : import_chalk.default.dim("no output");
5272
+ } else {
5273
+ outputLabel = import_chalk.default.dim("no output");
4760
5274
  }
4761
- if (result.breaksLoop) {
4762
- return `${import_chalk.default.yellow("\u23F9")} ${gadgetLabel} ${import_chalk.default.yellow("finished:")} ${result.result} ${timeLabel}`;
5275
+ const icon = result.breaksLoop ? import_chalk.default.yellow("\u23F9") : import_chalk.default.green("\u2713");
5276
+ const summaryLine = `${icon} ${gadgetLabel}${paramsLabel} ${import_chalk.default.dim("\u2192")} ${outputLabel} ${timeLabel}`;
5277
+ if (result.gadgetName === "TellUser" && result.parameters?.message) {
5278
+ const message = String(result.parameters.message);
5279
+ const rendered = renderMarkdown(message);
5280
+ return `${summaryLine}
5281
+ ${rendered}`;
4763
5282
  }
4764
- const maxLen = 80;
4765
- const shouldTruncate = result.gadgetName !== "TellUser";
4766
- const resultText = result.result ? shouldTruncate && result.result.length > maxLen ? `${result.result.slice(0, maxLen)}...` : result.result : "";
4767
- return `${import_chalk.default.green("\u2713")} ${gadgetLabel} ${import_chalk.default.dim("\u2192")} ${resultText} ${timeLabel}`;
5283
+ return summaryLine;
4768
5284
  }
4769
5285
 
4770
5286
  // src/cli/utils.ts
5287
+ var RARE_EMOJI = [
5288
+ "\u{1F531}",
5289
+ "\u2697\uFE0F",
5290
+ "\u{1F9FF}",
5291
+ "\u{1F530}",
5292
+ "\u269B\uFE0F",
5293
+ "\u{1F3FA}",
5294
+ "\u{1F9EB}",
5295
+ "\u{1F52C}",
5296
+ "\u2695\uFE0F",
5297
+ "\u{1F5DD}\uFE0F",
5298
+ "\u2696\uFE0F",
5299
+ "\u{1F52E}",
5300
+ "\u{1FAAC}",
5301
+ "\u{1F9EC}",
5302
+ "\u2699\uFE0F",
5303
+ "\u{1F529}",
5304
+ "\u{1FA9B}",
5305
+ "\u26CF\uFE0F",
5306
+ "\u{1FA83}",
5307
+ "\u{1F3F9}",
5308
+ "\u{1F6E1}\uFE0F",
5309
+ "\u2694\uFE0F",
5310
+ "\u{1F5E1}\uFE0F",
5311
+ "\u{1FA93}",
5312
+ "\u{1F5C3}\uFE0F",
5313
+ "\u{1F4DC}",
5314
+ "\u{1F4EF}",
5315
+ "\u{1F3B4}",
5316
+ "\u{1F004}",
5317
+ "\u{1F3B2}"
5318
+ ];
5319
+ function generateMarkers() {
5320
+ const pick = (count) => {
5321
+ const result = [];
5322
+ const pool = [...RARE_EMOJI];
5323
+ for (let i = 0; i < count && pool.length > 0; i++) {
5324
+ const idx = Math.floor(Math.random() * pool.length);
5325
+ result.push(pool.splice(idx, 1)[0]);
5326
+ }
5327
+ return result.join("");
5328
+ };
5329
+ return {
5330
+ startPrefix: pick(5),
5331
+ endPrefix: pick(5)
5332
+ };
5333
+ }
4771
5334
  function createNumericParser({
4772
5335
  label,
4773
5336
  integer = false,
@@ -4934,6 +5497,13 @@ var StreamProgress = class {
4934
5497
  if (this.totalStartTime === 0) return 0;
4935
5498
  return Number(((Date.now() - this.totalStartTime) / 1e3).toFixed(1));
4936
5499
  }
5500
+ /**
5501
+ * Get elapsed time in seconds for the current call.
5502
+ * @returns Elapsed time in seconds with 1 decimal place
5503
+ */
5504
+ getCallElapsedSeconds() {
5505
+ return Number(((Date.now() - this.callStartTime) / 1e3).toFixed(1));
5506
+ }
4937
5507
  /**
4938
5508
  * Starts the progress indicator animation after a brief delay.
4939
5509
  */
@@ -4968,7 +5538,12 @@ var StreamProgress = class {
4968
5538
  const elapsed = ((Date.now() - this.callStartTime) / 1e3).toFixed(1);
4969
5539
  const outTokens = this.callOutputTokensEstimated ? Math.round(this.callOutputChars / FALLBACK_CHARS_PER_TOKEN) : this.callOutputTokens;
4970
5540
  const parts = [];
4971
- parts.push(import_chalk2.default.cyan(`#${this.currentIteration}`));
5541
+ const iterPart = import_chalk2.default.cyan(`#${this.currentIteration}`);
5542
+ if (this.model) {
5543
+ parts.push(`${iterPart} ${import_chalk2.default.magenta(this.model)}`);
5544
+ } else {
5545
+ parts.push(iterPart);
5546
+ }
4972
5547
  if (this.callInputTokens > 0) {
4973
5548
  const prefix = this.callInputTokensEstimated ? "~" : "";
4974
5549
  parts.push(import_chalk2.default.dim("\u2191") + import_chalk2.default.yellow(` ${prefix}${formatTokens(this.callInputTokens)}`));
@@ -4981,7 +5556,7 @@ var StreamProgress = class {
4981
5556
  if (this.totalCost > 0) {
4982
5557
  parts.push(import_chalk2.default.cyan(`$${formatCost(this.totalCost)}`));
4983
5558
  }
4984
- this.target.write(`\r${import_chalk2.default.cyan(spinner)} ${parts.join(import_chalk2.default.dim(" | "))}`);
5559
+ this.target.write(`\r${parts.join(import_chalk2.default.dim(" | "))} ${import_chalk2.default.cyan(spinner)}`);
4985
5560
  }
4986
5561
  renderCumulativeMode(spinner) {
4987
5562
  const elapsed = ((Date.now() - this.totalStartTime) / 1e3).toFixed(1);
@@ -4999,7 +5574,7 @@ var StreamProgress = class {
4999
5574
  parts.push(import_chalk2.default.dim("cost:") + import_chalk2.default.cyan(` $${formatCost(this.totalCost)}`));
5000
5575
  }
5001
5576
  parts.push(import_chalk2.default.dim(`${elapsed}s`));
5002
- this.target.write(`\r${import_chalk2.default.cyan(spinner)} ${parts.join(import_chalk2.default.dim(" | "))}`);
5577
+ this.target.write(`\r${parts.join(import_chalk2.default.dim(" | "))} ${import_chalk2.default.cyan(spinner)}`);
5003
5578
  }
5004
5579
  /**
5005
5580
  * Pauses the progress indicator and clears the line.
@@ -5109,15 +5684,91 @@ async function executeAction(action, env) {
5109
5684
  }
5110
5685
  }
5111
5686
 
5112
- // src/cli/agent-command.ts
5113
- var PARAMETER_FORMAT_VALUES = ["json", "yaml", "auto"];
5687
+ // src/cli/option-helpers.ts
5688
+ var PARAMETER_FORMAT_VALUES = ["json", "yaml", "toml", "auto"];
5114
5689
  function parseParameterFormat(value) {
5115
5690
  const normalized = value.toLowerCase();
5116
5691
  if (!PARAMETER_FORMAT_VALUES.includes(normalized)) {
5117
- throw new import_commander2.InvalidArgumentError("Parameter format must be one of 'json', 'yaml', or 'auto'.");
5692
+ throw new import_commander2.InvalidArgumentError(
5693
+ `Parameter format must be one of: ${PARAMETER_FORMAT_VALUES.join(", ")}`
5694
+ );
5118
5695
  }
5119
5696
  return normalized;
5120
5697
  }
5698
+ function addCompleteOptions(cmd, defaults) {
5699
+ return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(
5700
+ OPTION_FLAGS.temperature,
5701
+ OPTION_DESCRIPTIONS.temperature,
5702
+ createNumericParser({ label: "Temperature", min: 0, max: 2 }),
5703
+ defaults?.temperature
5704
+ ).option(
5705
+ OPTION_FLAGS.maxTokens,
5706
+ OPTION_DESCRIPTIONS.maxTokens,
5707
+ createNumericParser({ label: "Max tokens", integer: true, min: 1 }),
5708
+ defaults?.["max-tokens"]
5709
+ );
5710
+ }
5711
+ function addAgentOptions(cmd, defaults) {
5712
+ const gadgetAccumulator = (value, previous = []) => [
5713
+ ...previous,
5714
+ value
5715
+ ];
5716
+ const defaultGadgets = defaults?.gadget ?? [];
5717
+ return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(
5718
+ OPTION_FLAGS.temperature,
5719
+ OPTION_DESCRIPTIONS.temperature,
5720
+ createNumericParser({ label: "Temperature", min: 0, max: 2 }),
5721
+ defaults?.temperature
5722
+ ).option(
5723
+ OPTION_FLAGS.maxIterations,
5724
+ OPTION_DESCRIPTIONS.maxIterations,
5725
+ createNumericParser({ label: "Max iterations", integer: true, min: 1 }),
5726
+ defaults?.["max-iterations"]
5727
+ ).option(OPTION_FLAGS.gadgetModule, OPTION_DESCRIPTIONS.gadgetModule, gadgetAccumulator, [
5728
+ ...defaultGadgets
5729
+ ]).option(
5730
+ OPTION_FLAGS.parameterFormat,
5731
+ OPTION_DESCRIPTIONS.parameterFormat,
5732
+ parseParameterFormat,
5733
+ defaults?.["parameter-format"] ?? DEFAULT_PARAMETER_FORMAT
5734
+ ).option(OPTION_FLAGS.noBuiltins, OPTION_DESCRIPTIONS.noBuiltins, defaults?.builtins !== false).option(
5735
+ OPTION_FLAGS.noBuiltinInteraction,
5736
+ OPTION_DESCRIPTIONS.noBuiltinInteraction,
5737
+ defaults?.["builtin-interaction"] !== false
5738
+ );
5739
+ }
5740
+ function configToCompleteOptions(config) {
5741
+ const result = {};
5742
+ if (config.model !== void 0) result.model = config.model;
5743
+ if (config.system !== void 0) result.system = config.system;
5744
+ if (config.temperature !== void 0) result.temperature = config.temperature;
5745
+ if (config["max-tokens"] !== void 0) result.maxTokens = config["max-tokens"];
5746
+ return result;
5747
+ }
5748
+ function configToAgentOptions(config) {
5749
+ const result = {};
5750
+ if (config.model !== void 0) result.model = config.model;
5751
+ if (config.system !== void 0) result.system = config.system;
5752
+ if (config.temperature !== void 0) result.temperature = config.temperature;
5753
+ if (config["max-iterations"] !== void 0) result.maxIterations = config["max-iterations"];
5754
+ if (config.gadget !== void 0) result.gadget = config.gadget;
5755
+ if (config["parameter-format"] !== void 0) result.parameterFormat = config["parameter-format"];
5756
+ if (config.builtins !== void 0) result.builtins = config.builtins;
5757
+ if (config["builtin-interaction"] !== void 0)
5758
+ result.builtinInteraction = config["builtin-interaction"];
5759
+ return result;
5760
+ }
5761
+
5762
+ // src/cli/agent-command.ts
5763
+ async function promptApproval(env, prompt) {
5764
+ const rl = (0, import_promises.createInterface)({ input: env.stdin, output: env.stderr });
5765
+ try {
5766
+ const answer = await rl.question(prompt);
5767
+ return answer.toLowerCase().startsWith("y");
5768
+ } finally {
5769
+ rl.close();
5770
+ }
5771
+ }
5121
5772
  function createHumanInputHandler(env, progress) {
5122
5773
  const stdout = env.stdout;
5123
5774
  if (!isInteractive(env.stdin) || typeof stdout.isTTY !== "boolean" || !stdout.isTTY) {
@@ -5128,7 +5779,7 @@ function createHumanInputHandler(env, progress) {
5128
5779
  const rl = (0, import_promises.createInterface)({ input: env.stdin, output: env.stdout });
5129
5780
  try {
5130
5781
  const questionLine = question.trim() ? `
5131
- ${question.trim()}` : "";
5782
+ ${renderMarkdown(question.trim())}` : "";
5132
5783
  let isFirst = true;
5133
5784
  while (true) {
5134
5785
  const statsPrompt = progress.formatPrompt();
@@ -5146,7 +5797,7 @@ ${statsPrompt}` : statsPrompt;
5146
5797
  }
5147
5798
  };
5148
5799
  }
5149
- async function handleAgentCommand(promptArg, options, env) {
5800
+ async function executeAgent(promptArg, options, env) {
5150
5801
  const prompt = await resolvePrompt(promptArg, env);
5151
5802
  const client = env.createClient();
5152
5803
  const registry = new GadgetRegistry();
@@ -5168,7 +5819,6 @@ async function handleAgentCommand(promptArg, options, env) {
5168
5819
  const printer = new StreamPrinter(env.stdout);
5169
5820
  const stderrTTY = env.stderr.isTTY === true;
5170
5821
  const progress = new StreamProgress(env.stderr, stderrTTY, client.modelRegistry);
5171
- let finishReason;
5172
5822
  let usage;
5173
5823
  let iterations = 0;
5174
5824
  const countMessagesTokens = async (model, messages) => {
@@ -5179,6 +5829,15 @@ async function handleAgentCommand(promptArg, options, env) {
5179
5829
  return Math.round(totalChars / FALLBACK_CHARS_PER_TOKEN);
5180
5830
  }
5181
5831
  };
5832
+ const countGadgetOutputTokens = async (output) => {
5833
+ if (!output) return void 0;
5834
+ try {
5835
+ const messages = [{ role: "assistant", content: output }];
5836
+ return await client.countTokens(options.model, messages);
5837
+ } catch {
5838
+ return void 0;
5839
+ }
5840
+ };
5182
5841
  const builder = new AgentBuilder(client).withModel(options.model).withLogger(env.createLogger("llmist:cli:agent")).withHooks({
5183
5842
  observers: {
5184
5843
  // onLLMCallStart: Start progress indicator for each LLM call
@@ -5207,7 +5866,6 @@ async function handleAgentCommand(promptArg, options, env) {
5207
5866
  // onLLMCallComplete: Finalize metrics after each LLM call
5208
5867
  // This is where you'd typically log metrics or update dashboards
5209
5868
  onLLMCallComplete: async (context) => {
5210
- finishReason = context.finishReason;
5211
5869
  usage = context.usage;
5212
5870
  iterations = Math.max(iterations, context.iteration + 1);
5213
5871
  if (context.usage) {
@@ -5218,7 +5876,76 @@ async function handleAgentCommand(promptArg, options, env) {
5218
5876
  progress.setOutputTokens(context.usage.outputTokens, false);
5219
5877
  }
5220
5878
  }
5879
+ let callCost;
5880
+ if (context.usage && client.modelRegistry) {
5881
+ try {
5882
+ const modelName = options.model.includes(":") ? options.model.split(":")[1] : options.model;
5883
+ const costResult = client.modelRegistry.estimateCost(
5884
+ modelName,
5885
+ context.usage.inputTokens,
5886
+ context.usage.outputTokens
5887
+ );
5888
+ if (costResult) callCost = costResult.totalCost;
5889
+ } catch {
5890
+ }
5891
+ }
5892
+ const callElapsed = progress.getCallElapsedSeconds();
5221
5893
  progress.endCall(context.usage);
5894
+ if (stderrTTY) {
5895
+ const summary = renderSummary({
5896
+ iterations: context.iteration + 1,
5897
+ model: options.model,
5898
+ usage: context.usage,
5899
+ elapsedSeconds: callElapsed,
5900
+ cost: callCost,
5901
+ finishReason: context.finishReason
5902
+ });
5903
+ if (summary) {
5904
+ env.stderr.write(`${summary}
5905
+ `);
5906
+ }
5907
+ }
5908
+ }
5909
+ },
5910
+ // SHOWCASE: Controller-based approval gating for dangerous gadgets
5911
+ //
5912
+ // This demonstrates how to add safety layers WITHOUT modifying gadgets.
5913
+ // The RunCommand gadget is simple - it just executes commands. The CLI
5914
+ // adds the approval flow externally via beforeGadgetExecution controller.
5915
+ //
5916
+ // This pattern is composable: you can apply the same gating logic to
5917
+ // any gadget (DeleteFile, SendEmail, etc.) without changing the gadgets.
5918
+ controllers: {
5919
+ beforeGadgetExecution: async (ctx) => {
5920
+ if (ctx.gadgetName !== "RunCommand") {
5921
+ return { action: "proceed" };
5922
+ }
5923
+ const stdinTTY = isInteractive(env.stdin);
5924
+ const stderrTTY2 = env.stderr.isTTY === true;
5925
+ if (!stdinTTY || !stderrTTY2) {
5926
+ return {
5927
+ action: "skip",
5928
+ syntheticResult: "status=denied\n\nRunCommand requires interactive approval. Run in a terminal to approve commands."
5929
+ };
5930
+ }
5931
+ const command = ctx.parameters.command;
5932
+ progress.pause();
5933
+ env.stderr.write(`
5934
+ ${import_chalk3.default.yellow("\u{1F512} Execute:")} ${command}
5935
+ `);
5936
+ const approved = await promptApproval(env, " Approve? [y/n] ");
5937
+ if (!approved) {
5938
+ env.stderr.write(` ${import_chalk3.default.red("\u2717 Denied")}
5939
+
5940
+ `);
5941
+ return {
5942
+ action: "skip",
5943
+ syntheticResult: "status=denied\n\nCommand denied by user. Ask what they'd like to do instead."
5944
+ };
5945
+ }
5946
+ env.stderr.write(` ${import_chalk3.default.green("\u2713 Approved")}
5947
+ `);
5948
+ return { action: "proceed" };
5222
5949
  }
5223
5950
  }
5224
5951
  });
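
The approval flow above is wired only for RunCommand, but the same beforeGadgetExecution controller can gate any gadget, as the SHOWCASE comment suggests. A standalone sketch follows, assuming an AgentBuilder instance named builder; the "DeleteFile" gadget and the confirm() helper are hypothetical, and only the { action: "proceed" } / { action: "skip", syntheticResult } return shape is taken from the code above (it is not meant to compose with the hooks already registered there).

// Sketch only: "DeleteFile" and confirm() are hypothetical stand-ins.
const confirm = async (question) => false; // stand-in for an interactive approval prompt

builder.withHooks({
  controllers: {
    beforeGadgetExecution: async (ctx) => {
      if (ctx.gadgetName !== "DeleteFile") return { action: "proceed" };
      const approved = await confirm(`Delete ${ctx.parameters.path}?`);
      return approved
        ? { action: "proceed" }
        : { action: "skip", syntheticResult: "status=denied\n\nDeletion denied by user." };
    }
  }
});
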
@@ -5239,6 +5966,10 @@ async function handleAgentCommand(promptArg, options, env) {
5239
5966
  if (gadgets.length > 0) {
5240
5967
  builder.withGadgets(...gadgets);
5241
5968
  }
5969
+ builder.withParameterFormat(options.parameterFormat);
5970
+ const markers = generateMarkers();
5971
+ builder.withGadgetStartPrefix(markers.startPrefix);
5972
+ builder.withGadgetEndPrefix(markers.endPrefix);
5242
5973
  const agent = builder.ask(prompt);
5243
5974
  for await (const event of agent.run()) {
5244
5975
  if (event.type === "text") {
@@ -5247,20 +5978,22 @@ async function handleAgentCommand(promptArg, options, env) {
5247
5978
  } else if (event.type === "gadget_result") {
5248
5979
  progress.pause();
5249
5980
  if (stderrTTY) {
5250
- env.stderr.write(`${formatGadgetSummary(event.result)}
5981
+ const tokenCount = await countGadgetOutputTokens(event.result.result);
5982
+ env.stderr.write(`${formatGadgetSummary({ ...event.result, tokenCount })}
5251
5983
  `);
5252
5984
  }
5253
5985
  }
5254
5986
  }
5255
5987
  progress.complete();
5256
5988
  printer.ensureNewline();
5257
- if (stderrTTY) {
5258
- const summary = renderSummary({
5259
- finishReason,
5260
- usage,
5989
+ if (stderrTTY && iterations > 1) {
5990
+ env.stderr.write(`${import_chalk3.default.dim("\u2500".repeat(40))}
5991
+ `);
5992
+ const summary = renderOverallSummary({
5993
+ totalTokens: usage?.totalTokens,
5261
5994
  iterations,
5262
- cost: progress.getTotalCost(),
5263
- elapsedSeconds: progress.getTotalElapsedSeconds()
5995
+ elapsedSeconds: progress.getTotalElapsedSeconds(),
5996
+ cost: progress.getTotalCost()
5264
5997
  });
5265
5998
  if (summary) {
5266
5999
  env.stderr.write(`${summary}
@@ -5268,27 +6001,11 @@ async function handleAgentCommand(promptArg, options, env) {
5268
6001
  }
5269
6002
  }
5270
6003
  }
5271
- function registerAgentCommand(program, env) {
5272
- program.command(COMMANDS.agent).description("Run the llmist agent loop with optional gadgets.").argument("[prompt]", "Prompt for the agent loop. Falls back to stdin when available.").option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt).option(
5273
- OPTION_FLAGS.temperature,
5274
- OPTION_DESCRIPTIONS.temperature,
5275
- createNumericParser({ label: "Temperature", min: 0, max: 2 })
5276
- ).option(
5277
- OPTION_FLAGS.maxIterations,
5278
- OPTION_DESCRIPTIONS.maxIterations,
5279
- createNumericParser({ label: "Max iterations", integer: true, min: 1 })
5280
- ).option(
5281
- OPTION_FLAGS.gadgetModule,
5282
- OPTION_DESCRIPTIONS.gadgetModule,
5283
- (value, previous = []) => [...previous, value],
5284
- []
5285
- ).option(
5286
- OPTION_FLAGS.parameterFormat,
5287
- OPTION_DESCRIPTIONS.parameterFormat,
5288
- parseParameterFormat,
5289
- DEFAULT_PARAMETER_FORMAT
5290
- ).option(OPTION_FLAGS.noBuiltins, OPTION_DESCRIPTIONS.noBuiltins).option(OPTION_FLAGS.noBuiltinInteraction, OPTION_DESCRIPTIONS.noBuiltinInteraction).action(
5291
- (prompt, options) => executeAction(() => handleAgentCommand(prompt, options, env), env)
6004
+ function registerAgentCommand(program, env, config) {
6005
+ const cmd = program.command(COMMANDS.agent).description("Run the llmist agent loop with optional gadgets.").argument("[prompt]", "Prompt for the agent loop. Falls back to stdin when available.");
6006
+ addAgentOptions(cmd, config);
6007
+ cmd.action(
6008
+ (prompt, options) => executeAction(() => executeAgent(prompt, options, env), env)
5292
6009
  );
5293
6010
  }
5294
6011
 
@@ -5296,7 +6013,7 @@ function registerAgentCommand(program, env) {
5296
6013
  init_messages();
5297
6014
  init_model_shortcuts();
5298
6015
  init_constants2();
5299
- async function handleCompleteCommand(promptArg, options, env) {
6016
+ async function executeComplete(promptArg, options, env) {
5300
6017
  const prompt = await resolvePrompt(promptArg, env);
5301
6018
  const client = env.createClient();
5302
6019
  const model = resolveModel(options.model);
@@ -5350,25 +6067,307 @@ async function handleCompleteCommand(promptArg, options, env) {
5350
6067
  }
5351
6068
  }
5352
6069
  }
5353
- function registerCompleteCommand(program, env) {
5354
- program.command(COMMANDS.complete).description("Stream a single completion from a specified model.").argument("[prompt]", "Prompt to send to the LLM. If omitted, stdin is used when available.").option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt).option(
5355
- OPTION_FLAGS.temperature,
5356
- OPTION_DESCRIPTIONS.temperature,
5357
- createNumericParser({ label: "Temperature", min: 0, max: 2 })
5358
- ).option(
5359
- OPTION_FLAGS.maxTokens,
5360
- OPTION_DESCRIPTIONS.maxTokens,
5361
- createNumericParser({ label: "Max tokens", integer: true, min: 1 })
5362
- ).action(
5363
- (prompt, options) => executeAction(
5364
- () => handleCompleteCommand(prompt, options, env),
5365
- env
5366
- )
6070
+ function registerCompleteCommand(program, env, config) {
6071
+ const cmd = program.command(COMMANDS.complete).description("Stream a single completion from a specified model.").argument("[prompt]", "Prompt to send to the LLM. If omitted, stdin is used when available.");
6072
+ addCompleteOptions(cmd, config);
6073
+ cmd.action(
6074
+ (prompt, options) => executeAction(() => executeComplete(prompt, options, env), env)
5367
6075
  );
5368
6076
  }
5369
6077
 
6078
+ // src/cli/config.ts
6079
+ var import_node_fs3 = require("fs");
6080
+ var import_node_os = require("os");
6081
+ var import_node_path3 = require("path");
6082
+ var import_js_toml2 = require("js-toml");
6083
+ var GLOBAL_CONFIG_KEYS = /* @__PURE__ */ new Set(["log-level", "log-file"]);
6084
+ var VALID_LOG_LEVELS = ["silly", "trace", "debug", "info", "warn", "error", "fatal"];
6085
+ var COMPLETE_CONFIG_KEYS = /* @__PURE__ */ new Set(["model", "system", "temperature", "max-tokens"]);
6086
+ var AGENT_CONFIG_KEYS = /* @__PURE__ */ new Set([
6087
+ "model",
6088
+ "system",
6089
+ "temperature",
6090
+ "max-iterations",
6091
+ "gadget",
6092
+ "parameter-format",
6093
+ "builtins",
6094
+ "builtin-interaction"
6095
+ ]);
6096
+ var CUSTOM_CONFIG_KEYS = /* @__PURE__ */ new Set([
6097
+ ...COMPLETE_CONFIG_KEYS,
6098
+ ...AGENT_CONFIG_KEYS,
6099
+ "type",
6100
+ "description"
6101
+ ]);
6102
+ var VALID_PARAMETER_FORMATS = ["json", "yaml", "toml", "auto"];
6103
+ function getConfigPath() {
6104
+ return (0, import_node_path3.join)((0, import_node_os.homedir)(), ".llmist", "cli.toml");
6105
+ }
6106
+ var ConfigError = class extends Error {
6107
+ constructor(message, path2) {
6108
+ super(path2 ? `${path2}: ${message}` : message);
6109
+ this.path = path2;
6110
+ this.name = "ConfigError";
6111
+ }
6112
+ };
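
A brief illustration of how these errors read, assuming the default path returned by getConfigPath(); the triggering value is hypothetical.

// Hypothetical: a validation failure surfaced while loading the CLI config
throw new ConfigError("[agent].temperature must be <= 2", getConfigPath());
// message: "<home>/.llmist/cli.toml: [agent].temperature must be <= 2"
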
6113
+ function validateString(value, key, section) {
6114
+ if (typeof value !== "string") {
6115
+ throw new ConfigError(`[${section}].${key} must be a string`);
6116
+ }
6117
+ return value;
6118
+ }
6119
+ function validateNumber(value, key, section, opts) {
6120
+ if (typeof value !== "number") {
6121
+ throw new ConfigError(`[${section}].${key} must be a number`);
6122
+ }
6123
+ if (opts?.integer && !Number.isInteger(value)) {
6124
+ throw new ConfigError(`[${section}].${key} must be an integer`);
6125
+ }
6126
+ if (opts?.min !== void 0 && value < opts.min) {
6127
+ throw new ConfigError(`[${section}].${key} must be >= ${opts.min}`);
6128
+ }
6129
+ if (opts?.max !== void 0 && value > opts.max) {
6130
+ throw new ConfigError(`[${section}].${key} must be <= ${opts.max}`);
6131
+ }
6132
+ return value;
6133
+ }
6134
+ function validateBoolean(value, key, section) {
6135
+ if (typeof value !== "boolean") {
6136
+ throw new ConfigError(`[${section}].${key} must be a boolean`);
6137
+ }
6138
+ return value;
6139
+ }
6140
+ function validateStringArray(value, key, section) {
6141
+ if (!Array.isArray(value)) {
6142
+ throw new ConfigError(`[${section}].${key} must be an array`);
6143
+ }
6144
+ for (let i = 0; i < value.length; i++) {
6145
+ if (typeof value[i] !== "string") {
6146
+ throw new ConfigError(`[${section}].${key}[${i}] must be a string`);
6147
+ }
6148
+ }
6149
+ return value;
6150
+ }
6151
+ function validateBaseConfig(raw, section) {
6152
+ const result = {};
6153
+ if ("model" in raw) {
6154
+ result.model = validateString(raw.model, "model", section);
6155
+ }
6156
+ if ("system" in raw) {
6157
+ result.system = validateString(raw.system, "system", section);
6158
+ }
6159
+ if ("temperature" in raw) {
6160
+ result.temperature = validateNumber(raw.temperature, "temperature", section, {
6161
+ min: 0,
6162
+ max: 2
6163
+ });
6164
+ }
6165
+ return result;
6166
+ }
6167
+ function validateGlobalConfig(raw, section) {
6168
+ if (typeof raw !== "object" || raw === null) {
6169
+ throw new ConfigError(`[${section}] must be a table`);
6170
+ }
6171
+ const rawObj = raw;
6172
+ for (const key of Object.keys(rawObj)) {
6173
+ if (!GLOBAL_CONFIG_KEYS.has(key)) {
6174
+ throw new ConfigError(`[${section}].${key} is not a valid option`);
6175
+ }
6176
+ }
6177
+ const result = {};
6178
+ if ("log-level" in rawObj) {
6179
+ const level = validateString(rawObj["log-level"], "log-level", section);
6180
+ if (!VALID_LOG_LEVELS.includes(level)) {
6181
+ throw new ConfigError(
6182
+ `[${section}].log-level must be one of: ${VALID_LOG_LEVELS.join(", ")}`
6183
+ );
6184
+ }
6185
+ result["log-level"] = level;
6186
+ }
6187
+ if ("log-file" in rawObj) {
6188
+ result["log-file"] = validateString(rawObj["log-file"], "log-file", section);
6189
+ }
6190
+ return result;
6191
+ }
6192
+ function validateCompleteConfig(raw, section) {
6193
+ if (typeof raw !== "object" || raw === null) {
6194
+ throw new ConfigError(`[${section}] must be a table`);
6195
+ }
6196
+ const rawObj = raw;
6197
+ for (const key of Object.keys(rawObj)) {
6198
+ if (!COMPLETE_CONFIG_KEYS.has(key)) {
6199
+ throw new ConfigError(`[${section}].${key} is not a valid option`);
6200
+ }
6201
+ }
6202
+ const result = { ...validateBaseConfig(rawObj, section) };
6203
+ if ("max-tokens" in rawObj) {
6204
+ result["max-tokens"] = validateNumber(rawObj["max-tokens"], "max-tokens", section, {
6205
+ integer: true,
6206
+ min: 1
6207
+ });
6208
+ }
6209
+ return result;
6210
+ }
6211
+ function validateAgentConfig(raw, section) {
6212
+ if (typeof raw !== "object" || raw === null) {
6213
+ throw new ConfigError(`[${section}] must be a table`);
6214
+ }
6215
+ const rawObj = raw;
6216
+ for (const key of Object.keys(rawObj)) {
6217
+ if (!AGENT_CONFIG_KEYS.has(key)) {
6218
+ throw new ConfigError(`[${section}].${key} is not a valid option`);
6219
+ }
6220
+ }
6221
+ const result = { ...validateBaseConfig(rawObj, section) };
6222
+ if ("max-iterations" in rawObj) {
6223
+ result["max-iterations"] = validateNumber(rawObj["max-iterations"], "max-iterations", section, {
6224
+ integer: true,
6225
+ min: 1
6226
+ });
6227
+ }
6228
+ if ("gadget" in rawObj) {
6229
+ result.gadget = validateStringArray(rawObj.gadget, "gadget", section);
6230
+ }
6231
+ if ("parameter-format" in rawObj) {
6232
+ const format = validateString(rawObj["parameter-format"], "parameter-format", section);
6233
+ if (!VALID_PARAMETER_FORMATS.includes(format)) {
6234
+ throw new ConfigError(
6235
+ `[${section}].parameter-format must be one of: ${VALID_PARAMETER_FORMATS.join(", ")}`
6236
+ );
6237
+ }
6238
+ result["parameter-format"] = format;
6239
+ }
6240
+ if ("builtins" in rawObj) {
6241
+ result.builtins = validateBoolean(rawObj.builtins, "builtins", section);
6242
+ }
6243
+ if ("builtin-interaction" in rawObj) {
6244
+ result["builtin-interaction"] = validateBoolean(
6245
+ rawObj["builtin-interaction"],
6246
+ "builtin-interaction",
6247
+ section
6248
+ );
6249
+ }
6250
+ return result;
6251
+ }
6252
+ function validateCustomConfig(raw, section) {
6253
+ if (typeof raw !== "object" || raw === null) {
6254
+ throw new ConfigError(`[${section}] must be a table`);
6255
+ }
6256
+ const rawObj = raw;
6257
+ for (const key of Object.keys(rawObj)) {
6258
+ if (!CUSTOM_CONFIG_KEYS.has(key)) {
6259
+ throw new ConfigError(`[${section}].${key} is not a valid option`);
6260
+ }
6261
+ }
6262
+ let type = "agent";
6263
+ if ("type" in rawObj) {
6264
+ const typeValue = validateString(rawObj.type, "type", section);
6265
+ if (typeValue !== "agent" && typeValue !== "complete") {
6266
+ throw new ConfigError(`[${section}].type must be "agent" or "complete"`);
6267
+ }
6268
+ type = typeValue;
6269
+ }
6270
+ const result = {
6271
+ ...validateBaseConfig(rawObj, section),
6272
+ type
6273
+ };
6274
+ if ("description" in rawObj) {
6275
+ result.description = validateString(rawObj.description, "description", section);
6276
+ }
6277
+ if ("max-iterations" in rawObj) {
6278
+ result["max-iterations"] = validateNumber(rawObj["max-iterations"], "max-iterations", section, {
6279
+ integer: true,
6280
+ min: 1
6281
+ });
6282
+ }
6283
+ if ("gadget" in rawObj) {
6284
+ result.gadget = validateStringArray(rawObj.gadget, "gadget", section);
6285
+ }
6286
+ if ("parameter-format" in rawObj) {
6287
+ const format = validateString(rawObj["parameter-format"], "parameter-format", section);
6288
+ if (!VALID_PARAMETER_FORMATS.includes(format)) {
6289
+ throw new ConfigError(
6290
+ `[${section}].parameter-format must be one of: ${VALID_PARAMETER_FORMATS.join(", ")}`
6291
+ );
6292
+ }
6293
+ result["parameter-format"] = format;
6294
+ }
6295
+ if ("builtins" in rawObj) {
6296
+ result.builtins = validateBoolean(rawObj.builtins, "builtins", section);
6297
+ }
6298
+ if ("builtin-interaction" in rawObj) {
6299
+ result["builtin-interaction"] = validateBoolean(
6300
+ rawObj["builtin-interaction"],
6301
+ "builtin-interaction",
6302
+ section
6303
+ );
6304
+ }
6305
+ if ("max-tokens" in rawObj) {
6306
+ result["max-tokens"] = validateNumber(rawObj["max-tokens"], "max-tokens", section, {
6307
+ integer: true,
6308
+ min: 1
6309
+ });
6310
+ }
6311
+ return result;
6312
+ }
6313
+ function validateConfig(raw, configPath) {
6314
+ if (typeof raw !== "object" || raw === null) {
6315
+ throw new ConfigError("Config must be a TOML table", configPath);
6316
+ }
6317
+ const rawObj = raw;
6318
+ const result = {};
6319
+ for (const [key, value] of Object.entries(rawObj)) {
6320
+ try {
6321
+ if (key === "global") {
6322
+ result.global = validateGlobalConfig(value, key);
6323
+ } else if (key === "complete") {
6324
+ result.complete = validateCompleteConfig(value, key);
6325
+ } else if (key === "agent") {
6326
+ result.agent = validateAgentConfig(value, key);
6327
+ } else {
6328
+ result[key] = validateCustomConfig(value, key);
6329
+ }
6330
+ } catch (error) {
6331
+ if (error instanceof ConfigError) {
6332
+ throw new ConfigError(error.message, configPath);
6333
+ }
6334
+ throw error;
6335
+ }
6336
+ }
6337
+ return result;
6338
+ }
6339
+ function loadConfig() {
6340
+ const configPath = getConfigPath();
6341
+ if (!(0, import_node_fs3.existsSync)(configPath)) {
6342
+ return {};
6343
+ }
6344
+ let content;
6345
+ try {
6346
+ content = (0, import_node_fs3.readFileSync)(configPath, "utf-8");
6347
+ } catch (error) {
6348
+ throw new ConfigError(
6349
+ `Failed to read config file: ${error instanceof Error ? error.message : "Unknown error"}`,
6350
+ configPath
6351
+ );
6352
+ }
6353
+ let raw;
6354
+ try {
6355
+ raw = (0, import_js_toml2.load)(content);
6356
+ } catch (error) {
6357
+ throw new ConfigError(
6358
+ `Invalid TOML syntax: ${error instanceof Error ? error.message : "Unknown error"}`,
6359
+ configPath
6360
+ );
6361
+ }
6362
+ return validateConfig(raw, configPath);
6363
+ }
6364
+ function getCustomCommandNames(config) {
6365
+ const reserved = /* @__PURE__ */ new Set(["global", "complete", "agent"]);
6366
+ return Object.keys(config).filter((key) => !reserved.has(key));
6367
+ }
6368
+
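
Putting the validators together, a config file shaped like the sketch below should load cleanly from ~/.llmist/cli.toml. The model id, gadget path, and the [review] section are hypothetical; every key comes from the key sets defined above, and any non-reserved section name becomes a custom subcommand.

const { load } = require("js-toml"); // same parser loadConfig() uses

const exampleCliToml = `
[global]
log-level = "info"

[complete]
model = "gpt-4o"          # hypothetical model id
temperature = 0.3
max-tokens = 1024

[agent]
max-iterations = 8
parameter-format = "toml"
builtins = true

[review]                  # non-reserved section -> custom subcommand
type = "agent"
description = "Review a diff"
gadget = ["./gadgets/read-file.js"]   # hypothetical gadget module path
`;

load(exampleCliToml); // parses; validateConfig() accepts these sections and keys
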
5370
6369
  // src/cli/models-command.ts
5371
- var import_chalk3 = __toESM(require("chalk"), 1);
6370
+ var import_chalk4 = __toESM(require("chalk"), 1);
5372
6371
  init_model_shortcuts();
5373
6372
  async function handleModelsCommand(options, env) {
5374
6373
  const client = env.createClient();
@@ -5388,13 +6387,13 @@ function renderTable(models, verbose, stream2) {
5388
6387
  }
5389
6388
  grouped.get(provider).push(model);
5390
6389
  }
5391
- stream2.write(import_chalk3.default.bold.cyan("\nAvailable Models\n"));
5392
- stream2.write(import_chalk3.default.cyan("=".repeat(80)) + "\n\n");
6390
+ stream2.write(import_chalk4.default.bold.cyan("\nAvailable Models\n"));
6391
+ stream2.write(import_chalk4.default.cyan("=".repeat(80)) + "\n\n");
5393
6392
  const providers = Array.from(grouped.keys()).sort();
5394
6393
  for (const provider of providers) {
5395
6394
  const providerModels = grouped.get(provider);
5396
6395
  const providerName = provider.charAt(0).toUpperCase() + provider.slice(1);
5397
- stream2.write(import_chalk3.default.bold.yellow(`${providerName} Models
6396
+ stream2.write(import_chalk4.default.bold.yellow(`${providerName} Models
5398
6397
  `));
5399
6398
  if (verbose) {
5400
6399
  renderVerboseTable(providerModels, stream2);
@@ -5403,11 +6402,11 @@ function renderTable(models, verbose, stream2) {
5403
6402
  }
5404
6403
  stream2.write("\n");
5405
6404
  }
5406
- stream2.write(import_chalk3.default.bold.magenta("Model Shortcuts\n"));
5407
- stream2.write(import_chalk3.default.dim("\u2500".repeat(80)) + "\n");
6405
+ stream2.write(import_chalk4.default.bold.magenta("Model Shortcuts\n"));
6406
+ stream2.write(import_chalk4.default.dim("\u2500".repeat(80)) + "\n");
5408
6407
  const shortcuts = Object.entries(MODEL_ALIASES).sort((a, b) => a[0].localeCompare(b[0]));
5409
6408
  for (const [shortcut, fullName] of shortcuts) {
5410
- stream2.write(import_chalk3.default.cyan(` ${shortcut.padEnd(15)}`) + import_chalk3.default.dim(" \u2192 ") + import_chalk3.default.white(fullName) + "\n");
6409
+ stream2.write(import_chalk4.default.cyan(` ${shortcut.padEnd(15)}`) + import_chalk4.default.dim(" \u2192 ") + import_chalk4.default.white(fullName) + "\n");
5411
6410
  }
5412
6411
  stream2.write("\n");
5413
6412
  }
@@ -5417,45 +6416,45 @@ function renderCompactTable(models, stream2) {
5417
6416
  const contextWidth = 13;
5418
6417
  const inputWidth = 10;
5419
6418
  const outputWidth = 10;
5420
- stream2.write(import_chalk3.default.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
6419
+ stream2.write(import_chalk4.default.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
5421
6420
  stream2.write(
5422
- import_chalk3.default.bold(
6421
+ import_chalk4.default.bold(
5423
6422
  "Model ID".padEnd(idWidth) + " " + "Display Name".padEnd(nameWidth) + " " + "Context".padEnd(contextWidth) + " " + "Input".padEnd(inputWidth) + " " + "Output".padEnd(outputWidth)
5424
6423
  ) + "\n"
5425
6424
  );
5426
- stream2.write(import_chalk3.default.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
6425
+ stream2.write(import_chalk4.default.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
5427
6426
  for (const model of models) {
5428
6427
  const contextFormatted = formatTokens2(model.contextWindow);
5429
6428
  const inputPrice = `$${model.pricing.input.toFixed(2)}`;
5430
6429
  const outputPrice = `$${model.pricing.output.toFixed(2)}`;
5431
6430
  stream2.write(
5432
- import_chalk3.default.green(model.modelId.padEnd(idWidth)) + " " + import_chalk3.default.white(model.displayName.padEnd(nameWidth)) + " " + import_chalk3.default.yellow(contextFormatted.padEnd(contextWidth)) + " " + import_chalk3.default.cyan(inputPrice.padEnd(inputWidth)) + " " + import_chalk3.default.cyan(outputPrice.padEnd(outputWidth)) + "\n"
6431
+ import_chalk4.default.green(model.modelId.padEnd(idWidth)) + " " + import_chalk4.default.white(model.displayName.padEnd(nameWidth)) + " " + import_chalk4.default.yellow(contextFormatted.padEnd(contextWidth)) + " " + import_chalk4.default.cyan(inputPrice.padEnd(inputWidth)) + " " + import_chalk4.default.cyan(outputPrice.padEnd(outputWidth)) + "\n"
5433
6432
  );
5434
6433
  }
5435
- stream2.write(import_chalk3.default.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
5436
- stream2.write(import_chalk3.default.dim(` * Prices are per 1M tokens
6434
+ stream2.write(import_chalk4.default.dim("\u2500".repeat(idWidth + nameWidth + contextWidth + inputWidth + outputWidth + 8)) + "\n");
6435
+ stream2.write(import_chalk4.default.dim(` * Prices are per 1M tokens
5437
6436
  `));
5438
6437
  }
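
The prices shown here are per 1M tokens, which is presumably also what drives the per-call cost estimates rendered earlier. A rough sketch of that arithmetic follows; estimateCost's exact logic (for example, cached-input pricing) may differ, and the numbers are illustrative only.

// Rough estimate from per-1M-token prices (illustrative values).
const estimateCostSketch = (inputTokens, outputTokens, pricing) =>
  (inputTokens / 1e6) * pricing.input + (outputTokens / 1e6) * pricing.output;

estimateCostSketch(12_000, 800, { input: 2.5, output: 10 }); // ≈ $0.038
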
5439
6438
  function renderVerboseTable(models, stream2) {
5440
6439
  for (const model of models) {
5441
- stream2.write(import_chalk3.default.bold.green(`
6440
+ stream2.write(import_chalk4.default.bold.green(`
5442
6441
  ${model.modelId}
5443
6442
  `));
5444
- stream2.write(import_chalk3.default.dim(" " + "\u2500".repeat(60)) + "\n");
5445
- stream2.write(` ${import_chalk3.default.dim("Name:")} ${import_chalk3.default.white(model.displayName)}
6443
+ stream2.write(import_chalk4.default.dim(" " + "\u2500".repeat(60)) + "\n");
6444
+ stream2.write(` ${import_chalk4.default.dim("Name:")} ${import_chalk4.default.white(model.displayName)}
5446
6445
  `);
5447
- stream2.write(` ${import_chalk3.default.dim("Context:")} ${import_chalk3.default.yellow(formatTokens2(model.contextWindow))}
6446
+ stream2.write(` ${import_chalk4.default.dim("Context:")} ${import_chalk4.default.yellow(formatTokens2(model.contextWindow))}
5448
6447
  `);
5449
- stream2.write(` ${import_chalk3.default.dim("Max Output:")} ${import_chalk3.default.yellow(formatTokens2(model.maxOutputTokens))}
6448
+ stream2.write(` ${import_chalk4.default.dim("Max Output:")} ${import_chalk4.default.yellow(formatTokens2(model.maxOutputTokens))}
5450
6449
  `);
5451
- stream2.write(` ${import_chalk3.default.dim("Pricing:")} ${import_chalk3.default.cyan(`$${model.pricing.input.toFixed(2)} input`)} ${import_chalk3.default.dim("/")} ${import_chalk3.default.cyan(`$${model.pricing.output.toFixed(2)} output`)} ${import_chalk3.default.dim("(per 1M tokens)")}
6450
+ stream2.write(` ${import_chalk4.default.dim("Pricing:")} ${import_chalk4.default.cyan(`$${model.pricing.input.toFixed(2)} input`)} ${import_chalk4.default.dim("/")} ${import_chalk4.default.cyan(`$${model.pricing.output.toFixed(2)} output`)} ${import_chalk4.default.dim("(per 1M tokens)")}
5452
6451
  `);
5453
6452
  if (model.pricing.cachedInput !== void 0) {
5454
- stream2.write(` ${import_chalk3.default.dim("Cached Input:")} ${import_chalk3.default.cyan(`$${model.pricing.cachedInput.toFixed(2)} per 1M tokens`)}
6453
+ stream2.write(` ${import_chalk4.default.dim("Cached Input:")} ${import_chalk4.default.cyan(`$${model.pricing.cachedInput.toFixed(2)} per 1M tokens`)}
5455
6454
  `);
5456
6455
  }
5457
6456
  if (model.knowledgeCutoff) {
5458
- stream2.write(` ${import_chalk3.default.dim("Knowledge:")} ${model.knowledgeCutoff}
6457
+ stream2.write(` ${import_chalk4.default.dim("Knowledge:")} ${model.knowledgeCutoff}
5459
6458
  `);
5460
6459
  }
5461
6460
  const features = [];
@@ -5466,20 +6465,20 @@ function renderVerboseTable(models, stream2) {
5466
6465
  if (model.features.structuredOutputs) features.push("structured-outputs");
5467
6466
  if (model.features.fineTuning) features.push("fine-tuning");
5468
6467
  if (features.length > 0) {
5469
- stream2.write(` ${import_chalk3.default.dim("Features:")} ${import_chalk3.default.blue(features.join(", "))}
6468
+ stream2.write(` ${import_chalk4.default.dim("Features:")} ${import_chalk4.default.blue(features.join(", "))}
5470
6469
  `);
5471
6470
  }
5472
6471
  if (model.metadata) {
5473
6472
  if (model.metadata.family) {
5474
- stream2.write(` ${import_chalk3.default.dim("Family:")} ${model.metadata.family}
6473
+ stream2.write(` ${import_chalk4.default.dim("Family:")} ${model.metadata.family}
5475
6474
  `);
5476
6475
  }
5477
6476
  if (model.metadata.releaseDate) {
5478
- stream2.write(` ${import_chalk3.default.dim("Released:")} ${model.metadata.releaseDate}
6477
+ stream2.write(` ${import_chalk4.default.dim("Released:")} ${model.metadata.releaseDate}
5479
6478
  `);
5480
6479
  }
5481
6480
  if (model.metadata.notes) {
5482
- stream2.write(` ${import_chalk3.default.dim("Notes:")} ${import_chalk3.default.italic(model.metadata.notes)}
6481
+ stream2.write(` ${import_chalk4.default.dim("Notes:")} ${import_chalk4.default.italic(model.metadata.notes)}
5483
6482
  `);
5484
6483
  }
5485
6484
  }
@@ -5527,9 +6526,41 @@ function registerModelsCommand(program, env) {
5527
6526
  );
5528
6527
  }
5529
6528
 
6529
+ // src/cli/custom-command.ts
6530
+ function registerCustomCommand(program, name, config, env) {
6531
+ const type = config.type ?? "agent";
6532
+ const description = config.description ?? `Custom ${type} command`;
6533
+ const cmd = program.command(name).description(description).argument("[prompt]", "Prompt for the command. Falls back to stdin when available.");
6534
+ if (type === "complete") {
6535
+ addCompleteOptions(cmd, config);
6536
+ cmd.action(
6537
+ (prompt, cliOptions) => executeAction(async () => {
6538
+ const configDefaults = configToCompleteOptions(config);
6539
+ const options = {
6540
+ ...configDefaults,
6541
+ ...cliOptions
6542
+ };
6543
+ await executeComplete(prompt, options, env);
6544
+ }, env)
6545
+ );
6546
+ } else {
6547
+ addAgentOptions(cmd, config);
6548
+ cmd.action(
6549
+ (prompt, cliOptions) => executeAction(async () => {
6550
+ const configDefaults = configToAgentOptions(config);
6551
+ const options = {
6552
+ ...configDefaults,
6553
+ ...cliOptions
6554
+ };
6555
+ await executeAgent(prompt, options, env);
6556
+ }, env)
6557
+ );
6558
+ }
6559
+ }
6560
+
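
For reference, a sketch of what registration looks like for a hypothetical [review] config section (values as validateCustomConfig would return them). The config values act as option defaults; flags given on the command line still win, because cliOptions are spread after the config defaults in the action handlers above.

// Hypothetical custom section -> registers `llmist review [prompt]`
const reviewConfig = {
  type: "agent",
  description: "Review a diff with the agent loop",
  model: "gpt-4o",          // placeholder model id
  "max-iterations": 5
};
registerCustomCommand(program, "review", reviewConfig, env);
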
5530
6561
  // src/cli/environment.ts
5531
6562
  var import_node_readline = __toESM(require("readline"), 1);
5532
- var import_chalk4 = __toESM(require("chalk"), 1);
6563
+ var import_chalk5 = __toESM(require("chalk"), 1);
5533
6564
  init_client();
5534
6565
  init_logger();
5535
6566
  var LOG_LEVEL_MAP = {
@@ -5575,14 +6606,14 @@ function createPromptFunction(stdin, stdout) {
5575
6606
  output: stdout
5576
6607
  });
5577
6608
  stdout.write("\n");
5578
- stdout.write(`${import_chalk4.default.cyan("\u2500".repeat(60))}
6609
+ stdout.write(`${import_chalk5.default.cyan("\u2500".repeat(60))}
5579
6610
  `);
5580
- stdout.write(import_chalk4.default.cyan.bold("\u{1F916} Agent asks:\n"));
6611
+ stdout.write(import_chalk5.default.cyan.bold("\u{1F916} Agent asks:\n"));
5581
6612
  stdout.write(`${question}
5582
6613
  `);
5583
- stdout.write(`${import_chalk4.default.cyan("\u2500".repeat(60))}
6614
+ stdout.write(`${import_chalk5.default.cyan("\u2500".repeat(60))}
5584
6615
  `);
5585
- rl.question(import_chalk4.default.green.bold("You: "), (answer) => {
6616
+ rl.question(import_chalk5.default.green.bold("You: "), (answer) => {
5586
6617
  rl.close();
5587
6618
  resolve(answer);
5588
6619
  });
@@ -5617,29 +6648,39 @@ function parseLogLevel2(value) {
5617
6648
  }
5618
6649
  return normalized;
5619
6650
  }
5620
- function createProgram(env) {
6651
+ function createProgram(env, config) {
5621
6652
  const program = new import_commander3.Command();
5622
6653
  program.name(CLI_NAME).description(CLI_DESCRIPTION).version(package_default.version).option(OPTION_FLAGS.logLevel, OPTION_DESCRIPTIONS.logLevel, parseLogLevel2).option(OPTION_FLAGS.logFile, OPTION_DESCRIPTIONS.logFile).configureOutput({
5623
6654
  writeOut: (str) => env.stdout.write(str),
5624
6655
  writeErr: (str) => env.stderr.write(str)
5625
6656
  });
5626
- registerCompleteCommand(program, env);
5627
- registerAgentCommand(program, env);
6657
+ registerCompleteCommand(program, env, config?.complete);
6658
+ registerAgentCommand(program, env, config?.agent);
5628
6659
  registerModelsCommand(program, env);
6660
+ if (config) {
6661
+ const customNames = getCustomCommandNames(config);
6662
+ for (const name of customNames) {
6663
+ const cmdConfig = config[name];
6664
+ registerCustomCommand(program, name, cmdConfig, env);
6665
+ }
6666
+ }
5629
6667
  return program;
5630
6668
  }
5631
6669
  async function runCLI(overrides = {}) {
6670
+ const opts = "env" in overrides || "config" in overrides ? overrides : { env: overrides };
6671
+ const config = opts.config !== void 0 ? opts.config : loadConfig();
6672
+ const envOverrides = opts.env ?? {};
5632
6673
  const preParser = new import_commander3.Command();
5633
6674
  preParser.option(OPTION_FLAGS.logLevel, OPTION_DESCRIPTIONS.logLevel, parseLogLevel2).option(OPTION_FLAGS.logFile, OPTION_DESCRIPTIONS.logFile).allowUnknownOption().allowExcessArguments().helpOption(false);
5634
6675
  preParser.parse(process.argv);
5635
6676
  const globalOpts = preParser.opts();
5636
6677
  const loggerConfig = {
5637
- logLevel: globalOpts.logLevel,
5638
- logFile: globalOpts.logFile
6678
+ logLevel: globalOpts.logLevel ?? config.global?.["log-level"],
6679
+ logFile: globalOpts.logFile ?? config.global?.["log-file"]
5639
6680
  };
5640
6681
  const defaultEnv = createDefaultEnvironment(loggerConfig);
5641
- const env = { ...defaultEnv, ...overrides };
5642
- const program = createProgram(env);
6682
+ const env = { ...defaultEnv, ...envOverrides };
6683
+ const program = createProgram(env, config);
5643
6684
  await program.parseAsync(env.argv);
5644
6685
  }
5645
6686
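
runCLI now accepts either the pre-0.5 bare environment-overrides object or an object with explicit env/config keys; passing config skips loadConfig() entirely. A sketch of both call shapes, with illustrative argv values, assuming an async context.

// New shape: supply config directly and override parts of the environment.
await runCLI({
  config: {},                                   // no file-based defaults
  env: { argv: ["node", "llmist", "models"] }   // partial environment override
});

// Old shape still works: a bare object is treated as env overrides.
await runCLI({ argv: ["node", "llmist", "models"] });
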