llmist 0.7.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-CTC2WJZA.js → chunk-53MM55JS.js} +40 -5
- package/dist/chunk-53MM55JS.js.map +1 -0
- package/dist/{chunk-ZFHFBEQ5.js → chunk-T24KLXY4.js} +673 -590
- package/dist/chunk-T24KLXY4.js.map +1 -0
- package/dist/cli.cjs +1287 -789
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +532 -116
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +709 -593
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +69 -14
- package/dist/index.d.ts +69 -14
- package/dist/index.js +2 -2
- package/dist/{mock-stream-B2qwECvd.d.cts → mock-stream-DKF5yatf.d.cts} +61 -121
- package/dist/{mock-stream-B2qwECvd.d.ts → mock-stream-DKF5yatf.d.ts} +61 -121
- package/dist/testing/index.cjs +709 -593
- package/dist/testing/index.cjs.map +1 -1
- package/dist/testing/index.d.cts +2 -2
- package/dist/testing/index.d.ts +2 -2
- package/dist/testing/index.js +2 -2
- package/package.json +2 -1
- package/dist/chunk-CTC2WJZA.js.map +0 -1
- package/dist/chunk-ZFHFBEQ5.js.map +0 -1
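The substantive change in this release, visible in the `cli.cjs` diff below, is that gadget parameters move from JSON/YAML/TOML to a single block format: each parameter is introduced by an `!!!ARG:` marker carrying a JSON-Pointer-style path, and the value is read verbatim from the following lines. The sketch below is a rough, condensed illustration only, adapted from the bundled `parseBlockParams`/`setByPointer` functions shown in the diff; the function name is invented here, it is not the package's exported API, and it omits the duplicate-pointer checks and number/boolean coercion the real code applies.

```typescript
// Rough sketch of the new block-parameter parsing (condensed from the bundled
// parseBlockParams/setByPointer in the diff below; illustrative, not the public API).
function parseBlockParamsSketch(content: string, argPrefix = "!!!ARG:"): Record<string, unknown> {
  const result: Record<string, unknown> = {};
  for (const part of content.split(argPrefix).slice(1)) {
    const nl = part.indexOf("\n");
    const pointer = (nl === -1 ? part : part.slice(0, nl)).trim();
    if (!pointer) continue;
    // The value is everything after the marker line, taken verbatim,
    // minus one trailing newline.
    const value = nl === -1 ? "" : part.slice(nl + 1).replace(/\n$/, "");
    // "config/timeout"-style pointers build nested objects; numeric segments
    // ("items/0") build arrays, mirroring setByPointer in the diff.
    const segments = pointer.split("/");
    let current: any = result;
    for (let i = 0; i < segments.length - 1; i++) {
      if (current[segments[i]] === undefined) {
        current[segments[i]] = /^\d+$/.test(segments[i + 1]) ? [] : {};
      }
      current = current[segments[i]];
    }
    current[segments[segments.length - 1]] = value;
  }
  return result;
}

// => { config: { timeout: "30" }, items: [ "first", "second" ] }
// (values stay strings in this sketch; the bundled code also coerces numbers/booleans)
console.log(parseBlockParamsSketch("!!!ARG:config/timeout\n30\n!!!ARG:items/0\nfirst\n!!!ARG:items/1\nsecond"));
```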
package/dist/cli.cjs
CHANGED
|
@@ -32,12 +32,13 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
|
|
|
32
32
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
33
33
|
|
|
34
34
|
// src/core/constants.ts
|
|
35
|
-
var GADGET_START_PREFIX, GADGET_END_PREFIX, DEFAULT_GADGET_OUTPUT_LIMIT, DEFAULT_GADGET_OUTPUT_LIMIT_PERCENT, CHARS_PER_TOKEN, FALLBACK_CONTEXT_WINDOW;
|
|
35
|
+
var GADGET_START_PREFIX, GADGET_END_PREFIX, GADGET_ARG_PREFIX, DEFAULT_GADGET_OUTPUT_LIMIT, DEFAULT_GADGET_OUTPUT_LIMIT_PERCENT, CHARS_PER_TOKEN, FALLBACK_CONTEXT_WINDOW;
|
|
36
36
|
var init_constants = __esm({
|
|
37
37
|
"src/core/constants.ts"() {
|
|
38
38
|
"use strict";
|
|
39
39
|
GADGET_START_PREFIX = "!!!GADGET_START:";
|
|
40
40
|
GADGET_END_PREFIX = "!!!GADGET_END";
|
|
41
|
+
GADGET_ARG_PREFIX = "!!!ARG:";
|
|
41
42
|
DEFAULT_GADGET_OUTPUT_LIMIT = true;
|
|
42
43
|
DEFAULT_GADGET_OUTPUT_LIMIT_PERCENT = 15;
|
|
43
44
|
CHARS_PER_TOKEN = 4;
|
|
@@ -370,17 +371,12 @@ var init_prompt_config = __esm({
|
|
|
370
371
|
"EACH MARKER MUST START WITH A NEWLINE."
|
|
371
372
|
].join("\n"),
|
|
372
373
|
criticalUsage: "INVOKE gadgets using the markers - do not describe what you want to do.",
|
|
373
|
-
|
|
374
|
-
formatDescriptionJson: "Parameters in JSON format (valid JSON object)",
|
|
375
|
-
formatDescriptionToml: "Parameters in TOML format (key = value pairs, use heredoc for multiline: key = <<<EOF ... EOF)",
|
|
374
|
+
formatDescription: (ctx) => `Parameters using ${ctx.argPrefix}name markers (value on next line(s), no escaping needed)`,
|
|
376
375
|
rules: () => [
|
|
377
376
|
"Output ONLY plain text with the exact markers - never use function/tool calling",
|
|
378
377
|
"You can invoke multiple gadgets in a single response",
|
|
379
378
|
"For dependent gadgets, invoke the first one and wait for the result"
|
|
380
379
|
],
|
|
381
|
-
schemaLabelJson: "\n\nInput Schema (JSON):",
|
|
382
|
-
schemaLabelYaml: "\n\nInput Schema (YAML):",
|
|
383
|
-
schemaLabelToml: "\n\nInput Schema (TOML):",
|
|
384
380
|
customExamples: null
|
|
385
381
|
};
|
|
386
382
|
}
|
|
@@ -397,6 +393,7 @@ var init_messages = __esm({
|
|
|
397
393
|
messages = [];
|
|
398
394
|
startPrefix = GADGET_START_PREFIX;
|
|
399
395
|
endPrefix = GADGET_END_PREFIX;
|
|
396
|
+
argPrefix = GADGET_ARG_PREFIX;
|
|
400
397
|
promptConfig;
|
|
401
398
|
constructor(promptConfig) {
|
|
402
399
|
this.promptConfig = promptConfig ?? {};
|
|
@@ -405,26 +402,32 @@ var init_messages = __esm({
|
|
|
405
402
|
* Set custom prefixes for gadget markers.
|
|
406
403
|
* Used to configure history builder to match system prompt markers.
|
|
407
404
|
*/
|
|
408
|
-
withPrefixes(startPrefix, endPrefix) {
|
|
405
|
+
withPrefixes(startPrefix, endPrefix, argPrefix) {
|
|
409
406
|
this.startPrefix = startPrefix;
|
|
410
407
|
this.endPrefix = endPrefix;
|
|
408
|
+
if (argPrefix) {
|
|
409
|
+
this.argPrefix = argPrefix;
|
|
410
|
+
}
|
|
411
411
|
return this;
|
|
412
412
|
}
|
|
413
413
|
addSystem(content, metadata) {
|
|
414
414
|
this.messages.push({ role: "system", content, metadata });
|
|
415
415
|
return this;
|
|
416
416
|
}
|
|
417
|
-
addGadgets(gadgets,
|
|
417
|
+
addGadgets(gadgets, options) {
|
|
418
418
|
if (options?.startPrefix) {
|
|
419
419
|
this.startPrefix = options.startPrefix;
|
|
420
420
|
}
|
|
421
421
|
if (options?.endPrefix) {
|
|
422
422
|
this.endPrefix = options.endPrefix;
|
|
423
423
|
}
|
|
424
|
+
if (options?.argPrefix) {
|
|
425
|
+
this.argPrefix = options.argPrefix;
|
|
426
|
+
}
|
|
424
427
|
const context = {
|
|
425
|
-
parameterFormat,
|
|
426
428
|
startPrefix: this.startPrefix,
|
|
427
429
|
endPrefix: this.endPrefix,
|
|
430
|
+
argPrefix: this.argPrefix,
|
|
428
431
|
gadgetCount: gadgets.length,
|
|
429
432
|
gadgetNames: gadgets.map((g) => g.name ?? g.constructor.name)
|
|
430
433
|
};
|
|
@@ -435,26 +438,19 @@ var init_messages = __esm({
|
|
|
435
438
|
context
|
|
436
439
|
);
|
|
437
440
|
parts.push(mainInstruction);
|
|
438
|
-
parts.push(this.buildGadgetsSection(gadgets
|
|
439
|
-
parts.push(this.buildUsageSection(
|
|
441
|
+
parts.push(this.buildGadgetsSection(gadgets));
|
|
442
|
+
parts.push(this.buildUsageSection(context));
|
|
440
443
|
this.messages.push({ role: "system", content: parts.join("") });
|
|
441
444
|
return this;
|
|
442
445
|
}
|
|
443
|
-
buildGadgetsSection(gadgets
|
|
446
|
+
buildGadgetsSection(gadgets) {
|
|
444
447
|
const parts = [];
|
|
445
448
|
parts.push("\n\nAVAILABLE GADGETS");
|
|
446
449
|
parts.push("\n=================\n");
|
|
447
450
|
for (const gadget of gadgets) {
|
|
448
451
|
const gadgetName = gadget.name ?? gadget.constructor.name;
|
|
449
|
-
const instruction = gadget.getInstruction(
|
|
450
|
-
const
|
|
451
|
-
yaml: "\n\nInput Schema (YAML):",
|
|
452
|
-
json: "\n\nInput Schema (JSON):",
|
|
453
|
-
toml: "\n\nInput Schema (TOML):",
|
|
454
|
-
auto: "\n\nInput Schema (JSON):"
|
|
455
|
-
// auto defaults to JSON schema display
|
|
456
|
-
};
|
|
457
|
-
const schemaMarker = schemaMarkers[parameterFormat];
|
|
452
|
+
const instruction = gadget.getInstruction(this.argPrefix);
|
|
453
|
+
const schemaMarker = "\n\nInput Schema (BLOCK):";
|
|
458
454
|
const schemaIndex = instruction.indexOf(schemaMarker);
|
|
459
455
|
const description = (schemaIndex !== -1 ? instruction.substring(0, schemaIndex) : instruction).trim();
|
|
460
456
|
const schema = schemaIndex !== -1 ? instruction.substring(schemaIndex + schemaMarker.length).trim() : "";
|
|
@@ -465,35 +461,20 @@ ${description}`);
|
|
|
465
461
|
if (schema) {
|
|
466
462
|
parts.push(`
|
|
467
463
|
|
|
468
|
-
PARAMETERS (
|
|
464
|
+
PARAMETERS (BLOCK):
|
|
469
465
|
${schema}`);
|
|
470
466
|
}
|
|
471
467
|
parts.push("\n\n---");
|
|
472
468
|
}
|
|
473
469
|
return parts.join("");
|
|
474
470
|
}
|
|
475
|
-
buildUsageSection(
|
|
471
|
+
buildUsageSection(context) {
|
|
476
472
|
const parts = [];
|
|
477
|
-
const
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
json: {
|
|
483
|
-
config: this.promptConfig.formatDescriptionJson,
|
|
484
|
-
defaultValue: DEFAULT_PROMPTS.formatDescriptionJson
|
|
485
|
-
},
|
|
486
|
-
toml: {
|
|
487
|
-
config: this.promptConfig.formatDescriptionToml,
|
|
488
|
-
defaultValue: DEFAULT_PROMPTS.formatDescriptionToml
|
|
489
|
-
},
|
|
490
|
-
auto: {
|
|
491
|
-
config: this.promptConfig.formatDescriptionJson,
|
|
492
|
-
defaultValue: DEFAULT_PROMPTS.formatDescriptionJson
|
|
493
|
-
}
|
|
494
|
-
};
|
|
495
|
-
const { config, defaultValue } = formatDescriptionMap[parameterFormat];
|
|
496
|
-
const formatDescription = resolvePromptTemplate(config, defaultValue, context);
|
|
473
|
+
const formatDescription = resolvePromptTemplate(
|
|
474
|
+
this.promptConfig.formatDescription,
|
|
475
|
+
DEFAULT_PROMPTS.formatDescription,
|
|
476
|
+
context
|
|
477
|
+
);
|
|
497
478
|
parts.push("\n\nHOW TO INVOKE GADGETS");
|
|
498
479
|
parts.push("\n=====================\n");
|
|
499
480
|
const criticalUsage = resolvePromptTemplate(
|
|
@@ -511,124 +492,90 @@ CRITICAL: ${criticalUsage}
|
|
|
511
492
|
2. ${formatDescription}`);
|
|
512
493
|
parts.push(`
|
|
513
494
|
3. End marker: ${this.endPrefix}`);
|
|
514
|
-
parts.push(this.buildExamplesSection(
|
|
495
|
+
parts.push(this.buildExamplesSection(context));
|
|
515
496
|
parts.push(this.buildRulesSection(context));
|
|
516
497
|
parts.push("\n");
|
|
517
498
|
return parts.join("");
|
|
518
499
|
}
|
|
519
|
-
buildExamplesSection(
|
|
500
|
+
buildExamplesSection(context) {
|
|
520
501
|
if (this.promptConfig.customExamples) {
|
|
521
502
|
return this.promptConfig.customExamples(context);
|
|
522
503
|
}
|
|
523
504
|
const parts = [];
|
|
524
|
-
const
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
to
|
|
528
|
-
|
|
529
|
-
${this.
|
|
530
|
-
|
|
531
|
-
{
|
|
532
|
-
${this.endPrefix}`,
|
|
533
|
-
toml: `${this.startPrefix}translate
|
|
534
|
-
from = "English"
|
|
535
|
-
to = "Polish"
|
|
536
|
-
content = "Paris is the capital of France: a beautiful city."
|
|
537
|
-
${this.endPrefix}`,
|
|
538
|
-
auto: `${this.startPrefix}translate
|
|
539
|
-
{"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
|
|
540
|
-
${this.endPrefix}`
|
|
541
|
-
};
|
|
505
|
+
const singleExample = `${this.startPrefix}translate
|
|
506
|
+
${this.argPrefix}from
|
|
507
|
+
English
|
|
508
|
+
${this.argPrefix}to
|
|
509
|
+
Polish
|
|
510
|
+
${this.argPrefix}content
|
|
511
|
+
Paris is the capital of France: a beautiful city.
|
|
512
|
+
${this.endPrefix}`;
|
|
542
513
|
parts.push(`
|
|
543
514
|
|
|
544
515
|
EXAMPLE (Single Gadget):
|
|
545
516
|
|
|
546
|
-
${
|
|
547
|
-
const
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
to
|
|
551
|
-
|
|
552
|
-
${this.
|
|
553
|
-
|
|
554
|
-
type: economic_analysis
|
|
555
|
-
matter: "Polish Economy"
|
|
556
|
-
question: <<<EOF
|
|
557
|
-
Analyze the following:
|
|
558
|
-
- Polish arms exports 2025
|
|
559
|
-
- Economic implications
|
|
560
|
-
EOF
|
|
561
|
-
${this.endPrefix}`,
|
|
562
|
-
json: `${this.startPrefix}translate
|
|
563
|
-
{"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
|
|
564
|
-
${this.endPrefix}
|
|
565
|
-
${this.startPrefix}analyze
|
|
566
|
-
{"type": "economic_analysis", "matter": "Polish Economy", "question": "Analyze the following: Polish arms exports 2025, economic implications"}
|
|
567
|
-
${this.endPrefix}`,
|
|
568
|
-
toml: `${this.startPrefix}translate
|
|
569
|
-
from = "English"
|
|
570
|
-
to = "Polish"
|
|
571
|
-
content = "Paris is the capital of France: a beautiful city."
|
|
517
|
+
${singleExample}`);
|
|
518
|
+
const multipleExample = `${this.startPrefix}translate
|
|
519
|
+
${this.argPrefix}from
|
|
520
|
+
English
|
|
521
|
+
${this.argPrefix}to
|
|
522
|
+
Polish
|
|
523
|
+
${this.argPrefix}content
|
|
524
|
+
Paris is the capital of France: a beautiful city.
|
|
572
525
|
${this.endPrefix}
|
|
573
526
|
${this.startPrefix}analyze
|
|
574
|
-
type
|
|
575
|
-
|
|
576
|
-
|
|
527
|
+
${this.argPrefix}type
|
|
528
|
+
economic_analysis
|
|
529
|
+
${this.argPrefix}matter
|
|
530
|
+
Polish Economy
|
|
531
|
+
${this.argPrefix}question
|
|
577
532
|
Analyze the following:
|
|
578
533
|
- Polish arms exports 2025
|
|
579
534
|
- Economic implications
|
|
580
|
-
|
|
581
|
-
${this.endPrefix}`,
|
|
582
|
-
auto: `${this.startPrefix}translate
|
|
583
|
-
{"from": "English", "to": "Polish", "content": "Paris is the capital of France: a beautiful city."}
|
|
584
|
-
${this.endPrefix}
|
|
585
|
-
${this.startPrefix}analyze
|
|
586
|
-
{"type": "economic_analysis", "matter": "Polish Economy", "question": "Analyze the following: Polish arms exports 2025, economic implications"}
|
|
587
|
-
${this.endPrefix}`
|
|
588
|
-
};
|
|
535
|
+
${this.endPrefix}`;
|
|
589
536
|
parts.push(`
|
|
590
537
|
|
|
591
538
|
EXAMPLE (Multiple Gadgets):
|
|
592
539
|
|
|
593
|
-
${
|
|
594
|
-
|
|
595
|
-
parts.push(`
|
|
596
|
-
|
|
597
|
-
YAML HEREDOC SYNTAX:
|
|
598
|
-
For string values with multiple lines, use heredoc syntax (<<<DELIMITER...DELIMITER):
|
|
599
|
-
|
|
600
|
-
filePath: "README.md"
|
|
601
|
-
content: <<<EOF
|
|
602
|
-
# Project Title
|
|
540
|
+
${multipleExample}`);
|
|
541
|
+
parts.push(`
|
|
603
542
|
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
- Special characters: # : -
|
|
607
|
-
- Multiple paragraphs
|
|
608
|
-
EOF
|
|
543
|
+
BLOCK FORMAT SYNTAX:
|
|
544
|
+
Block format uses ${this.argPrefix}name markers. Values are captured verbatim until the next marker.
|
|
609
545
|
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
546
|
+
${this.argPrefix}filename
|
|
547
|
+
calculator.ts
|
|
548
|
+
${this.argPrefix}code
|
|
549
|
+
class Calculator {
|
|
550
|
+
private history: string[] = [];
|
|
614
551
|
|
|
615
|
-
|
|
616
|
-
|
|
552
|
+
add(a: number, b: number): number {
|
|
553
|
+
const result = a + b;
|
|
554
|
+
this.history.push(\`\${a} + \${b} = \${result}\`);
|
|
555
|
+
return result;
|
|
556
|
+
}
|
|
557
|
+
}
|
|
617
558
|
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
559
|
+
BLOCK FORMAT RULES:
|
|
560
|
+
- Each parameter starts with ${this.argPrefix}parameterName on its own line
|
|
561
|
+
- The value starts on the NEXT line after the marker
|
|
562
|
+
- Value ends when the next ${this.argPrefix} or ${this.endPrefix} appears
|
|
563
|
+
- NO escaping needed - write values exactly as they should appear
|
|
564
|
+
- Perfect for code, JSON, markdown, or any content with special characters
|
|
621
565
|
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
566
|
+
NESTED OBJECTS (use / separator):
|
|
567
|
+
${this.argPrefix}config/timeout
|
|
568
|
+
30
|
|
569
|
+
${this.argPrefix}config/retries
|
|
570
|
+
3
|
|
571
|
+
Produces: { "config": { "timeout": "30", "retries": "3" } }
|
|
627
572
|
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
573
|
+
ARRAYS (use numeric indices):
|
|
574
|
+
${this.argPrefix}items/0
|
|
575
|
+
first
|
|
576
|
+
${this.argPrefix}items/1
|
|
577
|
+
second
|
|
578
|
+
Produces: { "items": ["first", "second"] }`);
|
|
632
579
|
return parts.join("");
|
|
633
580
|
}
|
|
634
581
|
buildRulesSection(context) {
|
|
@@ -649,8 +596,8 @@ NEVER use TOML triple-quote strings ("""). ALWAYS use heredoc syntax (<<<EOF...E
|
|
|
649
596
|
this.messages.push({ role: "assistant", content, metadata });
|
|
650
597
|
return this;
|
|
651
598
|
}
|
|
652
|
-
addGadgetCall(gadget, parameters, result
|
|
653
|
-
const paramStr = this.
|
|
599
|
+
addGadgetCall(gadget, parameters, result) {
|
|
600
|
+
const paramStr = this.formatBlockParameters(parameters, "");
|
|
654
601
|
this.messages.push({
|
|
655
602
|
role: "assistant",
|
|
656
603
|
content: `${this.startPrefix}${gadget}
|
|
@@ -663,26 +610,32 @@ ${this.endPrefix}`
|
|
|
663
610
|
});
|
|
664
611
|
return this;
|
|
665
612
|
}
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
}
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
613
|
+
/**
|
|
614
|
+
* Format parameters as Block format with JSON Pointer paths.
|
|
615
|
+
* Uses the configured argPrefix for consistency with system prompt.
|
|
616
|
+
*/
|
|
617
|
+
formatBlockParameters(params, prefix) {
|
|
618
|
+
const lines = [];
|
|
619
|
+
for (const [key, value] of Object.entries(params)) {
|
|
620
|
+
const fullPath = prefix ? `${prefix}/${key}` : key;
|
|
621
|
+
if (Array.isArray(value)) {
|
|
622
|
+
value.forEach((item, index) => {
|
|
623
|
+
const itemPath = `${fullPath}/${index}`;
|
|
624
|
+
if (typeof item === "object" && item !== null) {
|
|
625
|
+
lines.push(this.formatBlockParameters(item, itemPath));
|
|
626
|
+
} else {
|
|
627
|
+
lines.push(`${this.argPrefix}${itemPath}`);
|
|
628
|
+
lines.push(String(item));
|
|
629
|
+
}
|
|
630
|
+
});
|
|
631
|
+
} else if (typeof value === "object" && value !== null) {
|
|
632
|
+
lines.push(this.formatBlockParameters(value, fullPath));
|
|
633
|
+
} else {
|
|
634
|
+
lines.push(`${this.argPrefix}${fullPath}`);
|
|
635
|
+
lines.push(String(value));
|
|
636
|
+
}
|
|
684
637
|
}
|
|
685
|
-
return
|
|
638
|
+
return lines.join("\n");
|
|
686
639
|
}
|
|
687
640
|
build() {
|
|
688
641
|
return [...this.messages];
|
|
@@ -854,125 +807,72 @@ var init_schema_to_json = __esm({
|
|
|
854
807
|
});
|
|
855
808
|
|
|
856
809
|
// src/gadgets/gadget.ts
|
|
857
|
-
function
|
|
858
|
-
const lines = content.split("\n");
|
|
859
|
-
for (const delimiter of HEREDOC_DELIMITERS) {
|
|
860
|
-
const regex = new RegExp(`^${delimiter}\\s*$`);
|
|
861
|
-
const isUsed = lines.some((line) => regex.test(line));
|
|
862
|
-
if (!isUsed) {
|
|
863
|
-
return delimiter;
|
|
864
|
-
}
|
|
865
|
-
}
|
|
866
|
-
let counter = 1;
|
|
867
|
-
while (counter < 1e3) {
|
|
868
|
-
const delimiter = `HEREDOC_${counter}`;
|
|
869
|
-
const regex = new RegExp(`^${delimiter}\\s*$`);
|
|
870
|
-
const isUsed = lines.some((line) => regex.test(line));
|
|
871
|
-
if (!isUsed) {
|
|
872
|
-
return delimiter;
|
|
873
|
-
}
|
|
874
|
-
counter++;
|
|
875
|
-
}
|
|
876
|
-
return "HEREDOC_FALLBACK";
|
|
877
|
-
}
|
|
878
|
-
function formatYamlValue(value, indent = "") {
|
|
879
|
-
if (typeof value === "string") {
|
|
880
|
-
const lines = value.split("\n");
|
|
881
|
-
if (lines.length === 1 && !value.includes(":") && !value.startsWith("-")) {
|
|
882
|
-
return value;
|
|
883
|
-
}
|
|
884
|
-
const delimiter = findSafeDelimiter(value);
|
|
885
|
-
return `<<<${delimiter}
|
|
886
|
-
${value}
|
|
887
|
-
${delimiter}`;
|
|
888
|
-
}
|
|
889
|
-
if (typeof value === "number" || typeof value === "boolean") {
|
|
890
|
-
return String(value);
|
|
891
|
-
}
|
|
892
|
-
if (value === null || value === void 0) {
|
|
893
|
-
return "null";
|
|
894
|
-
}
|
|
895
|
-
if (Array.isArray(value)) {
|
|
896
|
-
if (value.length === 0) return "[]";
|
|
897
|
-
const items = value.map((item) => `${indent}- ${formatYamlValue(item, indent + " ")}`);
|
|
898
|
-
return "\n" + items.join("\n");
|
|
899
|
-
}
|
|
900
|
-
if (typeof value === "object") {
|
|
901
|
-
const entries = Object.entries(value);
|
|
902
|
-
if (entries.length === 0) return "{}";
|
|
903
|
-
const lines = entries.map(([k, v]) => {
|
|
904
|
-
const formattedValue = formatYamlValue(v, indent + " ");
|
|
905
|
-
if (formattedValue.startsWith("\n") || formattedValue.startsWith("|")) {
|
|
906
|
-
return `${indent}${k}: ${formattedValue}`;
|
|
907
|
-
}
|
|
908
|
-
return `${indent}${k}: ${formattedValue}`;
|
|
909
|
-
});
|
|
910
|
-
return "\n" + lines.join("\n");
|
|
911
|
-
}
|
|
912
|
-
return yaml.dump(value).trimEnd();
|
|
913
|
-
}
|
|
914
|
-
function formatParamsAsYaml(params) {
|
|
810
|
+
function formatParamsAsBlock(params, prefix = "", argPrefix = GADGET_ARG_PREFIX) {
|
|
915
811
|
const lines = [];
|
|
916
812
|
for (const [key, value] of Object.entries(params)) {
|
|
917
|
-
const
|
|
918
|
-
if (
|
|
919
|
-
|
|
813
|
+
const fullPath = prefix ? `${prefix}/${key}` : key;
|
|
814
|
+
if (Array.isArray(value)) {
|
|
815
|
+
value.forEach((item, index) => {
|
|
816
|
+
const itemPath = `${fullPath}/${index}`;
|
|
817
|
+
if (typeof item === "object" && item !== null) {
|
|
818
|
+
lines.push(formatParamsAsBlock(item, itemPath, argPrefix));
|
|
819
|
+
} else {
|
|
820
|
+
lines.push(`${argPrefix}${itemPath}`);
|
|
821
|
+
lines.push(String(item));
|
|
822
|
+
}
|
|
823
|
+
});
|
|
824
|
+
} else if (typeof value === "object" && value !== null) {
|
|
825
|
+
lines.push(formatParamsAsBlock(value, fullPath, argPrefix));
|
|
920
826
|
} else {
|
|
921
|
-
lines.push(`${
|
|
827
|
+
lines.push(`${argPrefix}${fullPath}`);
|
|
828
|
+
lines.push(String(value));
|
|
922
829
|
}
|
|
923
830
|
}
|
|
924
831
|
return lines.join("\n");
|
|
925
832
|
}
|
|
926
|
-
function
|
|
927
|
-
const entries = Object.entries(obj).map(([k, v]) => `${k} = ${formatTomlValue(v)}`);
|
|
928
|
-
return `{ ${entries.join(", ")} }`;
|
|
929
|
-
}
|
|
930
|
-
function formatTomlValue(value) {
|
|
931
|
-
if (typeof value === "string") {
|
|
932
|
-
if (value.includes("\n")) {
|
|
933
|
-
const delimiter = findSafeDelimiter(value);
|
|
934
|
-
return `<<<${delimiter}
|
|
935
|
-
${value}
|
|
936
|
-
${delimiter}`;
|
|
937
|
-
}
|
|
938
|
-
return JSON.stringify(value);
|
|
939
|
-
}
|
|
940
|
-
if (typeof value === "number" || typeof value === "boolean") {
|
|
941
|
-
return String(value);
|
|
942
|
-
}
|
|
943
|
-
if (value === null || value === void 0) {
|
|
944
|
-
return '""';
|
|
945
|
-
}
|
|
946
|
-
if (Array.isArray(value)) {
|
|
947
|
-
if (value.length === 0) return "[]";
|
|
948
|
-
const items = value.map((item) => {
|
|
949
|
-
if (typeof item === "object" && item !== null && !Array.isArray(item)) {
|
|
950
|
-
return formatTomlInlineTable(item);
|
|
951
|
-
}
|
|
952
|
-
return formatTomlValue(item);
|
|
953
|
-
});
|
|
954
|
-
return `[${items.join(", ")}]`;
|
|
955
|
-
}
|
|
956
|
-
if (typeof value === "object") {
|
|
957
|
-
return formatTomlInlineTable(value);
|
|
958
|
-
}
|
|
959
|
-
return JSON.stringify(value);
|
|
960
|
-
}
|
|
961
|
-
function formatParamsAsToml(params) {
|
|
833
|
+
function formatSchemaAsPlainText(schema, indent = "") {
|
|
962
834
|
const lines = [];
|
|
963
|
-
|
|
964
|
-
|
|
835
|
+
const properties = schema.properties || {};
|
|
836
|
+
const required = schema.required || [];
|
|
837
|
+
for (const [key, prop] of Object.entries(properties)) {
|
|
838
|
+
const propObj = prop;
|
|
839
|
+
const type = propObj.type;
|
|
840
|
+
const description = propObj.description;
|
|
841
|
+
const isRequired = required.includes(key);
|
|
842
|
+
const enumValues = propObj.enum;
|
|
843
|
+
let line = `${indent}- ${key}`;
|
|
844
|
+
if (type === "array") {
|
|
845
|
+
const items = propObj.items;
|
|
846
|
+
const itemType = items?.type || "any";
|
|
847
|
+
line += ` (array of ${itemType})`;
|
|
848
|
+
} else if (type === "object" && propObj.properties) {
|
|
849
|
+
line += " (object)";
|
|
850
|
+
} else {
|
|
851
|
+
line += ` (${type})`;
|
|
852
|
+
}
|
|
853
|
+
if (isRequired) {
|
|
854
|
+
line += " [required]";
|
|
855
|
+
}
|
|
856
|
+
if (description) {
|
|
857
|
+
line += `: ${description}`;
|
|
858
|
+
}
|
|
859
|
+
if (enumValues) {
|
|
860
|
+
line += ` - one of: ${enumValues.map((v) => `"${v}"`).join(", ")}`;
|
|
861
|
+
}
|
|
862
|
+
lines.push(line);
|
|
863
|
+
if (type === "object" && propObj.properties) {
|
|
864
|
+
lines.push(formatSchemaAsPlainText(propObj, indent + " "));
|
|
865
|
+
}
|
|
965
866
|
}
|
|
966
867
|
return lines.join("\n");
|
|
967
868
|
}
|
|
968
|
-
var
|
|
869
|
+
var BaseGadget;
|
|
969
870
|
var init_gadget = __esm({
|
|
970
871
|
"src/gadgets/gadget.ts"() {
|
|
971
872
|
"use strict";
|
|
972
|
-
|
|
873
|
+
init_constants();
|
|
973
874
|
init_schema_to_json();
|
|
974
875
|
init_schema_validator();
|
|
975
|
-
HEREDOC_DELIMITERS = ["EOF", "END", "DOC", "CONTENT", "TEXT", "HEREDOC", "DATA", "BLOCK"];
|
|
976
876
|
BaseGadget = class {
|
|
977
877
|
/**
|
|
978
878
|
* The name of the gadget. Used for identification when LLM calls it.
|
|
@@ -1003,19 +903,19 @@ var init_gadget = __esm({
|
|
|
1003
903
|
/**
|
|
1004
904
|
* Auto-generated instruction text for the LLM.
|
|
1005
905
|
* Combines name, description, and parameter schema into a formatted instruction.
|
|
1006
|
-
* @deprecated Use getInstruction(
|
|
906
|
+
* @deprecated Use getInstruction() instead
|
|
1007
907
|
*/
|
|
1008
908
|
get instruction() {
|
|
1009
|
-
return this.getInstruction(
|
|
909
|
+
return this.getInstruction();
|
|
1010
910
|
}
|
|
1011
911
|
/**
|
|
1012
|
-
* Generate instruction text for the LLM
|
|
912
|
+
* Generate instruction text for the LLM.
|
|
1013
913
|
* Combines name, description, and parameter schema into a formatted instruction.
|
|
1014
914
|
*
|
|
1015
|
-
* @param
|
|
915
|
+
* @param argPrefix - Optional custom argument prefix for block format examples
|
|
1016
916
|
* @returns Formatted instruction string
|
|
1017
917
|
*/
|
|
1018
|
-
getInstruction(
|
|
918
|
+
getInstruction(argPrefix) {
|
|
1019
919
|
const parts = [];
|
|
1020
920
|
parts.push(this.description);
|
|
1021
921
|
if (this.parameterSchema) {
|
|
@@ -1024,20 +924,12 @@ var init_gadget = __esm({
|
|
|
1024
924
|
const jsonSchema = schemaToJSONSchema(this.parameterSchema, {
|
|
1025
925
|
target: "draft-7"
|
|
1026
926
|
});
|
|
1027
|
-
|
|
1028
|
-
|
|
1029
|
-
parts.push(JSON.stringify(jsonSchema, null, 2));
|
|
1030
|
-
} else if (format === "toml") {
|
|
1031
|
-
parts.push("\n\nInput Schema (TOML):");
|
|
1032
|
-
parts.push(JSON.stringify(jsonSchema, null, 2));
|
|
1033
|
-
} else {
|
|
1034
|
-
const yamlSchema = yaml.dump(jsonSchema).trimEnd();
|
|
1035
|
-
parts.push("\n\nInput Schema (YAML):");
|
|
1036
|
-
parts.push(yamlSchema);
|
|
1037
|
-
}
|
|
927
|
+
parts.push("\n\nParameters:");
|
|
928
|
+
parts.push(formatSchemaAsPlainText(jsonSchema));
|
|
1038
929
|
}
|
|
1039
930
|
if (this.examples && this.examples.length > 0) {
|
|
1040
931
|
parts.push("\n\nExamples:");
|
|
932
|
+
const effectiveArgPrefix = argPrefix ?? GADGET_ARG_PREFIX;
|
|
1041
933
|
this.examples.forEach((example, index) => {
|
|
1042
934
|
if (index > 0) {
|
|
1043
935
|
parts.push("");
|
|
@@ -1046,13 +938,7 @@ var init_gadget = __esm({
|
|
|
1046
938
|
parts.push(`# ${example.comment}`);
|
|
1047
939
|
}
|
|
1048
940
|
parts.push("Input:");
|
|
1049
|
-
|
|
1050
|
-
parts.push(JSON.stringify(example.params, null, 2));
|
|
1051
|
-
} else if (format === "toml") {
|
|
1052
|
-
parts.push(formatParamsAsToml(example.params));
|
|
1053
|
-
} else {
|
|
1054
|
-
parts.push(formatParamsAsYaml(example.params));
|
|
1055
|
-
}
|
|
941
|
+
parts.push(formatParamsAsBlock(example.params, "", effectiveArgPrefix));
|
|
1056
942
|
if (example.output !== void 0) {
|
|
1057
943
|
parts.push("Output:");
|
|
1058
944
|
parts.push(example.output);
|
|
@@ -1329,14 +1215,12 @@ var init_conversation_manager = __esm({
|
|
|
1329
1215
|
baseMessages;
|
|
1330
1216
|
initialMessages;
|
|
1331
1217
|
historyBuilder;
|
|
1332
|
-
parameterFormat;
|
|
1333
1218
|
constructor(baseMessages, initialMessages, options = {}) {
|
|
1334
1219
|
this.baseMessages = baseMessages;
|
|
1335
1220
|
this.initialMessages = initialMessages;
|
|
1336
|
-
this.parameterFormat = options.parameterFormat ?? "json";
|
|
1337
1221
|
this.historyBuilder = new LLMMessageBuilder();
|
|
1338
1222
|
if (options.startPrefix && options.endPrefix) {
|
|
1339
|
-
this.historyBuilder.withPrefixes(options.startPrefix, options.endPrefix);
|
|
1223
|
+
this.historyBuilder.withPrefixes(options.startPrefix, options.endPrefix, options.argPrefix);
|
|
1340
1224
|
}
|
|
1341
1225
|
}
|
|
1342
1226
|
addUserMessage(content) {
|
|
@@ -1346,7 +1230,7 @@ var init_conversation_manager = __esm({
|
|
|
1346
1230
|
this.historyBuilder.addAssistant(content);
|
|
1347
1231
|
}
|
|
1348
1232
|
addGadgetCall(gadgetName, parameters, result) {
|
|
1349
|
-
this.historyBuilder.addGadgetCall(gadgetName, parameters, result
|
|
1233
|
+
this.historyBuilder.addGadgetCall(gadgetName, parameters, result);
|
|
1350
1234
|
}
|
|
1351
1235
|
getMessages() {
|
|
1352
1236
|
return [...this.baseMessages, ...this.initialMessages, ...this.historyBuilder.build()];
|
|
@@ -1369,7 +1253,7 @@ async function runWithHandlers(agentGenerator, handlers) {
|
|
|
1369
1253
|
await handlers.onGadgetCall({
|
|
1370
1254
|
gadgetName: event.call.gadgetName,
|
|
1371
1255
|
parameters: event.call.parameters,
|
|
1372
|
-
|
|
1256
|
+
parametersRaw: event.call.parametersRaw
|
|
1373
1257
|
});
|
|
1374
1258
|
}
|
|
1375
1259
|
break;
|
|
@@ -1646,7 +1530,7 @@ var init_executor = __esm({
|
|
|
1646
1530
|
this.logger.error("Gadget parameter parse error", {
|
|
1647
1531
|
gadgetName: call.gadgetName,
|
|
1648
1532
|
parseError: call.parseError,
|
|
1649
|
-
rawParameters: call.
|
|
1533
|
+
rawParameters: call.parametersRaw
|
|
1650
1534
|
});
|
|
1651
1535
|
return {
|
|
1652
1536
|
gadgetName: call.gadgetName,
|
|
@@ -1808,168 +1692,107 @@ var init_executor = __esm({
|
|
|
1808
1692
|
}
|
|
1809
1693
|
});
|
|
1810
1694
|
|
|
1811
|
-
// src/gadgets/
|
|
1812
|
-
function
|
|
1813
|
-
const
|
|
1814
|
-
const result =
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
1818
|
-
const
|
|
1819
|
-
|
|
1820
|
-
|
|
1821
|
-
const
|
|
1822
|
-
|
|
1823
|
-
|
|
1824
|
-
|
|
1825
|
-
|
|
1826
|
-
|
|
1827
|
-
|
|
1828
|
-
if (i < lines.length) {
|
|
1829
|
-
i++;
|
|
1830
|
-
}
|
|
1831
|
-
result.push(`${indent}${key}: |`);
|
|
1832
|
-
for (const bodyLine of bodyLines) {
|
|
1833
|
-
result.push(`${indent} ${bodyLine}`);
|
|
1695
|
+
// src/gadgets/block-params.ts
|
|
1696
|
+
function parseBlockParams(content, options) {
|
|
1697
|
+
const argPrefix = options?.argPrefix ?? GADGET_ARG_PREFIX;
|
|
1698
|
+
const result = {};
|
|
1699
|
+
const seenPointers = /* @__PURE__ */ new Set();
|
|
1700
|
+
const parts = content.split(argPrefix);
|
|
1701
|
+
for (let i = 1; i < parts.length; i++) {
|
|
1702
|
+
const part = parts[i];
|
|
1703
|
+
const newlineIndex = part.indexOf("\n");
|
|
1704
|
+
if (newlineIndex === -1) {
|
|
1705
|
+
const pointer2 = part.trim();
|
|
1706
|
+
if (pointer2) {
|
|
1707
|
+
if (seenPointers.has(pointer2)) {
|
|
1708
|
+
throw new Error(`Duplicate pointer: ${pointer2}`);
|
|
1709
|
+
}
|
|
1710
|
+
seenPointers.add(pointer2);
|
|
1711
|
+
setByPointer(result, pointer2, "");
|
|
1834
1712
|
}
|
|
1835
1713
|
continue;
|
|
1836
1714
|
}
|
|
1837
|
-
const
|
|
1838
|
-
|
|
1839
|
-
|
|
1840
|
-
|
|
1841
|
-
result.push(line);
|
|
1842
|
-
i++;
|
|
1843
|
-
const keyIndentLen2 = indent.length;
|
|
1844
|
-
const blockLines = [];
|
|
1845
|
-
let minContentIndent = Infinity;
|
|
1846
|
-
while (i < lines.length) {
|
|
1847
|
-
const blockLine = lines[i];
|
|
1848
|
-
const blockIndentMatch = blockLine.match(/^(\s*)/);
|
|
1849
|
-
const blockIndentLen = blockIndentMatch ? blockIndentMatch[1].length : 0;
|
|
1850
|
-
if (blockLine.trim() === "") {
|
|
1851
|
-
blockLines.push({ content: "", originalIndent: 0 });
|
|
1852
|
-
i++;
|
|
1853
|
-
continue;
|
|
1854
|
-
}
|
|
1855
|
-
if (blockIndentLen > keyIndentLen2) {
|
|
1856
|
-
const content = blockLine.substring(blockIndentLen);
|
|
1857
|
-
blockLines.push({ content, originalIndent: blockIndentLen });
|
|
1858
|
-
if (content.trim().length > 0) {
|
|
1859
|
-
minContentIndent = Math.min(minContentIndent, blockIndentLen);
|
|
1860
|
-
}
|
|
1861
|
-
i++;
|
|
1862
|
-
} else {
|
|
1863
|
-
break;
|
|
1864
|
-
}
|
|
1865
|
-
}
|
|
1866
|
-
const targetIndent = keyIndentLen2 + 2;
|
|
1867
|
-
for (const blockLine of blockLines) {
|
|
1868
|
-
if (blockLine.content === "") {
|
|
1869
|
-
result.push("");
|
|
1870
|
-
} else {
|
|
1871
|
-
result.push(" ".repeat(targetIndent) + blockLine.content);
|
|
1872
|
-
}
|
|
1873
|
-
}
|
|
1874
|
-
continue;
|
|
1875
|
-
}
|
|
1876
|
-
if (value.startsWith('"') || value.startsWith("'") || value === "true" || value === "false" || /^-?\d+(\.\d+)?$/.test(value)) {
|
|
1877
|
-
result.push(line);
|
|
1878
|
-
i++;
|
|
1879
|
-
continue;
|
|
1880
|
-
}
|
|
1881
|
-
const keyIndentLen = indent.length;
|
|
1882
|
-
const continuationLines = [];
|
|
1883
|
-
let j = i + 1;
|
|
1884
|
-
while (j < lines.length) {
|
|
1885
|
-
const nextLine = lines[j];
|
|
1886
|
-
if (nextLine.trim() === "") {
|
|
1887
|
-
continuationLines.push(nextLine);
|
|
1888
|
-
j++;
|
|
1889
|
-
continue;
|
|
1890
|
-
}
|
|
1891
|
-
const nextIndentMatch = nextLine.match(/^(\s*)/);
|
|
1892
|
-
const nextIndentLen = nextIndentMatch ? nextIndentMatch[1].length : 0;
|
|
1893
|
-
if (nextIndentLen > keyIndentLen) {
|
|
1894
|
-
continuationLines.push(nextLine);
|
|
1895
|
-
j++;
|
|
1896
|
-
} else {
|
|
1897
|
-
break;
|
|
1898
|
-
}
|
|
1899
|
-
}
|
|
1900
|
-
if (continuationLines.length > 0 && continuationLines.some((l) => l.trim().length > 0)) {
|
|
1901
|
-
result.push(`${indent}${key}: |`);
|
|
1902
|
-
result.push(`${indent} ${value}`);
|
|
1903
|
-
for (const contLine of continuationLines) {
|
|
1904
|
-
if (contLine.trim() === "") {
|
|
1905
|
-
result.push("");
|
|
1906
|
-
} else {
|
|
1907
|
-
const contIndentMatch = contLine.match(/^(\s*)/);
|
|
1908
|
-
const contIndent = contIndentMatch ? contIndentMatch[1] : "";
|
|
1909
|
-
const contContent = contLine.substring(contIndent.length);
|
|
1910
|
-
result.push(`${indent} ${contContent}`);
|
|
1911
|
-
}
|
|
1912
|
-
}
|
|
1913
|
-
i = j;
|
|
1914
|
-
continue;
|
|
1915
|
-
}
|
|
1916
|
-
if (value.includes(": ") || value.endsWith(":")) {
|
|
1917
|
-
const escaped = value.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
|
1918
|
-
result.push(`${indent}${key}: "${escaped}"`);
|
|
1919
|
-
i++;
|
|
1920
|
-
continue;
|
|
1921
|
-
}
|
|
1715
|
+
const pointer = part.substring(0, newlineIndex).trim();
|
|
1716
|
+
let value = part.substring(newlineIndex + 1);
|
|
1717
|
+
if (value.endsWith("\n")) {
|
|
1718
|
+
value = value.slice(0, -1);
|
|
1922
1719
|
}
|
|
1923
|
-
|
|
1924
|
-
|
|
1720
|
+
if (!pointer) {
|
|
1721
|
+
continue;
|
|
1722
|
+
}
|
|
1723
|
+
if (seenPointers.has(pointer)) {
|
|
1724
|
+
throw new Error(`Duplicate pointer: ${pointer}`);
|
|
1725
|
+
}
|
|
1726
|
+
seenPointers.add(pointer);
|
|
1727
|
+
setByPointer(result, pointer, value);
|
|
1925
1728
|
}
|
|
1926
|
-
return result
|
|
1729
|
+
return result;
|
|
1927
1730
|
}
|
|
1928
|
-
function
|
|
1929
|
-
|
|
1731
|
+
function coerceValue(value) {
|
|
1732
|
+
if (value.includes("\n")) {
|
|
1733
|
+
return value;
|
|
1734
|
+
}
|
|
1735
|
+
const trimmed = value.trim();
|
|
1736
|
+
if (trimmed === "true") return true;
|
|
1737
|
+
if (trimmed === "false") return false;
|
|
1738
|
+
if (trimmed !== "" && /^-?\d+(\.\d+)?$/.test(trimmed)) {
|
|
1739
|
+
const num = Number(trimmed);
|
|
1740
|
+
if (!isNaN(num) && isFinite(num)) {
|
|
1741
|
+
return num;
|
|
1742
|
+
}
|
|
1743
|
+
}
|
|
1744
|
+
return value;
|
|
1930
1745
|
}
|
|
1931
|
-
function
|
|
1932
|
-
const
|
|
1933
|
-
|
|
1934
|
-
let i = 0;
|
|
1935
|
-
|
|
1936
|
-
|
|
1937
|
-
const
|
|
1938
|
-
|
|
1939
|
-
|
|
1940
|
-
|
|
1941
|
-
|
|
1942
|
-
|
|
1943
|
-
|
|
1944
|
-
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
}
|
|
1955
|
-
if (bodyLines.length === 0) {
|
|
1956
|
-
result.push(`${indent}${key} = ''''''`);
|
|
1957
|
-
} else {
|
|
1958
|
-
result.push(`${indent}${key} = '''`);
|
|
1959
|
-
for (let j = 0; j < bodyLines.length - 1; j++) {
|
|
1960
|
-
result.push(unescapeHeredocContent(bodyLines[j]));
|
|
1961
|
-
}
|
|
1962
|
-
result.push(`${unescapeHeredocContent(bodyLines[bodyLines.length - 1])}'''`);
|
|
1963
|
-
}
|
|
1964
|
-
if (!foundClosing) {
|
|
1746
|
+
function setByPointer(obj, pointer, value) {
|
|
1747
|
+
const segments = pointer.split("/");
|
|
1748
|
+
let current = obj;
|
|
1749
|
+
for (let i = 0; i < segments.length - 1; i++) {
|
|
1750
|
+
const segment = segments[i];
|
|
1751
|
+
const nextSegment = segments[i + 1];
|
|
1752
|
+
const nextIsArrayIndex = /^\d+$/.test(nextSegment);
|
|
1753
|
+
if (Array.isArray(current)) {
|
|
1754
|
+
const index = parseInt(segment, 10);
|
|
1755
|
+
if (isNaN(index) || index < 0) {
|
|
1756
|
+
throw new Error(`Invalid array index: ${segment}`);
|
|
1757
|
+
}
|
|
1758
|
+
if (index > current.length) {
|
|
1759
|
+
throw new Error(`Array index gap: expected ${current.length}, got ${index}`);
|
|
1760
|
+
}
|
|
1761
|
+
if (current[index] === void 0) {
|
|
1762
|
+
current[index] = nextIsArrayIndex ? [] : {};
|
|
1763
|
+
}
|
|
1764
|
+
current = current[index];
|
|
1765
|
+
} else {
|
|
1766
|
+
const rec = current;
|
|
1767
|
+
if (rec[segment] === void 0) {
|
|
1768
|
+
rec[segment] = nextIsArrayIndex ? [] : {};
|
|
1965
1769
|
}
|
|
1966
|
-
|
|
1770
|
+
current = rec[segment];
|
|
1967
1771
|
}
|
|
1968
|
-
result.push(line);
|
|
1969
|
-
i++;
|
|
1970
1772
|
}
|
|
1971
|
-
|
|
1773
|
+
const lastSegment = segments[segments.length - 1];
|
|
1774
|
+
const coercedValue = coerceValue(value);
|
|
1775
|
+
if (Array.isArray(current)) {
|
|
1776
|
+
const index = parseInt(lastSegment, 10);
|
|
1777
|
+
if (isNaN(index) || index < 0) {
|
|
1778
|
+
throw new Error(`Invalid array index: ${lastSegment}`);
|
|
1779
|
+
}
|
|
1780
|
+
if (index > current.length) {
|
|
1781
|
+
throw new Error(`Array index gap: expected ${current.length}, got ${index}`);
|
|
1782
|
+
}
|
|
1783
|
+
current[index] = coercedValue;
|
|
1784
|
+
} else {
|
|
1785
|
+
current[lastSegment] = coercedValue;
|
|
1786
|
+
}
|
|
1972
1787
|
}
|
|
1788
|
+
var init_block_params = __esm({
|
|
1789
|
+
"src/gadgets/block-params.ts"() {
|
|
1790
|
+
"use strict";
|
|
1791
|
+
init_constants();
|
|
1792
|
+
}
|
|
1793
|
+
});
|
|
1794
|
+
|
|
1795
|
+
// src/gadgets/parser.ts
|
|
1973
1796
|
function stripMarkdownFences(content) {
|
|
1974
1797
|
let cleaned = content.trim();
|
|
1975
1798
|
const openingFence = /^```(?:toml|yaml|json)?\s*\n/i;
|
|
@@ -1978,24 +1801,23 @@ function stripMarkdownFences(content) {
|
|
|
1978
1801
|
cleaned = cleaned.replace(closingFence, "");
|
|
1979
1802
|
return cleaned.trim();
|
|
1980
1803
|
}
|
|
1981
|
-
var
|
|
1804
|
+
var globalInvocationCounter, StreamParser;
|
|
1982
1805
|
var init_parser = __esm({
|
|
1983
1806
|
"src/gadgets/parser.ts"() {
|
|
1984
1807
|
"use strict";
|
|
1985
|
-
yaml2 = __toESM(require("js-yaml"), 1);
|
|
1986
|
-
import_js_toml = require("js-toml");
|
|
1987
1808
|
init_constants();
|
|
1809
|
+
init_block_params();
|
|
1988
1810
|
globalInvocationCounter = 0;
|
|
1989
1811
|
StreamParser = class {
|
|
1990
1812
|
buffer = "";
|
|
1991
1813
|
lastReportedTextLength = 0;
|
|
1992
1814
|
startPrefix;
|
|
1993
1815
|
endPrefix;
|
|
1994
|
-
|
|
1816
|
+
argPrefix;
|
|
1995
1817
|
constructor(options = {}) {
|
|
1996
1818
|
this.startPrefix = options.startPrefix ?? GADGET_START_PREFIX;
|
|
1997
1819
|
this.endPrefix = options.endPrefix ?? GADGET_END_PREFIX;
|
|
1998
|
-
this.
|
|
1820
|
+
this.argPrefix = options.argPrefix ?? GADGET_ARG_PREFIX;
|
|
1999
1821
|
}
|
|
2000
1822
|
takeTextUntil(index) {
|
|
2001
1823
|
if (index <= this.lastReportedTextLength) {
|
|
@@ -2030,43 +1852,14 @@ var init_parser = __esm({
|
|
|
2030
1852
|
return `${firstLine.slice(0, maxLen)}... (${message.length} chars total)`;
|
|
2031
1853
|
}
|
|
2032
1854
|
/**
|
|
2033
|
-
* Parse parameter string
|
|
1855
|
+
* Parse parameter string using block format
|
|
2034
1856
|
*/
|
|
2035
1857
|
parseParameters(raw) {
|
|
2036
1858
|
const cleaned = stripMarkdownFences(raw);
|
|
2037
|
-
if (this.parameterFormat === "json") {
|
|
2038
|
-
try {
|
|
2039
|
-
return { parameters: JSON.parse(cleaned) };
|
|
2040
|
-
} catch (error) {
|
|
2041
|
-
return { parseError: this.truncateParseError(error, "JSON") };
|
|
2042
|
-
}
|
|
2043
|
-
}
|
|
2044
|
-
if (this.parameterFormat === "yaml") {
|
|
2045
|
-
try {
|
|
2046
|
-
return { parameters: yaml2.load(preprocessYaml(cleaned)) };
|
|
2047
|
-
} catch (error) {
|
|
2048
|
-
return { parseError: this.truncateParseError(error, "YAML") };
|
|
2049
|
-
}
|
|
2050
|
-
}
|
|
2051
|
-
if (this.parameterFormat === "toml") {
|
|
2052
|
-
try {
|
|
2053
|
-
return { parameters: (0, import_js_toml.load)(preprocessTomlHeredoc(cleaned)) };
|
|
2054
|
-
} catch (error) {
|
|
2055
|
-
return { parseError: this.truncateParseError(error, "TOML") };
|
|
2056
|
-
}
|
|
2057
|
-
}
|
|
2058
1859
|
try {
|
|
2059
|
-
return { parameters:
|
|
2060
|
-
} catch {
|
|
2061
|
-
|
|
2062
|
-
return { parameters: (0, import_js_toml.load)(preprocessTomlHeredoc(cleaned)) };
|
|
2063
|
-
} catch {
|
|
2064
|
-
try {
|
|
2065
|
-
return { parameters: yaml2.load(preprocessYaml(cleaned)) };
|
|
2066
|
-
} catch (error) {
|
|
2067
|
-
return { parseError: this.truncateParseError(error, "auto") };
|
|
2068
|
-
}
|
|
2069
|
-
}
|
|
1860
|
+
return { parameters: parseBlockParams(cleaned, { argPrefix: this.argPrefix }) };
|
|
1861
|
+
} catch (error) {
|
|
1862
|
+
return { parseError: this.truncateParseError(error, "block") };
|
|
2070
1863
|
}
|
|
2071
1864
|
}
|
|
2072
1865
|
// Feed a chunk of text and get parsed events
|
|
@@ -2125,8 +1918,7 @@ var init_parser = __esm({
|
|
|
2125
1918
|
call: {
|
|
2126
1919
|
gadgetName: actualGadgetName,
|
|
2127
1920
|
invocationId,
|
|
2128
|
-
|
|
2129
|
-
// Keep property name for backward compatibility
|
|
1921
|
+
parametersRaw,
|
|
2130
1922
|
parameters,
|
|
2131
1923
|
parseError
|
|
2132
1924
|
}
|
|
@@ -2160,7 +1952,7 @@ var init_parser = __esm({
|
|
|
2160
1952
|
call: {
|
|
2161
1953
|
gadgetName: actualGadgetName,
|
|
2162
1954
|
invocationId,
|
|
2163
|
-
|
|
1955
|
+
parametersRaw,
|
|
2164
1956
|
parameters,
|
|
2165
1957
|
parseError
|
|
2166
1958
|
}
|
|
@@ -2211,9 +2003,9 @@ var init_stream_processor = __esm({
|
|
|
2211
2003
|
this.stopOnGadgetError = options.stopOnGadgetError ?? true;
|
|
2212
2004
|
this.shouldContinueAfterError = options.shouldContinueAfterError;
|
|
2213
2005
|
this.parser = new StreamParser({
|
|
2214
|
-
parameterFormat: options.parameterFormat,
|
|
2215
2006
|
startPrefix: options.gadgetStartPrefix,
|
|
2216
|
-
endPrefix: options.gadgetEndPrefix
|
|
2007
|
+
endPrefix: options.gadgetEndPrefix,
|
|
2008
|
+
argPrefix: options.gadgetArgPrefix
|
|
2217
2009
|
});
|
|
2218
2010
|
this.executor = new GadgetExecutor(
|
|
2219
2011
|
options.registry,
|
|
@@ -2367,7 +2159,7 @@ var init_stream_processor = __esm({
|
|
|
2367
2159
|
this.logger.warn("Gadget has parse error", {
|
|
2368
2160
|
gadgetName: call.gadgetName,
|
|
2369
2161
|
error: call.parseError,
|
|
2370
|
-
rawParameters: call.
|
|
2162
|
+
rawParameters: call.parametersRaw
|
|
2371
2163
|
});
|
|
2372
2164
|
const shouldContinue = await this.checkContinueAfterError(
|
|
2373
2165
|
call.parseError,
|
|
@@ -2603,9 +2395,9 @@ var init_agent = __esm({
|
|
|
2603
2395
|
hooks;
|
|
2604
2396
|
conversation;
|
|
2605
2397
|
registry;
|
|
2606
|
-
parameterFormat;
|
|
2607
2398
|
gadgetStartPrefix;
|
|
2608
2399
|
gadgetEndPrefix;
|
|
2400
|
+
gadgetArgPrefix;
|
|
2609
2401
|
onHumanInputRequired;
|
|
2610
2402
|
textOnlyHandler;
|
|
2611
2403
|
textWithGadgetsHandler;
|
|
@@ -2634,9 +2426,9 @@ var init_agent = __esm({
|
|
|
2634
2426
|
this.temperature = options.temperature;
|
|
2635
2427
|
this.logger = options.logger ?? createLogger({ name: "llmist:agent" });
|
|
2636
2428
|
this.registry = options.registry;
|
|
2637
|
-
this.parameterFormat = options.parameterFormat ?? "json";
|
|
2638
2429
|
this.gadgetStartPrefix = options.gadgetStartPrefix;
|
|
2639
2430
|
this.gadgetEndPrefix = options.gadgetEndPrefix;
|
|
2431
|
+
this.gadgetArgPrefix = options.gadgetArgPrefix;
|
|
2640
2432
|
this.onHumanInputRequired = options.onHumanInputRequired;
|
|
2641
2433
|
this.textOnlyHandler = options.textOnlyHandler ?? "terminate";
|
|
2642
2434
|
this.textWithGadgetsHandler = options.textWithGadgetsHandler;
|
|
@@ -2658,9 +2450,10 @@ var init_agent = __esm({
|
|
|
2658
2450
|
if (options.systemPrompt) {
|
|
2659
2451
|
baseBuilder.addSystem(options.systemPrompt);
|
|
2660
2452
|
}
|
|
2661
|
-
baseBuilder.addGadgets(this.registry.getAll(),
|
|
2453
|
+
baseBuilder.addGadgets(this.registry.getAll(), {
|
|
2662
2454
|
startPrefix: options.gadgetStartPrefix,
|
|
2663
|
-
endPrefix: options.gadgetEndPrefix
|
|
2455
|
+
endPrefix: options.gadgetEndPrefix,
|
|
2456
|
+
argPrefix: options.gadgetArgPrefix
|
|
2664
2457
|
});
|
|
2665
2458
|
const baseMessages = baseBuilder.build();
|
|
2666
2459
|
const initialMessages = (options.initialMessages ?? []).map((message) => ({
|
|
@@ -2668,9 +2461,9 @@ var init_agent = __esm({
|
|
|
2668
2461
|
content: message.content
|
|
2669
2462
|
}));
|
|
2670
2463
|
this.conversation = new ConversationManager(baseMessages, initialMessages, {
|
|
2671
|
-
parameterFormat: this.parameterFormat,
|
|
2672
2464
|
startPrefix: options.gadgetStartPrefix,
|
|
2673
|
-
endPrefix: options.gadgetEndPrefix
|
|
2465
|
+
endPrefix: options.gadgetEndPrefix,
|
|
2466
|
+
argPrefix: options.gadgetArgPrefix
|
|
2674
2467
|
});
|
|
2675
2468
|
this.userPromptProvided = !!options.userPrompt;
|
|
2676
2469
|
if (options.userPrompt) {
|
|
@@ -2763,9 +2556,9 @@ var init_agent = __esm({
|
|
|
2763
2556
|
const processor = new StreamProcessor({
|
|
2764
2557
|
iteration: currentIteration,
|
|
2765
2558
|
registry: this.registry,
|
|
2766
|
-
parameterFormat: this.parameterFormat,
|
|
2767
2559
|
gadgetStartPrefix: this.gadgetStartPrefix,
|
|
2768
2560
|
gadgetEndPrefix: this.gadgetEndPrefix,
|
|
2561
|
+
gadgetArgPrefix: this.gadgetArgPrefix,
|
|
2769
2562
|
hooks: this.hooks,
|
|
2770
2563
|
logger: this.logger.getSubLogger({ name: "stream-processor" }),
|
|
2771
2564
|
onHumanInputRequired: this.onHumanInputRequired,
|
|
@@ -3050,7 +2843,8 @@ var init_anthropic_models = __esm({
|
|
|
3050
2843
|
pricing: {
|
|
3051
2844
|
input: 3,
|
|
3052
2845
|
output: 15,
|
|
3053
|
-
cachedInput: 0.3
|
|
2846
|
+
cachedInput: 0.3,
|
|
2847
|
+
cacheWriteInput: 3.75
|
|
3054
2848
|
},
|
|
3055
2849
|
knowledgeCutoff: "2025-01",
|
|
3056
2850
|
features: {
|
|
@@ -3074,7 +2868,8 @@ var init_anthropic_models = __esm({
|
|
|
3074
2868
|
pricing: {
|
|
3075
2869
|
input: 1,
|
|
3076
2870
|
output: 5,
|
|
3077
|
-
cachedInput: 0.1
|
|
2871
|
+
cachedInput: 0.1,
|
|
2872
|
+
cacheWriteInput: 1.25
|
|
3078
2873
|
},
|
|
3079
2874
|
knowledgeCutoff: "2025-02",
|
|
3080
2875
|
features: {
|
|
@@ -3098,7 +2893,8 @@ var init_anthropic_models = __esm({
|
|
|
3098
2893
|
pricing: {
|
|
3099
2894
|
input: 3,
|
|
3100
2895
|
output: 15,
|
|
3101
|
-
cachedInput: 0.3
|
|
2896
|
+
cachedInput: 0.3,
|
|
2897
|
+
cacheWriteInput: 3.75
|
|
3102
2898
|
},
|
|
3103
2899
|
knowledgeCutoff: "2025-03",
|
|
3104
2900
|
features: {
|
|
@@ -3122,7 +2918,8 @@ var init_anthropic_models = __esm({
|
|
|
3122
2918
|
pricing: {
|
|
3123
2919
|
input: 3,
|
|
3124
2920
|
output: 15,
|
|
3125
|
-
cachedInput: 0.3
|
|
2921
|
+
cachedInput: 0.3,
|
|
2922
|
+
cacheWriteInput: 3.75
|
|
3126
2923
|
},
|
|
3127
2924
|
knowledgeCutoff: "2024-11",
|
|
3128
2925
|
features: {
|
|
@@ -3146,7 +2943,8 @@ var init_anthropic_models = __esm({
|
|
|
3146
2943
|
pricing: {
|
|
3147
2944
|
input: 15,
|
|
3148
2945
|
output: 75,
|
|
3149
|
-
cachedInput: 1.5
|
|
2946
|
+
cachedInput: 1.5,
|
|
2947
|
+
cacheWriteInput: 18.75
|
|
3150
2948
|
},
|
|
3151
2949
|
knowledgeCutoff: "2025-01",
|
|
3152
2950
|
features: {
|
|
@@ -3170,7 +2968,8 @@ var init_anthropic_models = __esm({
|
|
|
3170
2968
|
pricing: {
|
|
3171
2969
|
input: 15,
|
|
3172
2970
|
output: 75,
|
|
3173
|
-
cachedInput: 1.5
|
|
2971
|
+
cachedInput: 1.5,
|
|
2972
|
+
cacheWriteInput: 18.75
|
|
3174
2973
|
},
|
|
3175
2974
|
knowledgeCutoff: "2025-03",
|
|
3176
2975
|
features: {
|
|
@@ -3193,7 +2992,8 @@ var init_anthropic_models = __esm({
|
|
|
3193
2992
|
pricing: {
|
|
3194
2993
|
input: 0.8,
|
|
3195
2994
|
output: 4,
|
|
3196
|
-
cachedInput: 0.08
|
|
2995
|
+
cachedInput: 0.08,
|
|
2996
|
+
cacheWriteInput: 1
|
|
3197
2997
|
},
|
|
3198
2998
|
knowledgeCutoff: "2024-07",
|
|
3199
2999
|
features: {
|
|
@@ -3216,7 +3016,8 @@ var init_anthropic_models = __esm({
|
|
|
3216
3016
|
pricing: {
|
|
3217
3017
|
input: 0.25,
|
|
3218
3018
|
output: 1.25,
|
|
3219
|
-
cachedInput: 0.025
|
|
3019
|
+
cachedInput: 0.025,
|
|
3020
|
+
cacheWriteInput: 0.3125
|
|
3220
3021
|
},
|
|
3221
3022
|
knowledgeCutoff: "2023-08",
|
|
3222
3023
|
features: {
|
|
@@ -3240,7 +3041,8 @@ var init_anthropic_models = __esm({
|
|
|
3240
3041
|
pricing: {
|
|
3241
3042
|
input: 1,
|
|
3242
3043
|
output: 5,
|
|
3243
|
-
cachedInput: 0.1
|
|
3044
|
+
cachedInput: 0.1,
|
|
3045
|
+
cacheWriteInput: 1.25
|
|
3244
3046
|
},
|
|
3245
3047
|
knowledgeCutoff: "2025-02",
|
|
3246
3048
|
features: {
|
|
@@ -3264,7 +3066,8 @@ var init_anthropic_models = __esm({
|
|
|
3264
3066
|
pricing: {
|
|
3265
3067
|
input: 3,
|
|
3266
3068
|
output: 15,
|
|
3267
|
-
cachedInput: 0.3
|
|
3069
|
+
cachedInput: 0.3,
|
|
3070
|
+
cacheWriteInput: 3.75
|
|
3268
3071
|
},
|
|
3269
3072
|
knowledgeCutoff: "2025-01",
|
|
3270
3073
|
features: {
|
|
@@ -3288,7 +3091,8 @@ var init_anthropic_models = __esm({
|
|
|
3288
3091
|
pricing: {
|
|
3289
3092
|
input: 5,
|
|
3290
3093
|
output: 25,
|
|
3291
|
-
cachedInput: 0.5
|
|
3094
|
+
cachedInput: 0.5,
|
|
3095
|
+
cacheWriteInput: 6.25
|
|
3292
3096
|
},
|
|
3293
3097
|
knowledgeCutoff: "2025-03",
|
|
3294
3098
|
features: {
|
|
@@ -3403,15 +3207,27 @@ var init_anthropic = __esm({
|
|
|
3403
3207
|
}
|
|
3404
3208
|
buildRequestPayload(options, descriptor, spec, messages) {
|
|
3405
3209
|
const systemMessages = messages.filter((message) => message.role === "system");
|
|
3406
|
-
const system = systemMessages.length > 0 ? systemMessages.map((m) =>
|
|
3407
|
-
|
|
3210
|
+
const system = systemMessages.length > 0 ? systemMessages.map((m, index) => ({
|
|
3211
|
+
type: "text",
|
|
3212
|
+
text: m.content,
|
|
3213
|
+
// Add cache_control to the LAST system message block
|
|
3214
|
+
...index === systemMessages.length - 1 ? { cache_control: { type: "ephemeral" } } : {}
|
|
3215
|
+
})) : void 0;
|
|
3216
|
+
const nonSystemMessages = messages.filter(
|
|
3408
3217
|
(message) => message.role !== "system"
|
|
3409
|
-
)
|
|
3218
|
+
);
|
|
3219
|
+
const lastUserIndex = nonSystemMessages.reduce(
|
|
3220
|
+
(lastIdx, msg, idx) => msg.role === "user" ? idx : lastIdx,
|
|
3221
|
+
-1
|
|
3222
|
+
);
|
|
3223
|
+
const conversation = nonSystemMessages.map((message, index) => ({
|
|
3410
3224
|
role: message.role,
|
|
3411
3225
|
content: [
|
|
3412
3226
|
{
|
|
3413
3227
|
type: "text",
|
|
3414
|
-
text: message.content
|
|
3228
|
+
text: message.content,
|
|
3229
|
+
// Add cache_control to the LAST user message
|
|
3230
|
+
...message.role === "user" && index === lastUserIndex ? { cache_control: { type: "ephemeral" } } : {}
|
|
3415
3231
|
}
|
|
3416
3232
|
]
|
|
3417
3233
|
}));
|
|
@@ -3437,15 +3253,22 @@ var init_anthropic = __esm({
|
|
|
3437
3253
|
async *wrapStream(iterable) {
|
|
3438
3254
|
const stream2 = iterable;
|
|
3439
3255
|
let inputTokens = 0;
|
|
3256
|
+
let cachedInputTokens = 0;
|
|
3257
|
+
let cacheCreationInputTokens = 0;
|
|
3440
3258
|
for await (const event of stream2) {
|
|
3441
3259
|
if (event.type === "message_start") {
|
|
3442
|
-
|
|
3260
|
+
const usage = event.message.usage;
|
|
3261
|
+
cachedInputTokens = usage.cache_read_input_tokens ?? 0;
|
|
3262
|
+
cacheCreationInputTokens = usage.cache_creation_input_tokens ?? 0;
|
|
3263
|
+
inputTokens = usage.input_tokens + cachedInputTokens + cacheCreationInputTokens;
|
|
3443
3264
|
yield {
|
|
3444
3265
|
text: "",
|
|
3445
3266
|
usage: {
|
|
3446
3267
|
inputTokens,
|
|
3447
3268
|
outputTokens: 0,
|
|
3448
|
-
totalTokens: inputTokens
|
|
3269
|
+
totalTokens: inputTokens,
|
|
3270
|
+
cachedInputTokens,
|
|
3271
|
+
cacheCreationInputTokens
|
|
3449
3272
|
},
|
|
3450
3273
|
rawEvent: event
|
|
3451
3274
|
};
|
|
@@ -3459,7 +3282,9 @@ var init_anthropic = __esm({
|
|
|
3459
3282
|
const usage = event.usage ? {
|
|
3460
3283
|
inputTokens,
|
|
3461
3284
|
outputTokens: event.usage.output_tokens,
|
|
3462
|
-
totalTokens: inputTokens + event.usage.output_tokens
|
|
3285
|
+
totalTokens: inputTokens + event.usage.output_tokens,
|
|
3286
|
+
cachedInputTokens,
|
|
3287
|
+
cacheCreationInputTokens
|
|
3463
3288
|
} : void 0;
|
|
3464
3289
|
if (event.delta.stop_reason || usage) {
|
|
3465
3290
|
yield {
|
|
@@ -3540,6 +3365,7 @@ var init_gemini_models = __esm({
|
|
|
3540
3365
|
"src/providers/gemini-models.ts"() {
|
|
3541
3366
|
"use strict";
|
|
3542
3367
|
GEMINI_MODELS = [
|
|
3368
|
+
// Gemini 3 Pro (Preview)
|
|
3543
3369
|
{
|
|
3544
3370
|
provider: "gemini",
|
|
3545
3371
|
modelId: "gemini-3-pro-preview",
|
|
@@ -3548,8 +3374,11 @@ var init_gemini_models = __esm({
|
|
|
3548
3374
|
maxOutputTokens: 65536,
|
|
3549
3375
|
pricing: {
|
|
3550
3376
|
input: 2,
|
|
3377
|
+
// $2.00 for prompts <= 200k, $4.00 for > 200k (using lower tier)
|
|
3551
3378
|
output: 12,
|
|
3379
|
+
// $12.00 for prompts <= 200k, $18.00 for > 200k
|
|
3552
3380
|
cachedInput: 0.2
|
|
3381
|
+
// $0.20 for prompts <= 200k
|
|
3553
3382
|
},
|
|
3554
3383
|
knowledgeCutoff: "2025-01",
|
|
3555
3384
|
features: {
|
|
@@ -3562,9 +3391,10 @@ var init_gemini_models = __esm({
|
|
|
3562
3391
|
metadata: {
|
|
3563
3392
|
family: "Gemini 3",
|
|
3564
3393
|
releaseDate: "2025-11-18",
|
|
3565
|
-
notes: "
|
|
3394
|
+
notes: "Best model for multimodal understanding, agentic and vibe-coding. Deep Think mode available."
|
|
3566
3395
|
}
|
|
3567
3396
|
},
|
|
3397
|
+
// Gemini 2.5 Pro
|
|
3568
3398
|
{
|
|
3569
3399
|
provider: "gemini",
|
|
3570
3400
|
modelId: "gemini-2.5-pro",
|
|
@@ -3573,8 +3403,11 @@ var init_gemini_models = __esm({
|
|
|
3573
3403
|
maxOutputTokens: 65536,
|
|
3574
3404
|
pricing: {
|
|
3575
3405
|
input: 1.25,
|
|
3406
|
+
// $1.25 for prompts <= 200k, $2.50 for > 200k
|
|
3576
3407
|
output: 10,
|
|
3408
|
+
// $10.00 for prompts <= 200k, $15.00 for > 200k
|
|
3577
3409
|
cachedInput: 0.125
|
|
3410
|
+
// $0.125 for prompts <= 200k
|
|
3578
3411
|
},
|
|
3579
3412
|
knowledgeCutoff: "2025-01",
|
|
3580
3413
|
features: {
|
|
@@ -3587,9 +3420,10 @@ var init_gemini_models = __esm({
|
|
|
3587
3420
|
metadata: {
|
|
3588
3421
|
family: "Gemini 2.5",
|
|
3589
3422
|
releaseDate: "2025-06",
|
|
3590
|
-
notes: "
|
|
3423
|
+
notes: "State-of-the-art multipurpose model. Excels at coding and complex reasoning."
|
|
3591
3424
|
}
|
|
3592
3425
|
},
|
|
3426
|
+
// Gemini 2.5 Flash
|
|
3593
3427
|
{
|
|
3594
3428
|
provider: "gemini",
|
|
3595
3429
|
modelId: "gemini-2.5-flash",
|
|
@@ -3598,8 +3432,10 @@ var init_gemini_models = __esm({
|
|
|
3598
3432
|
maxOutputTokens: 65536,
|
|
3599
3433
|
pricing: {
|
|
3600
3434
|
input: 0.3,
|
|
3435
|
+
// $0.30 for text/image/video, $1.00 for audio
|
|
3601
3436
|
output: 2.5,
|
|
3602
3437
|
cachedInput: 0.03
|
|
3438
|
+
// $0.03 for text/image/video
|
|
3603
3439
|
},
|
|
3604
3440
|
knowledgeCutoff: "2025-01",
|
|
3605
3441
|
features: {
|
|
@@ -3612,9 +3448,10 @@ var init_gemini_models = __esm({
|
|
|
3612
3448
|
metadata: {
|
|
3613
3449
|
family: "Gemini 2.5",
|
|
3614
3450
|
releaseDate: "2025-06",
|
|
3615
|
-
notes: "
|
|
3451
|
+
notes: "First hybrid reasoning model with 1M context and thinking budgets."
|
|
3616
3452
|
}
|
|
3617
3453
|
},
|
|
3454
|
+
// Gemini 2.5 Flash-Lite
|
|
3618
3455
|
{
|
|
3619
3456
|
provider: "gemini",
|
|
3620
3457
|
modelId: "gemini-2.5-flash-lite",
|
|
@@ -3623,8 +3460,10 @@ var init_gemini_models = __esm({
|
|
|
3623
3460
|
maxOutputTokens: 65536,
|
|
3624
3461
|
pricing: {
|
|
3625
3462
|
input: 0.1,
|
|
3463
|
+
// $0.10 for text/image/video, $0.30 for audio
|
|
3626
3464
|
output: 0.4,
|
|
3627
3465
|
cachedInput: 0.01
|
|
3466
|
+
// $0.01 for text/image/video
|
|
3628
3467
|
},
|
|
3629
3468
|
knowledgeCutoff: "2025-01",
|
|
3630
3469
|
features: {
|
|
@@ -3636,9 +3475,10 @@ var init_gemini_models = __esm({
|
|
|
3636
3475
|
metadata: {
|
|
3637
3476
|
family: "Gemini 2.5",
|
|
3638
3477
|
releaseDate: "2025-06",
|
|
3639
|
-
notes: "
|
|
3478
|
+
notes: "Smallest and most cost effective model, built for at scale usage."
|
|
3640
3479
|
}
|
|
3641
3480
|
},
|
|
3481
|
+
// Gemini 2.0 Flash
|
|
3642
3482
|
{
|
|
3643
3483
|
provider: "gemini",
|
|
3644
3484
|
modelId: "gemini-2.0-flash",
|
|
@@ -3647,8 +3487,10 @@ var init_gemini_models = __esm({
|
|
|
3647
3487
|
maxOutputTokens: 8192,
|
|
3648
3488
|
pricing: {
|
|
3649
3489
|
input: 0.1,
|
|
3490
|
+
// $0.10 for text/image/video, $0.70 for audio
|
|
3650
3491
|
output: 0.4,
|
|
3651
|
-
cachedInput: 0.
|
|
3492
|
+
cachedInput: 0.025
|
|
3493
|
+
// $0.025 for text/image/video
|
|
3652
3494
|
},
|
|
3653
3495
|
knowledgeCutoff: "2024-08",
|
|
3654
3496
|
features: {
|
|
@@ -3659,9 +3501,10 @@ var init_gemini_models = __esm({
|
|
|
3659
3501
|
},
|
|
3660
3502
|
metadata: {
|
|
3661
3503
|
family: "Gemini 2.0",
|
|
3662
|
-
notes: "
|
|
3504
|
+
notes: "Balanced multimodal model with 1M context, built for the era of Agents."
|
|
3663
3505
|
}
|
|
3664
3506
|
},
|
|
3507
|
+
// Gemini 2.0 Flash-Lite
|
|
3665
3508
|
{
|
|
3666
3509
|
provider: "gemini",
|
|
3667
3510
|
modelId: "gemini-2.0-flash-lite",
|
|
@@ -3670,8 +3513,8 @@ var init_gemini_models = __esm({
|
|
|
3670
3513
|
maxOutputTokens: 8192,
|
|
3671
3514
|
pricing: {
|
|
3672
3515
|
input: 0.075,
|
|
3673
|
-
output: 0.3
|
|
3674
|
-
|
|
3516
|
+
output: 0.3
|
|
3517
|
+
// No context caching available for 2.0-flash-lite
|
|
3675
3518
|
},
|
|
3676
3519
|
knowledgeCutoff: "2024-08",
|
|
3677
3520
|
features: {
|
|
@@ -3682,7 +3525,7 @@ var init_gemini_models = __esm({
|
|
|
3682
3525
|
},
|
|
3683
3526
|
metadata: {
|
|
3684
3527
|
family: "Gemini 2.0",
|
|
3685
|
-
notes: "
|
|
3528
|
+
notes: "Smallest and most cost effective 2.0 model for at scale usage."
|
|
3686
3529
|
}
|
|
3687
3530
|
}
|
|
3688
3531
|
];
|
|
@@ -3852,7 +3695,9 @@ var init_gemini = __esm({
|
|
|
3852
3695
|
return {
|
|
3853
3696
|
inputTokens: usageMetadata.promptTokenCount ?? 0,
|
|
3854
3697
|
outputTokens: usageMetadata.candidatesTokenCount ?? 0,
|
|
3855
|
-
totalTokens: usageMetadata.totalTokenCount ?? 0
|
|
3698
|
+
totalTokens: usageMetadata.totalTokenCount ?? 0,
|
|
3699
|
+
// Gemini returns cached token count in cachedContentTokenCount
|
|
3700
|
+
cachedInputTokens: usageMetadata.cachedContentTokenCount ?? 0
|
|
3856
3701
|
};
|
|
3857
3702
|
}
|
|
3858
3703
|
/**
|
|
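The Gemini adapter change above threads `cachedContentTokenCount` through to the normalized `cachedInputTokens` field. A minimal sketch of that mapping, with an illustrative usage object (the interface below is hand-written for the example, not part of the package):

```typescript
// Sketch of the usage mapping added in init_gemini above; sample values are made up.
interface GeminiUsageMetadata {
  promptTokenCount?: number;
  candidatesTokenCount?: number;
  totalTokenCount?: number;
  cachedContentTokenCount?: number;
}

function toUsage(usageMetadata: GeminiUsageMetadata) {
  return {
    inputTokens: usageMetadata.promptTokenCount ?? 0,
    outputTokens: usageMetadata.candidatesTokenCount ?? 0,
    totalTokens: usageMetadata.totalTokenCount ?? 0,
    // Cache reads are reported separately and are a subset of promptTokenCount.
    cachedInputTokens: usageMetadata.cachedContentTokenCount ?? 0,
  };
}

// toUsage({ promptTokenCount: 1200, candidatesTokenCount: 300, totalTokenCount: 1500, cachedContentTokenCount: 800 })
// -> { inputTokens: 1200, outputTokens: 300, totalTokens: 1500, cachedInputTokens: 800 }
```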
@@ -3908,10 +3753,11 @@ var init_openai_models = __esm({
|
|
|
3908
3753
|
"src/providers/openai-models.ts"() {
|
|
3909
3754
|
"use strict";
|
|
3910
3755
|
OPENAI_MODELS = [
|
|
3756
|
+
// GPT-5 Family
|
|
3911
3757
|
{
|
|
3912
3758
|
provider: "openai",
|
|
3913
3759
|
modelId: "gpt-5.1",
|
|
3914
|
-
displayName: "GPT-5.1
|
|
3760
|
+
displayName: "GPT-5.1",
|
|
3915
3761
|
contextWindow: 128e3,
|
|
3916
3762
|
maxOutputTokens: 32768,
|
|
3917
3763
|
pricing: {
|
|
@@ -3931,34 +3777,7 @@ var init_openai_models = __esm({
|
|
|
3931
3777
|
metadata: {
|
|
3932
3778
|
family: "GPT-5",
|
|
3933
3779
|
releaseDate: "2025-11-12",
|
|
3934
|
-
notes: "
|
|
3935
|
-
supportsTemperature: false
|
|
3936
|
-
}
|
|
3937
|
-
},
|
|
3938
|
-
{
|
|
3939
|
-
provider: "openai",
|
|
3940
|
-
modelId: "gpt-5.1-thinking",
|
|
3941
|
-
displayName: "GPT-5.1 Thinking",
|
|
3942
|
-
contextWindow: 196e3,
|
|
3943
|
-
maxOutputTokens: 32768,
|
|
3944
|
-
pricing: {
|
|
3945
|
-
input: 1.25,
|
|
3946
|
-
output: 10,
|
|
3947
|
-
cachedInput: 0.125
|
|
3948
|
-
},
|
|
3949
|
-
knowledgeCutoff: "2024-09-30",
|
|
3950
|
-
features: {
|
|
3951
|
-
streaming: true,
|
|
3952
|
-
functionCalling: true,
|
|
3953
|
-
vision: true,
|
|
3954
|
-
reasoning: true,
|
|
3955
|
-
structuredOutputs: true,
|
|
3956
|
-
fineTuning: true
|
|
3957
|
-
},
|
|
3958
|
-
metadata: {
|
|
3959
|
-
family: "GPT-5",
|
|
3960
|
-
releaseDate: "2025-11-12",
|
|
3961
|
-
notes: "Advanced reasoning with thinking levels: Light, Standard, Extended, Heavy. Best for complex tasks.",
|
|
3780
|
+
notes: "Latest GPT-5 with improved instruction following. 2-3x faster than GPT-5.",
|
|
3962
3781
|
supportsTemperature: false
|
|
3963
3782
|
}
|
|
3964
3783
|
},
|
|
@@ -4038,87 +3857,337 @@ var init_openai_models = __esm({
|
|
|
4038
3857
|
notes: "Fastest, most cost-efficient version for well-defined tasks",
|
|
4039
3858
|
supportsTemperature: false
|
|
4040
3859
|
}
|
|
4041
|
-
}
|
|
4042
|
-
|
|
4043
|
-
|
|
4044
|
-
|
|
4045
|
-
|
|
4046
|
-
|
|
4047
|
-
|
|
4048
|
-
|
|
4049
|
-
|
|
4050
|
-
|
|
4051
|
-
|
|
4052
|
-
|
|
4053
|
-
|
|
4054
|
-
|
|
4055
|
-
|
|
4056
|
-
|
|
4057
|
-
|
|
4058
|
-
|
|
4059
|
-
|
|
4060
|
-
|
|
4061
|
-
|
|
4062
|
-
|
|
4063
|
-
|
|
4064
|
-
|
|
4065
|
-
|
|
4066
|
-
|
|
4067
|
-
|
|
4068
|
-
|
|
4069
|
-
|
|
4070
|
-
|
|
4071
|
-
|
|
4072
|
-
|
|
4073
|
-
|
|
4074
|
-
|
|
4075
|
-
|
|
4076
|
-
|
|
4077
|
-
|
|
4078
|
-
|
|
4079
|
-
|
|
4080
|
-
|
|
4081
|
-
|
|
4082
|
-
|
|
4083
|
-
|
|
4084
|
-
|
|
4085
|
-
|
|
4086
|
-
|
|
4087
|
-
|
|
4088
|
-
|
|
4089
|
-
|
|
4090
|
-
|
|
4091
|
-
|
|
4092
|
-
|
|
4093
|
-
|
|
4094
|
-
|
|
4095
|
-
|
|
4096
|
-
|
|
4097
|
-
|
|
4098
|
-
|
|
4099
|
-
|
|
4100
|
-
|
|
4101
|
-
|
|
4102
|
-
|
|
4103
|
-
|
|
4104
|
-
|
|
4105
|
-
|
|
4106
|
-
|
|
4107
|
-
|
|
4108
|
-
|
|
4109
|
-
|
|
4110
|
-
|
|
4111
|
-
|
|
4112
|
-
|
|
4113
|
-
|
|
4114
|
-
|
|
4115
|
-
|
|
4116
|
-
|
|
4117
|
-
|
|
3860
|
+
},
|
|
3861
|
+
{
|
|
3862
|
+
provider: "openai",
|
|
3863
|
+
modelId: "gpt-5-pro",
|
|
3864
|
+
displayName: "GPT-5 Pro",
|
|
3865
|
+
contextWindow: 272e3,
|
|
3866
|
+
maxOutputTokens: 128e3,
|
|
3867
|
+
pricing: {
|
|
3868
|
+
input: 15,
|
|
3869
|
+
output: 120
|
|
3870
|
+
// No cached input pricing for gpt-5-pro
|
|
3871
|
+
},
|
|
3872
|
+
knowledgeCutoff: "2024-09-30",
|
|
3873
|
+
features: {
|
|
3874
|
+
streaming: true,
|
|
3875
|
+
functionCalling: true,
|
|
3876
|
+
vision: true,
|
|
3877
|
+
reasoning: true,
|
|
3878
|
+
structuredOutputs: true
|
|
3879
|
+
},
|
|
3880
|
+
metadata: {
|
|
3881
|
+
family: "GPT-5",
|
|
3882
|
+
notes: "Premium tier with enhanced capabilities. Does not support prompt caching.",
|
|
3883
|
+
supportsTemperature: false
|
|
3884
|
+
}
|
|
3885
|
+
},
|
|
3886
|
+
// GPT-4.1 Family
|
|
3887
|
+
{
|
|
3888
|
+
provider: "openai",
|
|
3889
|
+
modelId: "gpt-4.1",
|
|
3890
|
+
displayName: "GPT-4.1",
|
|
3891
|
+
contextWindow: 128e3,
|
|
3892
|
+
maxOutputTokens: 32768,
|
|
3893
|
+
pricing: {
|
|
3894
|
+
input: 2,
|
|
3895
|
+
output: 8,
|
|
3896
|
+
cachedInput: 0.5
|
|
3897
|
+
},
|
|
3898
|
+
knowledgeCutoff: "2024-04-01",
|
|
3899
|
+
features: {
|
|
3900
|
+
streaming: true,
|
|
3901
|
+
functionCalling: true,
|
|
3902
|
+
vision: true,
|
|
3903
|
+
structuredOutputs: true,
|
|
3904
|
+
fineTuning: true
|
|
3905
|
+
},
|
|
3906
|
+
metadata: {
|
|
3907
|
+
family: "GPT-4.1",
|
|
3908
|
+
notes: "Improved GPT-4 with better instruction following"
|
|
3909
|
+
}
|
|
3910
|
+
},
|
|
3911
|
+
{
|
|
3912
|
+
provider: "openai",
|
|
3913
|
+
modelId: "gpt-4.1-mini",
|
|
3914
|
+
displayName: "GPT-4.1 Mini",
|
|
3915
|
+
contextWindow: 128e3,
|
|
3916
|
+
maxOutputTokens: 32768,
|
|
3917
|
+
pricing: {
|
|
3918
|
+
input: 0.4,
|
|
3919
|
+
output: 1.6,
|
|
3920
|
+
cachedInput: 0.1
|
|
3921
|
+
},
|
|
3922
|
+
knowledgeCutoff: "2024-04-01",
|
|
3923
|
+
features: {
|
|
3924
|
+
streaming: true,
|
|
3925
|
+
functionCalling: true,
|
|
3926
|
+
vision: true,
|
|
3927
|
+
structuredOutputs: true,
|
|
3928
|
+
fineTuning: true
|
|
3929
|
+
},
|
|
3930
|
+
metadata: {
|
|
3931
|
+
family: "GPT-4.1",
|
|
3932
|
+
notes: "Cost-efficient GPT-4.1 variant"
|
|
3933
|
+
}
|
|
3934
|
+
},
|
|
3935
|
+
{
|
|
3936
|
+
provider: "openai",
|
|
3937
|
+
modelId: "gpt-4.1-nano",
|
|
3938
|
+
displayName: "GPT-4.1 Nano",
|
|
3939
|
+
contextWindow: 128e3,
|
|
3940
|
+
maxOutputTokens: 32768,
|
|
3941
|
+
pricing: {
|
|
3942
|
+
input: 0.1,
|
|
3943
|
+
output: 0.4,
|
|
3944
|
+
cachedInput: 0.025
|
|
3945
|
+
},
|
|
3946
|
+
knowledgeCutoff: "2024-04-01",
|
|
3947
|
+
features: {
|
|
3948
|
+
streaming: true,
|
|
3949
|
+
functionCalling: true,
|
|
3950
|
+
vision: true,
|
|
3951
|
+
structuredOutputs: true,
|
|
3952
|
+
fineTuning: true
|
|
3953
|
+
},
|
|
3954
|
+
metadata: {
|
|
3955
|
+
family: "GPT-4.1",
|
|
3956
|
+
notes: "Fastest GPT-4.1 variant for simple tasks"
|
|
3957
|
+
}
|
|
3958
|
+
},
|
|
3959
|
+
// GPT-4o Family
|
|
3960
|
+
{
|
|
3961
|
+
provider: "openai",
|
|
3962
|
+
modelId: "gpt-4o",
|
|
3963
|
+
displayName: "GPT-4o",
|
|
3964
|
+
contextWindow: 128e3,
|
|
3965
|
+
maxOutputTokens: 16384,
|
|
3966
|
+
pricing: {
|
|
3967
|
+
input: 2.5,
|
|
3968
|
+
output: 10,
|
|
3969
|
+
cachedInput: 1.25
|
|
3970
|
+
},
|
|
3971
|
+
knowledgeCutoff: "2024-04-01",
|
|
3972
|
+
features: {
|
|
3973
|
+
streaming: true,
|
|
3974
|
+
functionCalling: true,
|
|
3975
|
+
vision: true,
|
|
3976
|
+
structuredOutputs: true,
|
|
3977
|
+
fineTuning: true
|
|
3978
|
+
},
|
|
3979
|
+
metadata: {
|
|
3980
|
+
family: "GPT-4o",
|
|
3981
|
+
notes: "Multimodal model optimized for speed"
|
|
3982
|
+
}
|
|
3983
|
+
},
|
|
3984
|
+
{
|
|
3985
|
+
provider: "openai",
|
|
3986
|
+
modelId: "gpt-4o-mini",
|
|
3987
|
+
displayName: "GPT-4o Mini",
|
|
3988
|
+
contextWindow: 128e3,
|
|
3989
|
+
maxOutputTokens: 16384,
|
|
3990
|
+
pricing: {
|
|
3991
|
+
input: 0.15,
|
|
3992
|
+
output: 0.6,
|
|
3993
|
+
cachedInput: 0.075
|
|
3994
|
+
},
|
|
3995
|
+
knowledgeCutoff: "2024-04-01",
|
|
3996
|
+
features: {
|
|
3997
|
+
streaming: true,
|
|
3998
|
+
functionCalling: true,
|
|
3999
|
+
vision: true,
|
|
4000
|
+
structuredOutputs: true,
|
|
4001
|
+
fineTuning: true
|
|
4002
|
+
},
|
|
4003
|
+
metadata: {
|
|
4004
|
+
family: "GPT-4o",
|
|
4005
|
+
notes: "Fast and affordable multimodal model"
|
|
4006
|
+
}
|
|
4007
|
+
},
|
|
4008
|
+
// o-series (Reasoning models)
|
|
4009
|
+
{
|
|
4010
|
+
provider: "openai",
|
|
4011
|
+
modelId: "o1",
|
|
4012
|
+
displayName: "o1",
|
|
4013
|
+
contextWindow: 2e5,
|
|
4014
|
+
maxOutputTokens: 1e5,
|
|
4015
|
+
pricing: {
|
|
4016
|
+
input: 15,
|
|
4017
|
+
output: 60,
|
|
4018
|
+
cachedInput: 7.5
|
|
4019
|
+
},
|
|
4020
|
+
knowledgeCutoff: "2024-12-01",
|
|
4021
|
+
features: {
|
|
4022
|
+
streaming: true,
|
|
4023
|
+
functionCalling: true,
|
|
4024
|
+
vision: true,
|
|
4025
|
+
reasoning: true,
|
|
4026
|
+
structuredOutputs: true
|
|
4027
|
+
},
|
|
4028
|
+
metadata: {
|
|
4029
|
+
family: "o-series",
|
|
4030
|
+
notes: "Advanced reasoning model with chain-of-thought",
|
|
4031
|
+
supportsTemperature: false
|
|
4032
|
+
}
|
|
4033
|
+
},
|
|
4034
|
+
{
|
|
4035
|
+
provider: "openai",
|
|
4036
|
+
modelId: "o3",
|
|
4037
|
+
displayName: "o3",
|
|
4038
|
+
contextWindow: 2e5,
|
|
4039
|
+
maxOutputTokens: 1e5,
|
|
4040
|
+
pricing: {
|
|
4041
|
+
input: 2,
|
|
4042
|
+
output: 8,
|
|
4043
|
+
cachedInput: 0.5
|
|
4044
|
+
},
|
|
4045
|
+
knowledgeCutoff: "2025-01-01",
|
|
4046
|
+
features: {
|
|
4047
|
+
streaming: true,
|
|
4048
|
+
functionCalling: true,
|
|
4049
|
+
vision: true,
|
|
4050
|
+
reasoning: true,
|
|
4051
|
+
structuredOutputs: true
|
|
4052
|
+
},
|
|
4053
|
+
metadata: {
|
|
4054
|
+
family: "o-series",
|
|
4055
|
+
notes: "Next-gen reasoning model, more efficient than o1",
|
|
4056
|
+
supportsTemperature: false
|
|
4057
|
+
}
|
|
4058
|
+
},
|
|
4059
|
+
{
|
|
4060
|
+
provider: "openai",
|
|
4061
|
+
modelId: "o4-mini",
|
|
4062
|
+
displayName: "o4 Mini",
|
|
4063
|
+
contextWindow: 2e5,
|
|
4064
|
+
maxOutputTokens: 1e5,
|
|
4065
|
+
pricing: {
|
|
4066
|
+
input: 1.1,
|
|
4067
|
+
output: 4.4,
|
|
4068
|
+
cachedInput: 0.275
|
|
4069
|
+
},
|
|
4070
|
+
knowledgeCutoff: "2025-04-01",
|
|
4071
|
+
features: {
|
|
4072
|
+
streaming: true,
|
|
4073
|
+
functionCalling: true,
|
|
4074
|
+
vision: true,
|
|
4075
|
+
reasoning: true,
|
|
4076
|
+
structuredOutputs: true,
|
|
4077
|
+
fineTuning: true
|
|
4078
|
+
},
|
|
4079
|
+
metadata: {
|
|
4080
|
+
family: "o-series",
|
|
4081
|
+
notes: "Cost-efficient reasoning model",
|
|
4082
|
+
supportsTemperature: false
|
|
4083
|
+
}
|
|
4084
|
+
},
|
|
4085
|
+
{
|
|
4086
|
+
provider: "openai",
|
|
4087
|
+
modelId: "o3-mini",
|
|
4088
|
+
displayName: "o3 Mini",
|
|
4089
|
+
contextWindow: 2e5,
|
|
4090
|
+
maxOutputTokens: 1e5,
|
|
4091
|
+
pricing: {
|
|
4092
|
+
input: 1.1,
|
|
4093
|
+
output: 4.4,
|
|
4094
|
+
cachedInput: 0.55
|
|
4095
|
+
},
|
|
4096
|
+
knowledgeCutoff: "2025-01-01",
|
|
4097
|
+
features: {
|
|
4098
|
+
streaming: true,
|
|
4099
|
+
functionCalling: true,
|
|
4100
|
+
vision: true,
|
|
4101
|
+
reasoning: true,
|
|
4102
|
+
structuredOutputs: true
|
|
4103
|
+
},
|
|
4104
|
+
metadata: {
|
|
4105
|
+
family: "o-series",
|
|
4106
|
+
notes: "Compact reasoning model for cost-sensitive applications",
|
|
4107
|
+
supportsTemperature: false
|
|
4108
|
+
}
|
|
4109
|
+
}
|
|
4110
|
+
];
|
|
4111
|
+
}
|
|
4112
|
+
});
|
|
4113
|
+
|
|
4114
|
+
// src/providers/openai.ts
|
|
4115
|
+
function sanitizeExtra(extra, allowTemperature) {
|
|
4116
|
+
if (!extra) {
|
|
4117
|
+
return void 0;
|
|
4118
|
+
}
|
|
4119
|
+
if (allowTemperature || !Object.hasOwn(extra, "temperature")) {
|
|
4120
|
+
return extra;
|
|
4121
|
+
}
|
|
4122
|
+
return Object.fromEntries(Object.entries(extra).filter(([key]) => key !== "temperature"));
|
|
4123
|
+
}
|
|
4124
|
+
function createOpenAIProviderFromEnv() {
|
|
4125
|
+
return createProviderFromEnv("OPENAI_API_KEY", import_openai.default, OpenAIChatProvider);
|
|
4126
|
+
}
|
|
4127
|
+
var import_openai, import_tiktoken, ROLE_MAP, OpenAIChatProvider;
|
|
4128
|
+
var init_openai = __esm({
|
|
4129
|
+
"src/providers/openai.ts"() {
|
|
4130
|
+
"use strict";
|
|
4131
|
+
import_openai = __toESM(require("openai"), 1);
|
|
4132
|
+
import_tiktoken = require("tiktoken");
|
|
4133
|
+
init_base_provider();
|
|
4134
|
+
init_constants2();
|
|
4135
|
+
init_openai_models();
|
|
4136
|
+
init_utils();
|
|
4137
|
+
ROLE_MAP = {
|
|
4138
|
+
system: "system",
|
|
4139
|
+
user: "user",
|
|
4140
|
+
assistant: "assistant"
|
|
4141
|
+
};
|
|
4142
|
+
OpenAIChatProvider = class extends BaseProviderAdapter {
|
|
4143
|
+
providerId = "openai";
|
|
4144
|
+
supports(descriptor) {
|
|
4145
|
+
return descriptor.provider === this.providerId;
|
|
4146
|
+
}
|
|
4147
|
+
getModelSpecs() {
|
|
4148
|
+
return OPENAI_MODELS;
|
|
4149
|
+
}
|
|
4150
|
+
buildRequestPayload(options, descriptor, spec, messages) {
|
|
4151
|
+
const { maxTokens, temperature, topP, stopSequences, extra } = options;
|
|
4152
|
+
const supportsTemperature = spec?.metadata?.supportsTemperature !== false;
|
|
4153
|
+
const shouldIncludeTemperature = typeof temperature === "number" && supportsTemperature;
|
|
4154
|
+
const sanitizedExtra = sanitizeExtra(extra, shouldIncludeTemperature);
|
|
4155
|
+
return {
|
|
4156
|
+
model: descriptor.name,
|
|
4157
|
+
messages: messages.map((message) => ({
|
|
4158
|
+
role: ROLE_MAP[message.role],
|
|
4159
|
+
content: message.content,
|
|
4160
|
+
name: message.name
|
|
4161
|
+
})),
|
|
4162
|
+
// Only set max_completion_tokens if explicitly provided
|
|
4163
|
+
// Otherwise let the API use "as much as fits" in the context window
|
|
4164
|
+
...maxTokens !== void 0 ? { max_completion_tokens: maxTokens } : {},
|
|
4165
|
+
top_p: topP,
|
|
4166
|
+
stop: stopSequences,
|
|
4167
|
+
stream: true,
|
|
4168
|
+
stream_options: { include_usage: true },
|
|
4169
|
+
...sanitizedExtra ?? {},
|
|
4170
|
+
...shouldIncludeTemperature ? { temperature } : {}
|
|
4171
|
+
};
|
|
4172
|
+
}
|
|
4173
|
+
async executeStreamRequest(payload) {
|
|
4174
|
+
const client = this.client;
|
|
4175
|
+
const stream2 = await client.chat.completions.create(payload);
|
|
4176
|
+
return stream2;
|
|
4177
|
+
}
|
|
4178
|
+
async *wrapStream(iterable) {
|
|
4179
|
+
const stream2 = iterable;
|
|
4180
|
+
for await (const chunk of stream2) {
|
|
4181
|
+
const text = chunk.choices.map((choice) => choice.delta?.content ?? "").join("");
|
|
4182
|
+
if (text) {
|
|
4183
|
+
yield { text, rawEvent: chunk };
|
|
4184
|
+
}
|
|
4185
|
+
const finishReason = chunk.choices.find((choice) => choice.finish_reason)?.finish_reason;
|
|
4118
4186
|
const usage = chunk.usage ? {
|
|
4119
4187
|
inputTokens: chunk.usage.prompt_tokens,
|
|
4120
4188
|
outputTokens: chunk.usage.completion_tokens,
|
|
4121
|
-
totalTokens: chunk.usage.total_tokens
|
|
4189
|
+
totalTokens: chunk.usage.total_tokens,
|
|
4190
|
+
cachedInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0
|
|
4122
4191
|
} : void 0;
|
|
4123
4192
|
if (finishReason || usage) {
|
|
4124
4193
|
yield { text: "", finishReason, usage, rawEvent: chunk };
|
|
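The new `sanitizeExtra` helper in `src/providers/openai.ts` drops a caller-supplied `temperature` from the `extra` payload whenever the model spec sets `supportsTemperature: false` (as the GPT-5 and o-series entries above do). A standalone restatement of that behaviour; the `reasoning_effort` key is just an illustrative passthrough value:

```typescript
// Standalone restatement of sanitizeExtra for illustration.
type Extra = Record<string, unknown> | undefined;

function sanitizeExtra(extra: Extra, allowTemperature: boolean): Extra {
  if (!extra) return undefined;
  if (allowTemperature || !Object.hasOwn(extra, "temperature")) return extra;
  // Strip only the temperature key; all other extras are forwarded untouched.
  return Object.fromEntries(Object.entries(extra).filter(([key]) => key !== "temperature"));
}

sanitizeExtra({ temperature: 0.2, reasoning_effort: "high" }, false);
// -> { reasoning_effort: "high" }   (temperature silently removed for GPT-5 / o-series)

sanitizeExtra({ temperature: 0.2 }, true);
// -> { temperature: 0.2 }           (models that accept temperature keep it)
```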
@@ -4335,20 +4404,28 @@ var init_model_registry = __esm({
|
|
|
4335
4404
|
/**
|
|
4336
4405
|
* Estimate API cost for a given model and token usage
|
|
4337
4406
|
* @param modelId - Full model identifier
|
|
4338
|
-
* @param inputTokens - Number of input tokens
|
|
4407
|
+
* @param inputTokens - Number of input tokens (total, including cached and cache creation)
|
|
4339
4408
|
* @param outputTokens - Number of output tokens
|
|
4340
|
-
* @param
|
|
4409
|
+
* @param cachedInputTokens - Number of cached input tokens (subset of inputTokens)
|
|
4410
|
+
* @param cacheCreationInputTokens - Number of cache creation tokens (subset of inputTokens, Anthropic only)
|
|
4341
4411
|
* @returns CostEstimate if model found, undefined otherwise
|
|
4342
4412
|
*/
|
|
4343
|
-
estimateCost(modelId, inputTokens, outputTokens,
|
|
4413
|
+
estimateCost(modelId, inputTokens, outputTokens, cachedInputTokens = 0, cacheCreationInputTokens = 0) {
|
|
4344
4414
|
const spec = this.getModelSpec(modelId);
|
|
4345
4415
|
if (!spec) return void 0;
|
|
4346
|
-
const
|
|
4347
|
-
const
|
|
4416
|
+
const cachedRate = spec.pricing.cachedInput ?? spec.pricing.input;
|
|
4417
|
+
const cacheWriteRate = spec.pricing.cacheWriteInput ?? spec.pricing.input;
|
|
4418
|
+
const uncachedInputTokens = inputTokens - cachedInputTokens - cacheCreationInputTokens;
|
|
4419
|
+
const uncachedInputCost = uncachedInputTokens / 1e6 * spec.pricing.input;
|
|
4420
|
+
const cachedInputCost = cachedInputTokens / 1e6 * cachedRate;
|
|
4421
|
+
const cacheCreationCost = cacheCreationInputTokens / 1e6 * cacheWriteRate;
|
|
4422
|
+
const inputCost = uncachedInputCost + cachedInputCost + cacheCreationCost;
|
|
4348
4423
|
const outputCost = outputTokens / 1e6 * spec.pricing.output;
|
|
4349
4424
|
const totalCost = inputCost + outputCost;
|
|
4350
4425
|
return {
|
|
4351
4426
|
inputCost,
|
|
4427
|
+
cachedInputCost,
|
|
4428
|
+
cacheCreationCost,
|
|
4352
4429
|
outputCost,
|
|
4353
4430
|
totalCost,
|
|
4354
4431
|
currency: "USD"
|
|
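`estimateCost` now splits the prompt into uncached, cached-read, and cache-write buckets, each billed at its own per-million rate (cache writes fall back to the regular input rate when `cacheWriteInput` is absent). A worked example using the gpt-4o rates listed earlier in this diff; the token counts are made up:

```typescript
// Worked example of the new cost split, using the gpt-4o pricing shown above.
const pricing = { input: 2.5, output: 10, cachedInput: 1.25 }; // USD per 1M tokens

const inputTokens = 10_000;          // total prompt tokens reported by the provider
const cachedInputTokens = 4_000;     // subset of inputTokens served from cache
const cacheCreationInputTokens = 0;  // Anthropic-only; zero here
const outputTokens = 500;

const uncached = inputTokens - cachedInputTokens - cacheCreationInputTokens; // 6_000
const inputCost =
  (uncached / 1e6) * pricing.input +               // 0.0150
  (cachedInputTokens / 1e6) * pricing.cachedInput; // 0.0050
const outputCost = (outputTokens / 1e6) * pricing.output; // 0.0050

console.log(inputCost + outputCost); // 0.025 (USD)
// Equivalent registry call: estimateCost("gpt-4o", 10_000, 500, 4_000)
```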
@@ -4747,9 +4824,9 @@ var init_builder = __esm({
|
|
|
4747
4824
|
gadgets = [];
|
|
4748
4825
|
initialMessages = [];
|
|
4749
4826
|
onHumanInputRequired;
|
|
4750
|
-
parameterFormat;
|
|
4751
4827
|
gadgetStartPrefix;
|
|
4752
4828
|
gadgetEndPrefix;
|
|
4829
|
+
gadgetArgPrefix;
|
|
4753
4830
|
textOnlyHandler;
|
|
4754
4831
|
textWithGadgetsHandler;
|
|
4755
4832
|
stopOnGadgetError;
|
|
@@ -4936,21 +5013,6 @@ var init_builder = __esm({
|
|
|
4936
5013
|
this.onHumanInputRequired = handler;
|
|
4937
5014
|
return this;
|
|
4938
5015
|
}
|
|
4939
|
-
/**
|
|
4940
|
-
* Set the parameter format for gadget calls.
|
|
4941
|
-
*
|
|
4942
|
-
* @param format - Parameter format ("json" or "xml")
|
|
4943
|
-
* @returns This builder for chaining
|
|
4944
|
-
*
|
|
4945
|
-
* @example
|
|
4946
|
-
* ```typescript
|
|
4947
|
-
* .withParameterFormat("xml")
|
|
4948
|
-
* ```
|
|
4949
|
-
*/
|
|
4950
|
-
withParameterFormat(format) {
|
|
4951
|
-
this.parameterFormat = format;
|
|
4952
|
-
return this;
|
|
4953
|
-
}
|
|
4954
5016
|
/**
|
|
4955
5017
|
* Set custom gadget marker prefix.
|
|
4956
5018
|
*
|
|
@@ -4981,6 +5043,21 @@ var init_builder = __esm({
|
|
|
4981
5043
|
this.gadgetEndPrefix = suffix;
|
|
4982
5044
|
return this;
|
|
4983
5045
|
}
|
|
5046
|
+
/**
|
|
5047
|
+
* Set custom argument prefix for block format parameters.
|
|
5048
|
+
*
|
|
5049
|
+
* @param prefix - Custom prefix for argument markers (default: "!!!ARG:")
|
|
5050
|
+
* @returns This builder for chaining
|
|
5051
|
+
*
|
|
5052
|
+
* @example
|
|
5053
|
+
* ```typescript
|
|
5054
|
+
* .withGadgetArgPrefix("<<ARG>>")
|
|
5055
|
+
* ```
|
|
5056
|
+
*/
|
|
5057
|
+
withGadgetArgPrefix(prefix) {
|
|
5058
|
+
this.gadgetArgPrefix = prefix;
|
|
5059
|
+
return this;
|
|
5060
|
+
}
|
|
4984
5061
|
/**
|
|
4985
5062
|
* Set the text-only handler strategy.
|
|
4986
5063
|
*
|
|
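`withGadgetArgPrefix` joins the existing start/end prefix setters now that `withParameterFormat` is gone. A sketch of customizing all three markers; the `builder` variable is hypothetical, the method names come from this hunk:

```typescript
// Hypothetical builder instance; only the with* calls below appear in the diff.
builder
  .withGadgetStartPrefix("<<GADGET>>")
  .withGadgetEndPrefix("<<END>>")
  .withGadgetArgPrefix("<<ARG>>"); // default remains GADGET_ARG_PREFIX, "!!!ARG:"
```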
@@ -5180,8 +5257,7 @@ var init_builder = __esm({
|
|
|
5180
5257
|
withSyntheticGadgetCall(gadgetName, parameters, result) {
|
|
5181
5258
|
const startPrefix = this.gadgetStartPrefix ?? GADGET_START_PREFIX;
|
|
5182
5259
|
const endPrefix = this.gadgetEndPrefix ?? GADGET_END_PREFIX;
|
|
5183
|
-
const
|
|
5184
|
-
const paramStr = this.formatSyntheticParameters(parameters, format);
|
|
5260
|
+
const paramStr = this.formatBlockParameters(parameters, "");
|
|
5185
5261
|
this.initialMessages.push({
|
|
5186
5262
|
role: "assistant",
|
|
5187
5263
|
content: `${startPrefix}${gadgetName}
|
|
@@ -5195,25 +5271,31 @@ ${endPrefix}`
|
|
|
5195
5271
|
return this;
|
|
5196
5272
|
}
|
|
5197
5273
|
/**
|
|
5198
|
-
* Format parameters
|
|
5199
|
-
* Uses heredoc for multiline string values.
|
|
5274
|
+
* Format parameters as block format with JSON Pointer paths.
|
|
5200
5275
|
*/
|
|
5201
|
-
|
|
5202
|
-
|
|
5203
|
-
|
|
5204
|
-
|
|
5205
|
-
|
|
5206
|
-
if (
|
|
5207
|
-
|
|
5208
|
-
|
|
5209
|
-
|
|
5210
|
-
|
|
5211
|
-
|
|
5212
|
-
|
|
5213
|
-
|
|
5276
|
+
formatBlockParameters(params, prefix) {
|
|
5277
|
+
const lines = [];
|
|
5278
|
+
const argPrefix = this.gadgetArgPrefix ?? GADGET_ARG_PREFIX;
|
|
5279
|
+
for (const [key, value] of Object.entries(params)) {
|
|
5280
|
+
const fullPath = prefix ? `${prefix}/${key}` : key;
|
|
5281
|
+
if (Array.isArray(value)) {
|
|
5282
|
+
value.forEach((item, index) => {
|
|
5283
|
+
const itemPath = `${fullPath}/${index}`;
|
|
5284
|
+
if (typeof item === "object" && item !== null) {
|
|
5285
|
+
lines.push(this.formatBlockParameters(item, itemPath));
|
|
5286
|
+
} else {
|
|
5287
|
+
lines.push(`${argPrefix}${itemPath}`);
|
|
5288
|
+
lines.push(String(item));
|
|
5289
|
+
}
|
|
5290
|
+
});
|
|
5291
|
+
} else if (typeof value === "object" && value !== null) {
|
|
5292
|
+
lines.push(this.formatBlockParameters(value, fullPath));
|
|
5293
|
+
} else {
|
|
5294
|
+
lines.push(`${argPrefix}${fullPath}`);
|
|
5295
|
+
lines.push(String(value));
|
|
5214
5296
|
}
|
|
5215
|
-
|
|
5216
|
-
|
|
5297
|
+
}
|
|
5298
|
+
return lines.join("\n");
|
|
5217
5299
|
}
|
|
5218
5300
|
/**
|
|
5219
5301
|
* Build and create the agent with the given user prompt.
|
|
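`formatBlockParameters` flattens a parameter object into `!!!ARG:`-prefixed markers whose names are JSON Pointer-style paths, with each value on the line(s) that follow. For a synthetic gadget call it would behave roughly like this (the parameter object is made up; the marker layout follows the code above):

```typescript
// Illustrative input for formatBlockParameters(params, ""):
const params = {
  path: "src/index.ts",
  edits: [{ line: 3, text: "hello" }],
};

// Expected block output:
//
// !!!ARG:path
// src/index.ts
// !!!ARG:edits/0/line
// 3
// !!!ARG:edits/0/text
// hello
```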
@@ -5253,9 +5335,9 @@ EOF`;
|
|
|
5253
5335
|
promptConfig: this.promptConfig,
|
|
5254
5336
|
initialMessages: this.initialMessages,
|
|
5255
5337
|
onHumanInputRequired: this.onHumanInputRequired,
|
|
5256
|
-
parameterFormat: this.parameterFormat,
|
|
5257
5338
|
gadgetStartPrefix: this.gadgetStartPrefix,
|
|
5258
5339
|
gadgetEndPrefix: this.gadgetEndPrefix,
|
|
5340
|
+
gadgetArgPrefix: this.gadgetArgPrefix,
|
|
5259
5341
|
textOnlyHandler: this.textOnlyHandler,
|
|
5260
5342
|
textWithGadgetsHandler: this.textWithGadgetsHandler,
|
|
5261
5343
|
stopOnGadgetError: this.stopOnGadgetError,
|
|
@@ -5355,9 +5437,9 @@ EOF`;
|
|
|
5355
5437
|
promptConfig: this.promptConfig,
|
|
5356
5438
|
initialMessages: this.initialMessages,
|
|
5357
5439
|
onHumanInputRequired: this.onHumanInputRequired,
|
|
5358
|
-
parameterFormat: this.parameterFormat,
|
|
5359
5440
|
gadgetStartPrefix: this.gadgetStartPrefix,
|
|
5360
5441
|
gadgetEndPrefix: this.gadgetEndPrefix,
|
|
5442
|
+
gadgetArgPrefix: this.gadgetArgPrefix,
|
|
5361
5443
|
textOnlyHandler: this.textOnlyHandler,
|
|
5362
5444
|
textWithGadgetsHandler: this.textWithGadgetsHandler,
|
|
5363
5445
|
stopOnGadgetError: this.stopOnGadgetError,
|
|
@@ -5382,7 +5464,6 @@ var COMMANDS = {
|
|
|
5382
5464
|
};
|
|
5383
5465
|
var LOG_LEVELS = ["silly", "trace", "debug", "info", "warn", "error", "fatal"];
|
|
5384
5466
|
var DEFAULT_MODEL = "openai:gpt-5-nano";
|
|
5385
|
-
var DEFAULT_PARAMETER_FORMAT = "toml";
|
|
5386
5467
|
var OPTION_FLAGS = {
|
|
5387
5468
|
model: "-m, --model <identifier>",
|
|
5388
5469
|
systemPrompt: "-s, --system <prompt>",
|
|
@@ -5390,12 +5471,14 @@ var OPTION_FLAGS = {
|
|
|
5390
5471
|
maxTokens: "--max-tokens <count>",
|
|
5391
5472
|
maxIterations: "-i, --max-iterations <count>",
|
|
5392
5473
|
gadgetModule: "-g, --gadget <module>",
|
|
5393
|
-
parameterFormat: "--parameter-format <format>",
|
|
5394
5474
|
logLevel: "--log-level <level>",
|
|
5395
5475
|
logFile: "--log-file <path>",
|
|
5396
5476
|
logReset: "--log-reset",
|
|
5477
|
+
logLlmRequests: "--log-llm-requests [dir]",
|
|
5478
|
+
logLlmResponses: "--log-llm-responses [dir]",
|
|
5397
5479
|
noBuiltins: "--no-builtins",
|
|
5398
|
-
noBuiltinInteraction: "--no-builtin-interaction"
|
|
5480
|
+
noBuiltinInteraction: "--no-builtin-interaction",
|
|
5481
|
+
quiet: "-q, --quiet"
|
|
5399
5482
|
};
|
|
5400
5483
|
var OPTION_DESCRIPTIONS = {
|
|
5401
5484
|
model: "Model identifier, e.g. openai:gpt-5-nano or anthropic:claude-sonnet-4-5.",
|
|
@@ -5404,22 +5487,24 @@ var OPTION_DESCRIPTIONS = {
|
|
|
5404
5487
|
maxTokens: "Maximum number of output tokens requested from the model.",
|
|
5405
5488
|
maxIterations: "Maximum number of agent loop iterations before exiting.",
|
|
5406
5489
|
gadgetModule: "Path or module specifier for a gadget export. Repeat to register multiple gadgets.",
|
|
5407
|
-
parameterFormat: "Format for gadget parameter schemas: 'json', 'yaml', 'toml', or 'auto'.",
|
|
5408
5490
|
logLevel: "Log level: silly, trace, debug, info, warn, error, fatal.",
|
|
5409
5491
|
logFile: "Path to log file. When set, logs are written to file instead of stderr.",
|
|
5410
5492
|
logReset: "Reset (truncate) the log file at session start instead of appending.",
|
|
5493
|
+
logLlmRequests: "Save raw LLM requests as plain text. Optional dir, defaults to ~/.llmist/logs/requests/",
|
|
5494
|
+
logLlmResponses: "Save raw LLM responses as plain text. Optional dir, defaults to ~/.llmist/logs/responses/",
|
|
5411
5495
|
noBuiltins: "Disable built-in gadgets (AskUser, TellUser).",
|
|
5412
|
-
noBuiltinInteraction: "Disable interactive gadgets (AskUser) while keeping TellUser."
|
|
5496
|
+
noBuiltinInteraction: "Disable interactive gadgets (AskUser) while keeping TellUser.",
|
|
5497
|
+
quiet: "Suppress all output except content (text and TellUser messages)."
|
|
5413
5498
|
};
|
|
5414
5499
|
var SUMMARY_PREFIX = "[llmist]";
|
|
5415
5500
|
|
|
5416
5501
|
// src/cli/program.ts
|
|
5417
|
-
var
|
|
5502
|
+
var import_commander2 = require("commander");
|
|
5418
5503
|
|
|
5419
5504
|
// package.json
|
|
5420
5505
|
var package_default = {
|
|
5421
5506
|
name: "llmist",
|
|
5422
|
-
version: "0.
|
|
5507
|
+
version: "0.8.0",
|
|
5423
5508
|
description: "Universal TypeScript LLM client with streaming-first agent framework. Works with any model - no structured outputs or native tool calling required. Implements its own flexible grammar for function calling.",
|
|
5424
5509
|
type: "module",
|
|
5425
5510
|
main: "dist/index.cjs",
|
|
@@ -5503,6 +5588,7 @@ var package_default = {
|
|
|
5503
5588
|
"@google/genai": "^1.27.0",
|
|
5504
5589
|
chalk: "^5.6.2",
|
|
5505
5590
|
commander: "^12.1.0",
|
|
5591
|
+
eta: "^4.4.1",
|
|
5506
5592
|
"js-toml": "^1.0.2",
|
|
5507
5593
|
"js-yaml": "^4.1.0",
|
|
5508
5594
|
marked: "^15.0.12",
|
|
@@ -5531,7 +5617,7 @@ var package_default = {
|
|
|
5531
5617
|
};
|
|
5532
5618
|
|
|
5533
5619
|
// src/cli/agent-command.ts
|
|
5534
|
-
var
|
|
5620
|
+
var import_promises2 = require("readline/promises");
|
|
5535
5621
|
var import_chalk3 = __toESM(require("chalk"), 1);
|
|
5536
5622
|
init_builder();
|
|
5537
5623
|
init_registry();
|
|
@@ -5622,12 +5708,19 @@ var import_node_path2 = __toESM(require("path"), 1);
|
|
|
5622
5708
|
var import_node_url = require("url");
|
|
5623
5709
|
init_gadget();
|
|
5624
5710
|
var PATH_PREFIXES = [".", "/", "~"];
|
|
5711
|
+
function isGadgetLike(value) {
|
|
5712
|
+
if (typeof value !== "object" || value === null) {
|
|
5713
|
+
return false;
|
|
5714
|
+
}
|
|
5715
|
+
const obj = value;
|
|
5716
|
+
return typeof obj.execute === "function" && typeof obj.description === "string" && ("parameterSchema" in obj || "schema" in obj);
|
|
5717
|
+
}
|
|
5625
5718
|
function isGadgetConstructor(value) {
|
|
5626
5719
|
if (typeof value !== "function") {
|
|
5627
5720
|
return false;
|
|
5628
5721
|
}
|
|
5629
5722
|
const prototype = value.prototype;
|
|
5630
|
-
return Boolean(prototype) && prototype instanceof BaseGadget;
|
|
5723
|
+
return Boolean(prototype) && (prototype instanceof BaseGadget || isGadgetLike(prototype));
|
|
5631
5724
|
}
|
|
5632
5725
|
function expandHomePath(input) {
|
|
5633
5726
|
if (!input.startsWith("~")) {
|
|
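`isGadgetLike` lets the gadget loader accept duck-typed objects, not just `BaseGadget` instances: anything with an `execute` function, a string `description`, and a `parameterSchema` or `schema` property now passes. A minimal module export that would be picked up; the schema shape and the extra `name` field are illustrative assumptions:

```typescript
// Illustrative plain-object gadget; only execute/description/parameterSchema are
// required by the isGadgetLike check above. The name field is an assumption.
export const shout = {
  name: "Shout",
  description: "Upper-cases a message.",
  parameterSchema: { type: "object", properties: { message: { type: "string" } } },
  execute: ({ message }: { message: string }) => message.toUpperCase(),
};
```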
@@ -5664,7 +5757,7 @@ function extractGadgetsFromModule(moduleExports) {
|
|
|
5664
5757
|
return;
|
|
5665
5758
|
}
|
|
5666
5759
|
visited.add(value);
|
|
5667
|
-
if (value instanceof BaseGadget) {
|
|
5760
|
+
if (value instanceof BaseGadget || isGadgetLike(value)) {
|
|
5668
5761
|
results.push(value);
|
|
5669
5762
|
return;
|
|
5670
5763
|
}
|
|
@@ -5713,8 +5806,33 @@ async function loadGadgets(specifiers, cwd, importer = (specifier) => import(spe
|
|
|
5713
5806
|
return gadgets;
|
|
5714
5807
|
}
|
|
5715
5808
|
|
|
5716
|
-
// src/cli/
|
|
5717
|
-
var
|
|
5809
|
+
// src/cli/llm-logging.ts
|
|
5810
|
+
var import_promises = require("fs/promises");
|
|
5811
|
+
var import_node_os = require("os");
|
|
5812
|
+
var import_node_path3 = require("path");
|
|
5813
|
+
var DEFAULT_LLM_LOG_DIR = (0, import_node_path3.join)((0, import_node_os.homedir)(), ".llmist", "logs");
|
|
5814
|
+
function resolveLogDir(option, subdir) {
|
|
5815
|
+
if (option === true) {
|
|
5816
|
+
return (0, import_node_path3.join)(DEFAULT_LLM_LOG_DIR, subdir);
|
|
5817
|
+
}
|
|
5818
|
+
if (typeof option === "string") {
|
|
5819
|
+
return option;
|
|
5820
|
+
}
|
|
5821
|
+
return void 0;
|
|
5822
|
+
}
|
|
5823
|
+
function formatLlmRequest(messages) {
|
|
5824
|
+
const lines = [];
|
|
5825
|
+
for (const msg of messages) {
|
|
5826
|
+
lines.push(`=== ${msg.role.toUpperCase()} ===`);
|
|
5827
|
+
lines.push(msg.content ?? "");
|
|
5828
|
+
lines.push("");
|
|
5829
|
+
}
|
|
5830
|
+
return lines.join("\n");
|
|
5831
|
+
}
|
|
5832
|
+
async function writeLogFile(dir, filename, content) {
|
|
5833
|
+
await (0, import_promises.mkdir)(dir, { recursive: true });
|
|
5834
|
+
await (0, import_promises.writeFile)((0, import_node_path3.join)(dir, filename), content, "utf-8");
|
|
5835
|
+
}
|
|
5718
5836
|
|
|
5719
5837
|
// src/cli/utils.ts
|
|
5720
5838
|
var import_chalk2 = __toESM(require("chalk"), 1);
|
|
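The new `src/cli/llm-logging.ts` helpers pick a target directory (`true` means the default under `~/.llmist/logs/`) and write each request as a plain-text transcript with one `=== ROLE ===` block per message. A sketch of the resulting behaviour and file contents (values illustrative; the file name pattern is the one used by the agent command further below):

```typescript
// Behaviour of the helpers above, restated with illustrative values.
//
//   resolveLogDir(true, "requests")       -> <home>/.llmist/logs/requests
//   resolveLogDir("/tmp/llm", "requests") -> "/tmp/llm"
//   resolveLogDir(undefined, "requests")  -> undefined (logging stays off)
//
// formatLlmRequest([
//   { role: "system", content: "You are terse." },
//   { role: "user",   content: "Hi" },
// ]) produces:
//
//   === SYSTEM ===
//   You are terse.
//
//   === USER ===
//   Hi
//
// which writeLogFile() saves as e.g. 1735689600000_call_1.request.txt.
```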
@@ -5758,9 +5876,29 @@ function ensureMarkedConfigured() {
|
|
|
5758
5876
|
}
|
|
5759
5877
|
function renderMarkdown(text) {
|
|
5760
5878
|
ensureMarkedConfigured();
|
|
5761
|
-
|
|
5879
|
+
let rendered = import_marked.marked.parse(text);
|
|
5880
|
+
rendered = rendered.replace(/\*\*(.+?)\*\*/g, (_, content) => import_chalk.default.bold(content)).replace(/(?<!\*)\*(\S[^*]*)\*(?!\*)/g, (_, content) => import_chalk.default.italic(content));
|
|
5762
5881
|
return rendered.trimEnd();
|
|
5763
5882
|
}
|
|
5883
|
+
function createRainbowSeparator() {
|
|
5884
|
+
const colors = [import_chalk.default.red, import_chalk.default.yellow, import_chalk.default.green, import_chalk.default.cyan, import_chalk.default.blue, import_chalk.default.magenta];
|
|
5885
|
+
const char = "\u2500";
|
|
5886
|
+
const width = process.stdout.columns || 80;
|
|
5887
|
+
let result = "";
|
|
5888
|
+
for (let i = 0; i < width; i++) {
|
|
5889
|
+
result += colors[i % colors.length](char);
|
|
5890
|
+
}
|
|
5891
|
+
return result;
|
|
5892
|
+
}
|
|
5893
|
+
function renderMarkdownWithSeparators(text) {
|
|
5894
|
+
const rendered = renderMarkdown(text);
|
|
5895
|
+
const separator = createRainbowSeparator();
|
|
5896
|
+
return `
|
|
5897
|
+
${separator}
|
|
5898
|
+
${rendered}
|
|
5899
|
+
${separator}
|
|
5900
|
+
`;
|
|
5901
|
+
}
|
|
5764
5902
|
function formatTokens(tokens) {
|
|
5765
5903
|
return tokens >= 1e3 ? `${(tokens / 1e3).toFixed(1)}k` : `${tokens}`;
|
|
5766
5904
|
}
|
|
@@ -5789,8 +5927,14 @@ function renderSummary(metadata) {
|
|
|
5789
5927
|
parts.push(import_chalk.default.magenta(metadata.model));
|
|
5790
5928
|
}
|
|
5791
5929
|
if (metadata.usage) {
|
|
5792
|
-
const { inputTokens, outputTokens } = metadata.usage;
|
|
5930
|
+
const { inputTokens, outputTokens, cachedInputTokens, cacheCreationInputTokens } = metadata.usage;
|
|
5793
5931
|
parts.push(import_chalk.default.dim("\u2191") + import_chalk.default.yellow(` ${formatTokens(inputTokens)}`));
|
|
5932
|
+
if (cachedInputTokens && cachedInputTokens > 0) {
|
|
5933
|
+
parts.push(import_chalk.default.dim("\u27F3") + import_chalk.default.blue(` ${formatTokens(cachedInputTokens)}`));
|
|
5934
|
+
}
|
|
5935
|
+
if (cacheCreationInputTokens && cacheCreationInputTokens > 0) {
|
|
5936
|
+
parts.push(import_chalk.default.dim("\u270E") + import_chalk.default.magenta(` ${formatTokens(cacheCreationInputTokens)}`));
|
|
5937
|
+
}
|
|
5794
5938
|
parts.push(import_chalk.default.dim("\u2193") + import_chalk.default.green(` ${formatTokens(outputTokens)}`));
|
|
5795
5939
|
}
|
|
5796
5940
|
if (metadata.elapsedSeconds !== void 0 && metadata.elapsedSeconds > 0) {
|
|
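`renderSummary` now reports cache activity next to the input/output counts: ⟳ for cached reads and ✎ for cache-creation tokens, each shown only when non-zero. Assuming the parts are joined the same way as the streaming progress line, a call with 12,300 input tokens (8,000 cached, 1,500 written to cache) and 900 output tokens would render roughly as:

```typescript
// Illustrative summary fragment (colors omitted; separator assumed to match the
// progress renderer). formatTokens() shortens counts >= 1000 to one decimal + "k".
// "↑ 12.3k | ⟳ 8.0k | ✎ 1.5k | ↓ 900"
```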
@@ -5874,7 +6018,7 @@ function formatGadgetSummary(result) {
|
|
|
5874
6018
|
const summaryLine = `${icon} ${gadgetLabel}${paramsLabel} ${import_chalk.default.dim("\u2192")} ${outputLabel} ${timeLabel}`;
|
|
5875
6019
|
if (result.gadgetName === "TellUser" && result.parameters?.message) {
|
|
5876
6020
|
const message = String(result.parameters.message);
|
|
5877
|
-
const rendered =
|
|
6021
|
+
const rendered = renderMarkdownWithSeparators(message);
|
|
5878
6022
|
return `${summaryLine}
|
|
5879
6023
|
${rendered}`;
|
|
5880
6024
|
}
|
|
@@ -5959,6 +6103,9 @@ var StreamProgress = class {
|
|
|
5959
6103
|
callOutputTokensEstimated = true;
|
|
5960
6104
|
callOutputChars = 0;
|
|
5961
6105
|
isStreaming = false;
|
|
6106
|
+
// Cache token tracking for live cost estimation during streaming
|
|
6107
|
+
callCachedInputTokens = 0;
|
|
6108
|
+
callCacheCreationInputTokens = 0;
|
|
5962
6109
|
// Cumulative stats (cumulative mode)
|
|
5963
6110
|
totalStartTime = Date.now();
|
|
5964
6111
|
totalTokens = 0;
|
|
@@ -5984,11 +6131,13 @@ var StreamProgress = class {
|
|
|
5984
6131
|
this.callOutputTokensEstimated = true;
|
|
5985
6132
|
this.callOutputChars = 0;
|
|
5986
6133
|
this.isStreaming = false;
|
|
6134
|
+
this.callCachedInputTokens = 0;
|
|
6135
|
+
this.callCacheCreationInputTokens = 0;
|
|
5987
6136
|
this.start();
|
|
5988
6137
|
}
|
|
5989
6138
|
/**
|
|
5990
6139
|
* Ends the current LLM call. Updates cumulative stats and switches to cumulative mode.
|
|
5991
|
-
* @param usage - Final token usage from the call
|
|
6140
|
+
* @param usage - Final token usage from the call (including cached tokens if available)
|
|
5992
6141
|
*/
|
|
5993
6142
|
endCall(usage) {
|
|
5994
6143
|
this.iterations++;
|
|
@@ -6000,7 +6149,9 @@ var StreamProgress = class {
|
|
|
6000
6149
|
const cost = this.modelRegistry.estimateCost(
|
|
6001
6150
|
modelName,
|
|
6002
6151
|
usage.inputTokens,
|
|
6003
|
-
usage.outputTokens
|
|
6152
|
+
usage.outputTokens,
|
|
6153
|
+
usage.cachedInputTokens ?? 0,
|
|
6154
|
+
usage.cacheCreationInputTokens ?? 0
|
|
6004
6155
|
);
|
|
6005
6156
|
if (cost) {
|
|
6006
6157
|
this.totalCost += cost.totalCost;
|
|
@@ -6040,6 +6191,16 @@ var StreamProgress = class {
|
|
|
6040
6191
|
this.callOutputTokens = tokens;
|
|
6041
6192
|
this.callOutputTokensEstimated = estimated;
|
|
6042
6193
|
}
|
|
6194
|
+
/**
|
|
6195
|
+
* Sets cached token counts for the current call (from stream metadata).
|
|
6196
|
+
* Used for live cost estimation during streaming.
|
|
6197
|
+
* @param cachedInputTokens - Number of tokens read from cache (cheaper)
|
|
6198
|
+
* @param cacheCreationInputTokens - Number of tokens written to cache (more expensive)
|
|
6199
|
+
*/
|
|
6200
|
+
setCachedTokens(cachedInputTokens, cacheCreationInputTokens) {
|
|
6201
|
+
this.callCachedInputTokens = cachedInputTokens;
|
|
6202
|
+
this.callCacheCreationInputTokens = cacheCreationInputTokens;
|
|
6203
|
+
}
|
|
6043
6204
|
/**
|
|
6044
6205
|
* Get total elapsed time in seconds since the first call started.
|
|
6045
6206
|
* @returns Elapsed time in seconds with 1 decimal place
|
|
@@ -6104,11 +6265,32 @@ var StreamProgress = class {
|
|
|
6104
6265
|
parts.push(import_chalk2.default.dim("\u2193") + import_chalk2.default.green(` ${prefix}${formatTokens(outTokens)}`));
|
|
6105
6266
|
}
|
|
6106
6267
|
parts.push(import_chalk2.default.dim(`${elapsed}s`));
|
|
6107
|
-
|
|
6108
|
-
|
|
6268
|
+
const callCost = this.calculateCurrentCallCost(outTokens);
|
|
6269
|
+
if (callCost > 0) {
|
|
6270
|
+
parts.push(import_chalk2.default.cyan(`$${formatCost(callCost)}`));
|
|
6109
6271
|
}
|
|
6110
6272
|
this.target.write(`\r${parts.join(import_chalk2.default.dim(" | "))} ${import_chalk2.default.cyan(spinner)}`);
|
|
6111
6273
|
}
|
|
6274
|
+
/**
|
|
6275
|
+
* Calculates live cost estimate for the current streaming call.
|
|
6276
|
+
* Uses current input/output tokens and cached token counts.
|
|
6277
|
+
*/
|
|
6278
|
+
calculateCurrentCallCost(outputTokens) {
|
|
6279
|
+
if (!this.modelRegistry || !this.model) return 0;
|
|
6280
|
+
try {
|
|
6281
|
+
const modelName = this.model.includes(":") ? this.model.split(":")[1] : this.model;
|
|
6282
|
+
const cost = this.modelRegistry.estimateCost(
|
|
6283
|
+
modelName,
|
|
6284
|
+
this.callInputTokens,
|
|
6285
|
+
outputTokens,
|
|
6286
|
+
this.callCachedInputTokens,
|
|
6287
|
+
this.callCacheCreationInputTokens
|
|
6288
|
+
);
|
|
6289
|
+
return cost?.totalCost ?? 0;
|
|
6290
|
+
} catch {
|
|
6291
|
+
return 0;
|
|
6292
|
+
}
|
|
6293
|
+
}
|
|
6112
6294
|
renderCumulativeMode(spinner) {
|
|
6113
6295
|
const elapsed = ((Date.now() - this.totalStartTime) / 1e3).toFixed(1);
|
|
6114
6296
|
const parts = [];
|
|
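`calculateCurrentCallCost` folds cache reads and cache writes into the live cost shown while a call is still streaming; each spinner frame recomputes it from the running counters. A condensed sketch of the flow with made-up numbers (method names as in the class above):

```typescript
// Condensed restatement of the live-cost path; the numbers are illustrative.
progress.setInputTokens(10_000, false);   // counted before the call starts
progress.setOutputTokens(120, false);     // updated from stream usage chunks
progress.setCachedTokens(8_000, 0);       // 8k cache reads, no cache writes

// Each render then effectively runs:
//   modelRegistry.estimateCost(modelName, 10_000, 120, 8_000, 0)
// and appends the result to the status line, roughly $0.016 at the gpt-4o rates above.
```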
@@ -6143,7 +6325,7 @@ var StreamProgress = class {
|
|
|
6143
6325
|
}
|
|
6144
6326
|
this.isRunning = false;
|
|
6145
6327
|
if (this.hasRendered) {
|
|
6146
|
-
this.target.write("\r\x1B[K");
|
|
6328
|
+
this.target.write("\r\x1B[K\x1B[0G");
|
|
6147
6329
|
this.hasRendered = false;
|
|
6148
6330
|
}
|
|
6149
6331
|
}
|
|
@@ -6236,16 +6418,6 @@ async function executeAction(action, env) {
|
|
|
6236
6418
|
}
|
|
6237
6419
|
|
|
6238
6420
|
// src/cli/option-helpers.ts
|
|
6239
|
-
var PARAMETER_FORMAT_VALUES = ["json", "yaml", "toml", "auto"];
|
|
6240
|
-
function parseParameterFormat(value) {
|
|
6241
|
-
const normalized = value.toLowerCase();
|
|
6242
|
-
if (!PARAMETER_FORMAT_VALUES.includes(normalized)) {
|
|
6243
|
-
throw new import_commander2.InvalidArgumentError(
|
|
6244
|
-
`Parameter format must be one of: ${PARAMETER_FORMAT_VALUES.join(", ")}`
|
|
6245
|
-
);
|
|
6246
|
-
}
|
|
6247
|
-
return normalized;
|
|
6248
|
-
}
|
|
6249
6421
|
function addCompleteOptions(cmd, defaults) {
|
|
6250
6422
|
return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(
|
|
6251
6423
|
OPTION_FLAGS.temperature,
|
|
@@ -6257,7 +6429,7 @@ function addCompleteOptions(cmd, defaults) {
|
|
|
6257
6429
|
OPTION_DESCRIPTIONS.maxTokens,
|
|
6258
6430
|
createNumericParser({ label: "Max tokens", integer: true, min: 1 }),
|
|
6259
6431
|
defaults?.["max-tokens"]
|
|
6260
|
-
);
|
|
6432
|
+
).option(OPTION_FLAGS.quiet, OPTION_DESCRIPTIONS.quiet, defaults?.quiet).option(OPTION_FLAGS.logLlmRequests, OPTION_DESCRIPTIONS.logLlmRequests, defaults?.["log-llm-requests"]).option(OPTION_FLAGS.logLlmResponses, OPTION_DESCRIPTIONS.logLlmResponses, defaults?.["log-llm-responses"]);
|
|
6261
6433
|
}
|
|
6262
6434
|
function addAgentOptions(cmd, defaults) {
|
|
6263
6435
|
const gadgetAccumulator = (value, previous = []) => [
|
|
@@ -6277,16 +6449,11 @@ function addAgentOptions(cmd, defaults) {
|
|
|
6277
6449
|
defaults?.["max-iterations"]
|
|
6278
6450
|
).option(OPTION_FLAGS.gadgetModule, OPTION_DESCRIPTIONS.gadgetModule, gadgetAccumulator, [
|
|
6279
6451
|
...defaultGadgets
|
|
6280
|
-
]).option(
|
|
6281
|
-
OPTION_FLAGS.parameterFormat,
|
|
6282
|
-
OPTION_DESCRIPTIONS.parameterFormat,
|
|
6283
|
-
parseParameterFormat,
|
|
6284
|
-
defaults?.["parameter-format"] ?? DEFAULT_PARAMETER_FORMAT
|
|
6285
|
-
).option(OPTION_FLAGS.noBuiltins, OPTION_DESCRIPTIONS.noBuiltins, defaults?.builtins !== false).option(
|
|
6452
|
+
]).option(OPTION_FLAGS.noBuiltins, OPTION_DESCRIPTIONS.noBuiltins, defaults?.builtins !== false).option(
|
|
6286
6453
|
OPTION_FLAGS.noBuiltinInteraction,
|
|
6287
6454
|
OPTION_DESCRIPTIONS.noBuiltinInteraction,
|
|
6288
6455
|
defaults?.["builtin-interaction"] !== false
|
|
6289
|
-
);
|
|
6456
|
+
).option(OPTION_FLAGS.quiet, OPTION_DESCRIPTIONS.quiet, defaults?.quiet).option(OPTION_FLAGS.logLlmRequests, OPTION_DESCRIPTIONS.logLlmRequests, defaults?.["log-llm-requests"]).option(OPTION_FLAGS.logLlmResponses, OPTION_DESCRIPTIONS.logLlmResponses, defaults?.["log-llm-responses"]);
|
|
6290
6457
|
}
|
|
6291
6458
|
function configToCompleteOptions(config) {
|
|
6292
6459
|
const result = {};
|
|
@@ -6294,6 +6461,9 @@ function configToCompleteOptions(config) {
|
|
|
6294
6461
|
if (config.system !== void 0) result.system = config.system;
|
|
6295
6462
|
if (config.temperature !== void 0) result.temperature = config.temperature;
|
|
6296
6463
|
if (config["max-tokens"] !== void 0) result.maxTokens = config["max-tokens"];
|
|
6464
|
+
if (config.quiet !== void 0) result.quiet = config.quiet;
|
|
6465
|
+
if (config["log-llm-requests"] !== void 0) result.logLlmRequests = config["log-llm-requests"];
|
|
6466
|
+
if (config["log-llm-responses"] !== void 0) result.logLlmResponses = config["log-llm-responses"];
|
|
6297
6467
|
return result;
|
|
6298
6468
|
}
|
|
6299
6469
|
function configToAgentOptions(config) {
|
|
@@ -6303,7 +6473,6 @@ function configToAgentOptions(config) {
|
|
|
6303
6473
|
if (config.temperature !== void 0) result.temperature = config.temperature;
|
|
6304
6474
|
if (config["max-iterations"] !== void 0) result.maxIterations = config["max-iterations"];
|
|
6305
6475
|
if (config.gadget !== void 0) result.gadget = config.gadget;
|
|
6306
|
-
if (config["parameter-format"] !== void 0) result.parameterFormat = config["parameter-format"];
|
|
6307
6476
|
if (config.builtins !== void 0) result.builtins = config.builtins;
|
|
6308
6477
|
if (config["builtin-interaction"] !== void 0)
|
|
6309
6478
|
result.builtinInteraction = config["builtin-interaction"];
|
|
@@ -6311,12 +6480,17 @@ function configToAgentOptions(config) {
|
|
|
6311
6480
|
result.gadgetStartPrefix = config["gadget-start-prefix"];
|
|
6312
6481
|
if (config["gadget-end-prefix"] !== void 0)
|
|
6313
6482
|
result.gadgetEndPrefix = config["gadget-end-prefix"];
|
|
6483
|
+
if (config["gadget-arg-prefix"] !== void 0)
|
|
6484
|
+
result.gadgetArgPrefix = config["gadget-arg-prefix"];
|
|
6485
|
+
if (config.quiet !== void 0) result.quiet = config.quiet;
|
|
6486
|
+
if (config["log-llm-requests"] !== void 0) result.logLlmRequests = config["log-llm-requests"];
|
|
6487
|
+
if (config["log-llm-responses"] !== void 0) result.logLlmResponses = config["log-llm-responses"];
|
|
6314
6488
|
return result;
|
|
6315
6489
|
}
|
|
6316
6490
|
|
|
6317
6491
|
// src/cli/agent-command.ts
|
|
6318
6492
|
async function promptApproval(env, prompt) {
|
|
6319
|
-
const rl = (0,
|
|
6493
|
+
const rl = (0, import_promises2.createInterface)({ input: env.stdin, output: env.stderr });
|
|
6320
6494
|
try {
|
|
6321
6495
|
const answer = await rl.question(prompt);
|
|
6322
6496
|
return answer.trim();
|
|
@@ -6331,10 +6505,10 @@ function createHumanInputHandler(env, progress) {
|
|
|
6331
6505
|
}
|
|
6332
6506
|
return async (question) => {
|
|
6333
6507
|
progress.pause();
|
|
6334
|
-
const rl = (0,
|
|
6508
|
+
const rl = (0, import_promises2.createInterface)({ input: env.stdin, output: env.stdout });
|
|
6335
6509
|
try {
|
|
6336
6510
|
const questionLine = question.trim() ? `
|
|
6337
|
-
${
|
|
6511
|
+
${renderMarkdownWithSeparators(question.trim())}` : "";
|
|
6338
6512
|
let isFirst = true;
|
|
6339
6513
|
while (true) {
|
|
6340
6514
|
const statsPrompt = progress.formatPrompt();
|
|
@@ -6356,9 +6530,10 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6356
6530
|
const prompt = await resolvePrompt(promptArg, env);
|
|
6357
6531
|
const client = env.createClient();
|
|
6358
6532
|
const registry = new GadgetRegistry();
|
|
6533
|
+
const stdinIsInteractive = isInteractive(env.stdin);
|
|
6359
6534
|
if (options.builtins !== false) {
|
|
6360
6535
|
for (const gadget of builtinGadgets) {
|
|
6361
|
-
if (
|
|
6536
|
+
if (gadget.name === "AskUser" && (options.builtinInteraction === false || !stdinIsInteractive)) {
|
|
6362
6537
|
continue;
|
|
6363
6538
|
}
|
|
6364
6539
|
registry.registerByClass(gadget);
|
|
@@ -6376,6 +6551,9 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6376
6551
|
const progress = new StreamProgress(env.stderr, stderrTTY, client.modelRegistry);
|
|
6377
6552
|
let usage;
|
|
6378
6553
|
let iterations = 0;
|
|
6554
|
+
const llmRequestsDir = resolveLogDir(options.logLlmRequests, "requests");
|
|
6555
|
+
const llmResponsesDir = resolveLogDir(options.logLlmResponses, "responses");
|
|
6556
|
+
let llmCallCounter = 0;
|
|
6379
6557
|
const countMessagesTokens = async (model, messages) => {
|
|
6380
6558
|
try {
|
|
6381
6559
|
return await client.countTokens(model, messages);
|
|
@@ -6398,12 +6576,18 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6398
6576
|
// onLLMCallStart: Start progress indicator for each LLM call
|
|
6399
6577
|
// This showcases how to react to agent lifecycle events
|
|
6400
6578
|
onLLMCallStart: async (context) => {
|
|
6579
|
+
llmCallCounter++;
|
|
6401
6580
|
const inputTokens = await countMessagesTokens(
|
|
6402
6581
|
context.options.model,
|
|
6403
6582
|
context.options.messages
|
|
6404
6583
|
);
|
|
6405
6584
|
progress.startCall(context.options.model, inputTokens);
|
|
6406
6585
|
progress.setInputTokens(inputTokens, false);
|
|
6586
|
+
if (llmRequestsDir) {
|
|
6587
|
+
const filename = `${Date.now()}_call_${llmCallCounter}.request.txt`;
|
|
6588
|
+
const content = formatLlmRequest(context.options.messages);
|
|
6589
|
+
await writeLogFile(llmRequestsDir, filename, content);
|
|
6590
|
+
}
|
|
6407
6591
|
},
|
|
6408
6592
|
// onStreamChunk: Real-time updates as LLM generates tokens
|
|
6409
6593
|
// This enables responsive UIs that show progress during generation
|
|
@@ -6416,6 +6600,10 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6416
6600
|
if (context.usage.outputTokens) {
|
|
6417
6601
|
progress.setOutputTokens(context.usage.outputTokens, false);
|
|
6418
6602
|
}
|
|
6603
|
+
progress.setCachedTokens(
|
|
6604
|
+
context.usage.cachedInputTokens ?? 0,
|
|
6605
|
+
context.usage.cacheCreationInputTokens ?? 0
|
|
6606
|
+
);
|
|
6419
6607
|
}
|
|
6420
6608
|
},
|
|
6421
6609
|
// onLLMCallComplete: Finalize metrics after each LLM call
|
|
@@ -6434,11 +6622,13 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6434
6622
|
let callCost;
|
|
6435
6623
|
if (context.usage && client.modelRegistry) {
|
|
6436
6624
|
try {
|
|
6437
|
-
const modelName = options.model.includes(":") ? options.model.split(":")[1] : options.model;
|
|
6625
|
+
const modelName = context.options.model.includes(":") ? context.options.model.split(":")[1] : context.options.model;
|
|
6438
6626
|
const costResult = client.modelRegistry.estimateCost(
|
|
6439
6627
|
modelName,
|
|
6440
6628
|
context.usage.inputTokens,
|
|
6441
|
-
context.usage.outputTokens
|
|
6629
|
+
context.usage.outputTokens,
|
|
6630
|
+
context.usage.cachedInputTokens ?? 0,
|
|
6631
|
+
context.usage.cacheCreationInputTokens ?? 0
|
|
6442
6632
|
);
|
|
6443
6633
|
if (costResult) callCost = costResult.totalCost;
|
|
6444
6634
|
} catch {
|
|
@@ -6446,7 +6636,7 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6446
6636
|
}
|
|
6447
6637
|
const callElapsed = progress.getCallElapsedSeconds();
|
|
6448
6638
|
progress.endCall(context.usage);
|
|
6449
|
-
if (
|
|
6639
|
+
if (!options.quiet) {
|
|
6450
6640
|
const summary = renderSummary({
|
|
6451
6641
|
iterations: context.iteration + 1,
|
|
6452
6642
|
model: options.model,
|
|
@@ -6460,6 +6650,10 @@ async function executeAgent(promptArg, options, env) {
|
|
|
6460
6650
|
`);
|
|
6461
6651
|
}
|
|
6462
6652
|
}
|
|
6653
|
+
if (llmResponsesDir) {
|
|
6654
|
+
const filename = `${Date.now()}_call_${llmCallCounter}.response.txt`;
|
|
6655
|
+
await writeLogFile(llmResponsesDir, filename, context.rawResponse);
|
|
6656
|
+
}
|
|
6463
6657
|
}
|
|
6464
6658
|
},
|
|
6465
6659
|
// SHOWCASE: Controller-based approval gating for dangerous gadgets
|
|
@@ -6524,13 +6718,15 @@ Command rejected by user with message: "${response}"`
|
|
|
6524
6718
|
if (gadgets.length > 0) {
|
|
6525
6719
|
builder.withGadgets(...gadgets);
|
|
6526
6720
|
}
|
|
6527
|
-
builder.withParameterFormat(options.parameterFormat);
|
|
6528
6721
|
if (options.gadgetStartPrefix) {
|
|
6529
6722
|
builder.withGadgetStartPrefix(options.gadgetStartPrefix);
|
|
6530
6723
|
}
|
|
6531
6724
|
if (options.gadgetEndPrefix) {
|
|
6532
6725
|
builder.withGadgetEndPrefix(options.gadgetEndPrefix);
|
|
6533
6726
|
}
|
|
6727
|
+
if (options.gadgetArgPrefix) {
|
|
6728
|
+
builder.withGadgetArgPrefix(options.gadgetArgPrefix);
|
|
6729
|
+
}
|
|
6534
6730
|
builder.withSyntheticGadgetCall(
|
|
6535
6731
|
"TellUser",
|
|
6536
6732
|
{
|
|
@@ -6547,22 +6743,38 @@ Command rejected by user with message: "${response}"`
|
|
|
6547
6743
|
resultMapping: (text) => `\u2139\uFE0F ${text}`
|
|
6548
6744
|
});
|
|
6549
6745
|
const agent = builder.ask(prompt);
|
|
6746
|
+
let textBuffer = "";
|
|
6747
|
+
const flushTextBuffer = () => {
|
|
6748
|
+
if (textBuffer) {
|
|
6749
|
+
const output = options.quiet ? textBuffer : renderMarkdownWithSeparators(textBuffer);
|
|
6750
|
+
printer.write(output);
|
|
6751
|
+
textBuffer = "";
|
|
6752
|
+
}
|
|
6753
|
+
};
|
|
6550
6754
|
for await (const event of agent.run()) {
|
|
6551
6755
|
if (event.type === "text") {
|
|
6552
6756
|
progress.pause();
|
|
6553
|
-
|
|
6757
|
+
textBuffer += event.content;
|
|
6554
6758
|
} else if (event.type === "gadget_result") {
|
|
6759
|
+
flushTextBuffer();
|
|
6555
6760
|
progress.pause();
|
|
6556
|
-
if (
|
|
6761
|
+
if (options.quiet) {
|
|
6762
|
+
if (event.result.gadgetName === "TellUser" && event.result.parameters?.message) {
|
|
6763
|
+
const message = String(event.result.parameters.message);
|
|
6764
|
+
env.stdout.write(`${message}
|
|
6765
|
+
`);
|
|
6766
|
+
}
|
|
6767
|
+
} else {
|
|
6557
6768
|
const tokenCount = await countGadgetOutputTokens(event.result.result);
|
|
6558
6769
|
env.stderr.write(`${formatGadgetSummary({ ...event.result, tokenCount })}
|
|
6559
6770
|
`);
|
|
6560
6771
|
}
|
|
6561
6772
|
}
|
|
6562
6773
|
}
|
|
6774
|
+
flushTextBuffer();
|
|
6563
6775
|
progress.complete();
|
|
6564
6776
|
printer.ensureNewline();
|
|
6565
|
-
if (
|
|
6777
|
+
if (!options.quiet && iterations > 1) {
|
|
6566
6778
|
env.stderr.write(`${import_chalk3.default.dim("\u2500".repeat(40))}
|
|
6567
6779
|
`);
|
|
6568
6780
|
const summary = renderOverallSummary({
|
|
@@ -6598,9 +6810,18 @@ async function executeComplete(promptArg, options, env) {
|
|
|
6598
6810
|
builder.addSystem(options.system);
|
|
6599
6811
|
}
|
|
6600
6812
|
builder.addUser(prompt);
|
|
6813
|
+
const messages = builder.build();
|
|
6814
|
+
const llmRequestsDir = resolveLogDir(options.logLlmRequests, "requests");
|
|
6815
|
+
const llmResponsesDir = resolveLogDir(options.logLlmResponses, "responses");
|
|
6816
|
+
const timestamp = Date.now();
|
|
6817
|
+
if (llmRequestsDir) {
|
|
6818
|
+
const filename = `${timestamp}_complete.request.txt`;
|
|
6819
|
+
const content = formatLlmRequest(messages);
|
|
6820
|
+
await writeLogFile(llmRequestsDir, filename, content);
|
|
6821
|
+
}
|
|
6601
6822
|
const stream2 = client.stream({
|
|
6602
6823
|
model,
|
|
6603
|
-
messages
|
|
6824
|
+
messages,
|
|
6604
6825
|
temperature: options.temperature,
|
|
6605
6826
|
maxTokens: options.maxTokens
|
|
6606
6827
|
});
|
|
@@ -6611,7 +6832,7 @@ async function executeComplete(promptArg, options, env) {
|
|
|
6611
6832
|
progress.startCall(model, estimatedInputTokens);
|
|
6612
6833
|
let finishReason;
|
|
6613
6834
|
let usage;
|
|
6614
|
-
let
|
|
6835
|
+
let accumulatedResponse = "";
|
|
6615
6836
|
for await (const chunk of stream2) {
|
|
6616
6837
|
if (chunk.usage) {
|
|
6617
6838
|
usage = chunk.usage;
|
|
@@ -6624,8 +6845,8 @@ async function executeComplete(promptArg, options, env) {
|
|
|
6624
6845
|
}
|
|
6625
6846
|
if (chunk.text) {
|
|
6626
6847
|
progress.pause();
|
|
6627
|
-
|
|
6628
|
-
progress.update(
|
|
6848
|
+
accumulatedResponse += chunk.text;
|
|
6849
|
+
progress.update(accumulatedResponse.length);
|
|
6629
6850
|
printer.write(chunk.text);
|
|
6630
6851
|
}
|
|
6631
6852
|
if (chunk.finishReason !== void 0) {
|
|
@@ -6635,7 +6856,11 @@ async function executeComplete(promptArg, options, env) {
|
|
|
6635
6856
|
progress.endCall(usage);
|
|
6636
6857
|
progress.complete();
|
|
6637
6858
|
printer.ensureNewline();
|
|
6638
|
-
if (
|
|
6859
|
+
if (llmResponsesDir) {
|
|
6860
|
+
const filename = `${timestamp}_complete.response.txt`;
|
|
6861
|
+
await writeLogFile(llmResponsesDir, filename, accumulatedResponse);
|
|
6862
|
+
}
|
|
6863
|
+
if (stderrTTY && !options.quiet) {
|
|
6639
6864
|
const summary = renderSummary({ finishReason, usage, cost: progress.getTotalCost() });
|
|
6640
6865
|
if (summary) {
|
|
6641
6866
|
env.stderr.write(`${summary}
|
|
@@ -6653,36 +6878,136 @@ function registerCompleteCommand(program, env, config) {
|
|
|
6653
6878
|
|
|
6654
6879
|
// src/cli/config.ts
|
|
6655
6880
|
var import_node_fs3 = require("fs");
|
|
6656
|
-
var
|
|
6657
|
-
var
|
|
6658
|
-
var
|
|
6881
|
+
var import_node_os2 = require("os");
|
|
6882
|
+
var import_node_path4 = require("path");
|
|
+var import_js_toml = require("js-toml");
+
+// src/cli/templates.ts
+var import_eta = require("eta");
+var TemplateError = class extends Error {
+constructor(message, promptName, configPath) {
+super(promptName ? `[prompts.${promptName}]: ${message}` : message);
+this.promptName = promptName;
+this.configPath = configPath;
+this.name = "TemplateError";
+}
+};
+function createTemplateEngine(prompts, configPath) {
+const eta = new import_eta.Eta({
+views: "/",
+// Required but we use named templates
+autoEscape: false,
+// Don't escape - these are prompts, not HTML
+autoTrim: false
+// Preserve whitespace in prompts
+});
+for (const [name, template] of Object.entries(prompts)) {
+try {
+eta.loadTemplate(`@${name}`, template);
+} catch (error) {
+throw new TemplateError(
+error instanceof Error ? error.message : String(error),
+name,
+configPath
+);
+}
+}
+return eta;
+}
+function resolveTemplate(eta, template, context = {}, configPath) {
+try {
+const fullContext = {
+...context,
+env: process.env
+};
+return eta.renderString(template, fullContext);
+} catch (error) {
+throw new TemplateError(
+error instanceof Error ? error.message : String(error),
+void 0,
+configPath
+);
+}
+}
+function validatePrompts(prompts, configPath) {
+const eta = createTemplateEngine(prompts, configPath);
+for (const [name, template] of Object.entries(prompts)) {
+try {
+eta.renderString(template, { env: {} });
+} catch (error) {
+throw new TemplateError(
+error instanceof Error ? error.message : String(error),
+name,
+configPath
+);
+}
+}
+}
+function validateEnvVars(template, promptName, configPath) {
+const envVarPattern = /<%=\s*it\.env\.(\w+)\s*%>/g;
+const matches = template.matchAll(envVarPattern);
+for (const match of matches) {
+const varName = match[1];
+if (process.env[varName] === void 0) {
+throw new TemplateError(
+`Environment variable '${varName}' is not set`,
+promptName,
+configPath
+);
+}
+}
+}
+function hasTemplateSyntax(str) {
+return str.includes("<%");
+}
+
+// src/cli/config.ts
 var GLOBAL_CONFIG_KEYS = /* @__PURE__ */ new Set(["log-level", "log-file", "log-reset"]);
 var VALID_LOG_LEVELS = ["silly", "trace", "debug", "info", "warn", "error", "fatal"];
-var COMPLETE_CONFIG_KEYS = /* @__PURE__ */ new Set([
+var COMPLETE_CONFIG_KEYS = /* @__PURE__ */ new Set([
+"model",
+"system",
+"temperature",
+"max-tokens",
+"quiet",
+"inherits",
+"log-level",
+"log-file",
+"log-reset",
+"log-llm-requests",
+"log-llm-responses",
+"type"
+// Allowed for inheritance compatibility, ignored for built-in commands
+]);
 var AGENT_CONFIG_KEYS = /* @__PURE__ */ new Set([
 "model",
 "system",
 "temperature",
 "max-iterations",
 "gadget",
-"parameter-format",
 "builtins",
 "builtin-interaction",
 "gadget-start-prefix",
-"gadget-end-prefix"
+"gadget-end-prefix",
+"gadget-arg-prefix",
+"quiet",
+"inherits",
+"log-level",
+"log-file",
+"log-reset",
+"log-llm-requests",
+"log-llm-responses",
+"type"
+// Allowed for inheritance compatibility, ignored for built-in commands
 ]);
 var CUSTOM_CONFIG_KEYS = /* @__PURE__ */ new Set([
 ...COMPLETE_CONFIG_KEYS,
 ...AGENT_CONFIG_KEYS,
 "type",
-"description",
-"log-level",
-"log-file",
-"log-reset"
+"description"
 ]);
-var VALID_PARAMETER_FORMATS = ["json", "yaml", "toml", "auto"];
 function getConfigPath() {
-return (0,
+return (0, import_node_path4.join)((0, import_node_os2.homedir)(), ".llmist", "cli.toml");
 }
 var ConfigError = class extends Error {
 constructor(message, path2) {
@@ -6729,6 +7054,39 @@ function validateStringArray(value, key, section) {
 }
 return value;
 }
+function validateInherits(value, section) {
+if (typeof value === "string") {
+return value;
+}
+if (Array.isArray(value)) {
+for (let i = 0; i < value.length; i++) {
+if (typeof value[i] !== "string") {
+throw new ConfigError(`[${section}].inherits[${i}] must be a string`);
+}
+}
+return value;
+}
+throw new ConfigError(`[${section}].inherits must be a string or array of strings`);
+}
+function validateLoggingConfig(raw, section) {
+const result = {};
+if ("log-level" in raw) {
+const level = validateString(raw["log-level"], "log-level", section);
+if (!VALID_LOG_LEVELS.includes(level)) {
+throw new ConfigError(
+`[${section}].log-level must be one of: ${VALID_LOG_LEVELS.join(", ")}`
+);
+}
+result["log-level"] = level;
+}
+if ("log-file" in raw) {
+result["log-file"] = validateString(raw["log-file"], "log-file", section);
+}
+if ("log-reset" in raw) {
+result["log-reset"] = validateBoolean(raw["log-reset"], "log-reset", section);
+}
+return result;
+}
 function validateBaseConfig(raw, section) {
 const result = {};
 if ("model" in raw) {
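
[Illustrative sketch, not part of the diff: the validateInherits and validateLoggingConfig helpers added above accept a per-section "inherits" key (a string or an array of section names) and the shared logging keys in ~/.llmist/cli.toml. The section name and every value below are hypothetical.]

[agent]
model = "example-model-id"   # hypothetical value
log-level = "debug"          # one of: silly, trace, debug, info, warn, error, fatal
log-file = "/tmp/llmist.log" # hypothetical path
log-reset = true
inherits = ["defaults"]      # string or array; parents must name existing config sections
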
@@ -6743,6 +7101,9 @@ function validateBaseConfig(raw, section) {
 max: 2
 });
 }
+if ("inherits" in raw) {
+result.inherits = validateInherits(raw.inherits, section);
+}
 return result;
 }
 function validateGlobalConfig(raw, section) {
@@ -6755,23 +7116,7 @@ function validateGlobalConfig(raw, section) {
 throw new ConfigError(`[${section}].${key} is not a valid option`);
 }
 }
-
-if ("log-level" in rawObj) {
-const level = validateString(rawObj["log-level"], "log-level", section);
-if (!VALID_LOG_LEVELS.includes(level)) {
-throw new ConfigError(
-`[${section}].log-level must be one of: ${VALID_LOG_LEVELS.join(", ")}`
-);
-}
-result["log-level"] = level;
-}
-if ("log-file" in rawObj) {
-result["log-file"] = validateString(rawObj["log-file"], "log-file", section);
-}
-if ("log-reset" in rawObj) {
-result["log-reset"] = validateBoolean(rawObj["log-reset"], "log-reset", section);
-}
-return result;
+return validateLoggingConfig(rawObj, section);
 }
 function validateCompleteConfig(raw, section) {
 if (typeof raw !== "object" || raw === null) {
@@ -6783,13 +7128,33 @@ function validateCompleteConfig(raw, section) {
 throw new ConfigError(`[${section}].${key} is not a valid option`);
 }
 }
-const result = {
+const result = {
+...validateBaseConfig(rawObj, section),
+...validateLoggingConfig(rawObj, section)
+};
 if ("max-tokens" in rawObj) {
 result["max-tokens"] = validateNumber(rawObj["max-tokens"], "max-tokens", section, {
 integer: true,
 min: 1
 });
 }
+if ("quiet" in rawObj) {
+result.quiet = validateBoolean(rawObj.quiet, "quiet", section);
+}
+if ("log-llm-requests" in rawObj) {
+result["log-llm-requests"] = validateStringOrBoolean(
+rawObj["log-llm-requests"],
+"log-llm-requests",
+section
+);
+}
+if ("log-llm-responses" in rawObj) {
+result["log-llm-responses"] = validateStringOrBoolean(
+rawObj["log-llm-responses"],
+"log-llm-responses",
+section
+);
+}
 return result;
 }
 function validateAgentConfig(raw, section) {
@@ -6802,7 +7167,10 @@ function validateAgentConfig(raw, section) {
 throw new ConfigError(`[${section}].${key} is not a valid option`);
 }
 }
-const result = {
+const result = {
+...validateBaseConfig(rawObj, section),
+...validateLoggingConfig(rawObj, section)
+};
 if ("max-iterations" in rawObj) {
 result["max-iterations"] = validateNumber(rawObj["max-iterations"], "max-iterations", section, {
 integer: true,
@@ -6812,15 +7180,6 @@ function validateAgentConfig(raw, section) {
 if ("gadget" in rawObj) {
 result.gadget = validateStringArray(rawObj.gadget, "gadget", section);
 }
-if ("parameter-format" in rawObj) {
-const format = validateString(rawObj["parameter-format"], "parameter-format", section);
-if (!VALID_PARAMETER_FORMATS.includes(format)) {
-throw new ConfigError(
-`[${section}].parameter-format must be one of: ${VALID_PARAMETER_FORMATS.join(", ")}`
-);
-}
-result["parameter-format"] = format;
-}
 if ("builtins" in rawObj) {
 result.builtins = validateBoolean(rawObj.builtins, "builtins", section);
 }
@@ -6845,8 +7204,38 @@ function validateAgentConfig(raw, section) {
 section
 );
 }
+if ("gadget-arg-prefix" in rawObj) {
+result["gadget-arg-prefix"] = validateString(
+rawObj["gadget-arg-prefix"],
+"gadget-arg-prefix",
+section
+);
+}
+if ("quiet" in rawObj) {
+result.quiet = validateBoolean(rawObj.quiet, "quiet", section);
+}
+if ("log-llm-requests" in rawObj) {
+result["log-llm-requests"] = validateStringOrBoolean(
+rawObj["log-llm-requests"],
+"log-llm-requests",
+section
+);
+}
+if ("log-llm-responses" in rawObj) {
+result["log-llm-responses"] = validateStringOrBoolean(
+rawObj["log-llm-responses"],
+"log-llm-responses",
+section
+);
+}
 return result;
 }
+function validateStringOrBoolean(value, field, section) {
+if (typeof value === "string" || typeof value === "boolean") {
+return value;
+}
+throw new ConfigError(`[${section}].${field} must be a string or boolean`);
+}
 function validateCustomConfig(raw, section) {
 if (typeof raw !== "object" || raw === null) {
 throw new ConfigError(`[${section}] must be a table`);
@@ -6881,15 +7270,6 @@ function validateCustomConfig(raw, section) {
 if ("gadget" in rawObj) {
 result.gadget = validateStringArray(rawObj.gadget, "gadget", section);
 }
-if ("parameter-format" in rawObj) {
-const format = validateString(rawObj["parameter-format"], "parameter-format", section);
-if (!VALID_PARAMETER_FORMATS.includes(format)) {
-throw new ConfigError(
-`[${section}].parameter-format must be one of: ${VALID_PARAMETER_FORMATS.join(", ")}`
-);
-}
-result["parameter-format"] = format;
-}
 if ("builtins" in rawObj) {
 result.builtins = validateBoolean(rawObj.builtins, "builtins", section);
 }
@@ -6914,26 +7294,35 @@ function validateCustomConfig(raw, section) {
 section
 );
 }
+if ("gadget-arg-prefix" in rawObj) {
+result["gadget-arg-prefix"] = validateString(
+rawObj["gadget-arg-prefix"],
+"gadget-arg-prefix",
+section
+);
+}
 if ("max-tokens" in rawObj) {
 result["max-tokens"] = validateNumber(rawObj["max-tokens"], "max-tokens", section, {
 integer: true,
 min: 1
 });
 }
-if ("
-
-if (!VALID_LOG_LEVELS.includes(level)) {
-throw new ConfigError(
-`[${section}].log-level must be one of: ${VALID_LOG_LEVELS.join(", ")}`
-);
-}
-result["log-level"] = level;
+if ("quiet" in rawObj) {
+result.quiet = validateBoolean(rawObj.quiet, "quiet", section);
 }
-
-
+Object.assign(result, validateLoggingConfig(rawObj, section));
+return result;
+}
+function validatePromptsConfig(raw, section) {
+if (typeof raw !== "object" || raw === null) {
+throw new ConfigError(`[${section}] must be a table`);
 }
-
-
+const result = {};
+for (const [key, value] of Object.entries(raw)) {
+if (typeof value !== "string") {
+throw new ConfigError(`[${section}].${key} must be a string`);
+}
+result[key] = value;
 }
 return result;
 }
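
[Illustrative sketch, not part of the diff: validatePromptsConfig above implies a [prompts] table whose values must all be strings; combined with the Eta template helpers added earlier, a config could look roughly like the following. The names, values, and the include-style reference are assumptions, not documented syntax.]

[prompts]
persona = "You are a concise assistant for <%= it.env.USER %>."   # USER must be set, per validateEnvVars

[agent]
system = "<%~ include('@persona', it) %> Prefer short answers."   # assumes Eta's include() resolves templates registered as @name
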
@@ -6951,6 +7340,8 @@ function validateConfig(raw, configPath) {
 result.complete = validateCompleteConfig(value, key);
 } else if (key === "agent") {
 result.agent = validateAgentConfig(value, key);
+} else if (key === "prompts") {
+result.prompts = validatePromptsConfig(value, key);
 } else {
 result[key] = validateCustomConfig(value, key);
 }
@@ -6979,19 +7370,126 @@ function loadConfig() {
 }
 let raw;
 try {
-raw = (0,
+raw = (0, import_js_toml.load)(content);
 } catch (error) {
 throw new ConfigError(
 `Invalid TOML syntax: ${error instanceof Error ? error.message : "Unknown error"}`,
 configPath
 );
 }
-
+const validated = validateConfig(raw, configPath);
+const inherited = resolveInheritance(validated, configPath);
+return resolveTemplatesInConfig(inherited, configPath);
 }
 function getCustomCommandNames(config) {
-const reserved = /* @__PURE__ */ new Set(["global", "complete", "agent"]);
+const reserved = /* @__PURE__ */ new Set(["global", "complete", "agent", "prompts"]);
 return Object.keys(config).filter((key) => !reserved.has(key));
 }
+function resolveTemplatesInConfig(config, configPath) {
+const prompts = config.prompts ?? {};
+const hasPrompts = Object.keys(prompts).length > 0;
+let hasTemplates = false;
+for (const [sectionName, section] of Object.entries(config)) {
+if (sectionName === "global" || sectionName === "prompts") continue;
+if (!section || typeof section !== "object") continue;
+const sectionObj = section;
+if (typeof sectionObj.system === "string" && hasTemplateSyntax(sectionObj.system)) {
+hasTemplates = true;
+break;
+}
+}
+for (const template of Object.values(prompts)) {
+if (hasTemplateSyntax(template)) {
+hasTemplates = true;
+break;
+}
+}
+if (!hasPrompts && !hasTemplates) {
+return config;
+}
+try {
+validatePrompts(prompts, configPath);
+} catch (error) {
+if (error instanceof TemplateError) {
+throw new ConfigError(error.message, configPath);
+}
+throw error;
+}
+for (const [name, template] of Object.entries(prompts)) {
+try {
+validateEnvVars(template, name, configPath);
+} catch (error) {
+if (error instanceof TemplateError) {
+throw new ConfigError(error.message, configPath);
+}
+throw error;
+}
+}
+const eta = createTemplateEngine(prompts, configPath);
+const result = { ...config };
+for (const [sectionName, section] of Object.entries(config)) {
+if (sectionName === "global" || sectionName === "prompts") continue;
+if (!section || typeof section !== "object") continue;
+const sectionObj = section;
+if (typeof sectionObj.system === "string" && hasTemplateSyntax(sectionObj.system)) {
+try {
+validateEnvVars(sectionObj.system, void 0, configPath);
+} catch (error) {
+if (error instanceof TemplateError) {
+throw new ConfigError(`[${sectionName}].system: ${error.message}`, configPath);
+}
+throw error;
+}
+try {
+const resolved = resolveTemplate(eta, sectionObj.system, {}, configPath);
+result[sectionName] = {
+...sectionObj,
+system: resolved
+};
+} catch (error) {
+if (error instanceof TemplateError) {
+throw new ConfigError(`[${sectionName}].system: ${error.message}`, configPath);
+}
+throw error;
+}
+}
+}
+return result;
+}
+function resolveInheritance(config, configPath) {
+const resolved = {};
+const resolving = /* @__PURE__ */ new Set();
+function resolveSection(name) {
+if (name in resolved) {
+return resolved[name];
+}
+if (resolving.has(name)) {
+throw new ConfigError(`Circular inheritance detected: ${name}`, configPath);
+}
+const section = config[name];
+if (section === void 0 || typeof section !== "object") {
+throw new ConfigError(`Cannot inherit from unknown section: ${name}`, configPath);
+}
+resolving.add(name);
+const sectionObj = section;
+const inheritsRaw = sectionObj.inherits;
+const inheritsList = inheritsRaw ? Array.isArray(inheritsRaw) ? inheritsRaw : [inheritsRaw] : [];
+let merged = {};
+for (const parent of inheritsList) {
+const parentResolved = resolveSection(parent);
+merged = { ...merged, ...parentResolved };
+}
+const { inherits: _inherits, ...ownValues } = sectionObj;
+merged = { ...merged, ...ownValues };
+resolving.delete(name);
+resolved[name] = merged;
+return merged;
+}
+for (const name of Object.keys(config)) {
+resolveSection(name);
+}
+return resolved;
+}

 // src/cli/models-command.ts
 var import_chalk4 = __toESM(require("chalk"), 1);
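
[Illustrative sketch, not part of the diff: resolveInheritance above merges parent sections left to right, then applies the section's own keys, and rejects circular chains. Section names and values are hypothetical.]

[base]
model = "model-a"
temperature = 0.5

[fast]
model = "model-b"

[reviewer]
inherits = ["base", "fast"]
temperature = 0.2
# effective [reviewer]: model = "model-b" (later parent wins), temperature = 0.2 (own key wins)
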
@@ -7288,12 +7786,12 @@ function registerCustomCommand(program, name, config, env) {
 function parseLogLevel2(value) {
 const normalized = value.toLowerCase();
 if (!LOG_LEVELS.includes(normalized)) {
-throw new
+throw new import_commander2.InvalidArgumentError(`Log level must be one of: ${LOG_LEVELS.join(", ")}`);
 }
 return normalized;
 }
 function createProgram(env, config) {
-const program = new
+const program = new import_commander2.Command();
 program.name(CLI_NAME).description(CLI_DESCRIPTION).version(package_default.version).option(OPTION_FLAGS.logLevel, OPTION_DESCRIPTIONS.logLevel, parseLogLevel2).option(OPTION_FLAGS.logFile, OPTION_DESCRIPTIONS.logFile).option(OPTION_FLAGS.logReset, OPTION_DESCRIPTIONS.logReset).configureOutput({
 writeOut: (str) => env.stdout.write(str),
 writeErr: (str) => env.stderr.write(str)
@@ -7314,7 +7812,7 @@ async function runCLI(overrides = {}) {
 const opts = "env" in overrides || "config" in overrides ? overrides : { env: overrides };
 const config = opts.config !== void 0 ? opts.config : loadConfig();
 const envOverrides = opts.env ?? {};
-const preParser = new
+const preParser = new import_commander2.Command();
 preParser.option(OPTION_FLAGS.logLevel, OPTION_DESCRIPTIONS.logLevel, parseLogLevel2).option(OPTION_FLAGS.logFile, OPTION_DESCRIPTIONS.logFile).option(OPTION_FLAGS.logReset, OPTION_DESCRIPTIONS.logReset).allowUnknownOption().allowExcessArguments().helpOption(false);
 preParser.parse(process.argv);
 const globalOpts = preParser.opts();