@quantish/agent 0.1.15 → 0.1.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +2 -0
- package/README.md +140 -152
- package/dist/index.js +1305 -403
- package/package.json +2 -1
package/dist/index.js
CHANGED
@@ -18,6 +18,9 @@ var schema = {
   anthropicApiKey: {
     type: "string"
   },
+  openrouterApiKey: {
+    type: "string"
+  },
   quantishApiKey: {
     type: "string"
   },
@@ -28,6 +31,10 @@ var schema = {
   model: {
     type: "string",
     default: "claude-sonnet-4-5-20250929"
+  },
+  provider: {
+    type: "string",
+    default: "anthropic"
   }
 };
 var ConfigManager = class {
@@ -54,6 +61,20 @@ var ConfigManager = class {
   setAnthropicApiKey(key) {
     this.conf.set("anthropicApiKey", key);
   }
+  /**
+   * Get the OpenRouter API key
+   */
+  getOpenRouterApiKey() {
+    const envKey = process.env.OPENROUTER_API_KEY;
+    if (envKey) return envKey;
+    return this.conf.get("openrouterApiKey");
+  }
+  /**
+   * Set the OpenRouter API key
+   */
+  setOpenRouterApiKey(key) {
+    this.conf.set("openrouterApiKey", key);
+  }
   /**
    * Get the Quantish API key
    */
@@ -68,13 +89,22 @@ var ConfigManager = class {
   setQuantishApiKey(key) {
     this.conf.set("quantishApiKey", key);
   }
+  /**
+   * Get the current LLM provider
+   */
+  getProvider() {
+    return this.conf.get("provider") ?? "anthropic";
+  }
+  /**
+   * Set the LLM provider
+   */
+  setProvider(provider) {
+    this.conf.set("provider", provider);
+  }
   /**
    * Get the Trading MCP server URL (user's wallet/orders)
-   * Priority: MCP_SERVER_URL env var > config file > default
    */
   getMcpServerUrl() {
-    const envUrl = process.env.MCP_SERVER_URL;
-    if (envUrl) return envUrl;
     return this.conf.get("mcpServerUrl") ?? DEFAULT_MCP_URL;
   }
   /**
@@ -101,12 +131,6 @@ var ConfigManager = class {
   setMcpServerUrl(url) {
     this.conf.set("mcpServerUrl", url);
   }
-  /**
-   * Generic setter for any config key
-   */
-  set(key, value) {
-    this.conf.set(key, value);
-  }
   /**
    * Get the model to use
    */
@@ -120,13 +144,27 @@ var ConfigManager = class {
     this.conf.set("model", model);
   }
   /**
-   * Check if the CLI is configured (has
+   * Check if the CLI is configured (has required LLM API key)
    * Discovery MCP works without any user key (embedded public key)
    * Trading MCP requires a user key
    */
   isConfigured() {
+    const provider = this.getProvider();
+    if (provider === "openrouter") {
+      return !!this.getOpenRouterApiKey();
+    }
     return !!this.getAnthropicApiKey();
   }
+  /**
+   * Get the appropriate LLM API key based on current provider
+   */
+  getLLMApiKey() {
+    const provider = this.getProvider();
+    if (provider === "openrouter") {
+      return this.getOpenRouterApiKey();
+    }
+    return this.getAnthropicApiKey();
+  }
   /**
    * Check if trading is enabled (has Quantish API key)
    */
@@ -139,9 +177,11 @@ var ConfigManager = class {
   getAll() {
     return {
       anthropicApiKey: this.getAnthropicApiKey(),
+      openrouterApiKey: this.getOpenRouterApiKey(),
      quantishApiKey: this.getQuantishApiKey(),
       mcpServerUrl: this.getMcpServerUrl(),
-      model: this.getModel()
+      model: this.getModel(),
+      provider: this.getProvider()
     };
   }
   /**
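
Taken together, the ConfigManager additions above route every key lookup through the provider setting instead of assuming Anthropic. A minimal usage sketch of that flow, relying only on the methods shown in this diff (getConfigManager() appears later in the same bundle):

    // Sketch: provider-aware key resolution built on the new ConfigManager methods.
    const config = getConfigManager();
    config.setProvider("openrouter");   // or leave the default "anthropic"
    const key = config.getLLMApiKey();  // OPENROUTER_API_KEY env var wins, then the stored openrouterApiKey
    if (!config.isConfigured()) {
      console.log("No API key saved for the selected provider");
    }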
@@ -527,27 +567,61 @@ async function runSetup() {
     return false;
   }
   console.log();
-  console.log(chalk.bold("Step 1:
-  console.log(chalk.dim("
-
-
-
-
-
-
+  console.log(chalk.bold("Step 1: Choose your LLM Provider"));
+  console.log(chalk.dim("The AI that powers the agent.\n"));
+  console.log(" 1. " + chalk.cyan("Anthropic") + chalk.dim(" (Claude models - Opus, Sonnet, Haiku)"));
+  console.log(" 2. " + chalk.green("OpenRouter") + chalk.dim(" (Access 100+ models - MiniMax, DeepSeek, etc.)\n"));
+  const providerChoice = await prompt("Choose (1 or 2): ");
+  const useOpenRouter = providerChoice === "2";
+  if (useOpenRouter) {
+    config.setProvider("openrouter");
+    console.log();
+    console.log(chalk.bold("OpenRouter API Key"));
+    console.log(chalk.dim("Get yours at https://openrouter.ai/keys\n"));
+    let openrouterKey = config.getOpenRouterApiKey();
+    if (openrouterKey) {
+      console.log(chalk.dim(`Current: ${openrouterKey.slice(0, 10)}...`));
+      const newKey = await prompt("Enter new key (or press Enter to keep current): ", true);
+      if (newKey) {
+        openrouterKey = newKey;
+      }
+    } else {
+      openrouterKey = await prompt("Enter your OpenRouter API key: ", true);
     }
+    if (!openrouterKey) {
+      console.log(chalk.red("OpenRouter API key is required."));
+      return false;
+    }
+    if (!openrouterKey.startsWith("sk-or-")) {
+      console.log(chalk.yellow("Warning: Key doesn't look like an OpenRouter key (should start with sk-or-)"));
+    }
+    config.setOpenRouterApiKey(openrouterKey);
+    console.log(chalk.green("\u2713 OpenRouter API key saved\n"));
   } else {
-
-
-
-    console.log(chalk.
-
-
-
-
+    config.setProvider("anthropic");
+    console.log();
+    console.log(chalk.bold("Anthropic API Key"));
+    console.log(chalk.dim("Get yours at https://console.anthropic.com/\n"));
+    let anthropicKey = config.getAnthropicApiKey();
+    if (anthropicKey) {
+      console.log(chalk.dim(`Current: ${anthropicKey.slice(0, 10)}...`));
+      const newKey = await prompt("Enter new key (or press Enter to keep current): ", true);
+      if (newKey) {
+        anthropicKey = newKey;
+      }
+    } else {
+      anthropicKey = await prompt("Enter your Anthropic API key: ", true);
+    }
+    if (!anthropicKey) {
+      console.log(chalk.red("Anthropic API key is required."));
+      return false;
+    }
+    if (!anthropicKey.startsWith("sk-ant-")) {
+      console.log(chalk.yellow("Warning: Key doesn't look like an Anthropic key (should start with sk-ant-)"));
+    }
+    config.setAnthropicApiKey(anthropicKey);
+    console.log(chalk.green("\u2713 Anthropic API key saved\n"));
   }
-  config.setAnthropicApiKey(anthropicKey);
-  console.log(chalk.green("\u2713 Anthropic API key saved\n"));
   console.log(chalk.bold("Step 2: Polymarket Trading (Optional)"));
   console.log(chalk.dim("Enable trading on Polymarket with your own managed wallet."));
   console.log(chalk.dim("Skip this if you only want to search/discover markets.\n"));
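
The setup step above only warns when a pasted key does not match the expected prefix; it never hard-fails on format. A standalone sketch of that heuristic, using the same prefixes the diff introduces (the helper name is hypothetical, not part of the package):

    // Sketch of the prefix check used in runSetup; a mismatch is a warning, not an error.
    function looksLikeProviderKey(provider, key) {
      if (provider === "openrouter") return key.startsWith("sk-or-");
      return key.startsWith("sk-ant-"); // anthropic
    }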
@@ -681,15 +755,14 @@ async function runSetup() {
 async function ensureConfigured() {
   const config = getConfigManager();
   if (!config.isConfigured()) {
-    console.log(chalk.yellow("Quantish CLI is not configured yet
-
-    return false;
+    console.log(chalk.yellow("Quantish CLI is not configured yet.\n"));
+    return await runSetup();
   }
   return true;
 }
 
 // src/agent/loop.ts
-import
+import Anthropic2 from "@anthropic-ai/sdk";
 
 // src/tools/filesystem.ts
 import * as fs from "fs/promises";
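
With this change, ensureConfigured() launches the interactive setup instead of simply returning false, so command entry points can gate on it directly. A minimal sketch (the runTask wrapper is hypothetical):

    // Sketch: gate a CLI command on configuration; setup runs automatically if needed.
    async function runTask(task) {
      if (!await ensureConfigured()) return; // user aborted or setup failed
      // ... continue into the agent loop
    }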
@@ -780,41 +853,6 @@ async function fileExists(filePath) {
     return { success: false, error: `Failed to check file: ${error2 instanceof Error ? error2.message : String(error2)}` };
   }
 }
-async function editLines(filePath, startLine, endLine, newContent) {
-  try {
-    const resolvedPath = path.resolve(filePath);
-    if (!existsSync(resolvedPath)) {
-      return { success: false, error: `File not found: ${filePath}` };
-    }
-    const content = await fs.readFile(resolvedPath, "utf-8");
-    const lines = content.split("\n");
-    if (startLine < 1 || endLine < startLine || startLine > lines.length) {
-      return {
-        success: false,
-        error: `Invalid line range: ${startLine}-${endLine}. File has ${lines.length} lines.`
-      };
-    }
-    const startIdx = startLine - 1;
-    const endIdx = Math.min(endLine, lines.length);
-    const newLines = newContent.split("\n");
-    const beforeLines = lines.slice(0, startIdx);
-    const afterLines = lines.slice(endIdx);
-    const resultLines = [...beforeLines, ...newLines, ...afterLines];
-    const newFileContent = resultLines.join("\n");
-    await fs.writeFile(resolvedPath, newFileContent, "utf-8");
-    return {
-      success: true,
-      data: {
-        path: resolvedPath,
-        linesReplaced: endIdx - startIdx,
-        newLinesInserted: newLines.length,
-        totalLines: resultLines.length
-      }
-    };
-  } catch (error2) {
-    return { success: false, error: `Failed to edit lines: ${error2 instanceof Error ? error2.message : String(error2)}` };
-  }
-}
 async function editFile(filePath, oldString, newString, options) {
   try {
     const resolvedPath = path.resolve(filePath);
@@ -932,35 +970,9 @@ var filesystemTools = [
       required: ["path"]
     }
   },
-  {
-    name: "edit_lines",
-    description: "Edit specific lines in a file by line number. MORE EFFICIENT than edit_file - use this when you know the line numbers from read_file. Only sends line numbers + new content, not full old content.",
-    input_schema: {
-      type: "object",
-      properties: {
-        path: {
-          type: "string",
-          description: "The path to the file to edit"
-        },
-        start_line: {
-          type: "number",
-          description: "The first line number to replace (1-based, inclusive)"
-        },
-        end_line: {
-          type: "number",
-          description: "The last line number to replace (1-based, inclusive)"
-        },
-        new_content: {
-          type: "string",
-          description: "The new content to insert (replaces lines start_line through end_line)"
-        }
-      },
-      required: ["path", "start_line", "end_line", "new_content"]
-    }
-  },
   {
     name: "edit_file",
-    description: "Edit a file by replacing a specific string with new content.
+    description: "Edit a file by replacing a specific string with new content. Safer than write_file as it only modifies the targeted section. The old_string must match exactly (including whitespace).",
     input_schema: {
       type: "object",
       properties: {
@@ -983,111 +995,8 @@ var filesystemTools = [
       },
       required: ["path", "old_string", "new_string"]
     }
-  },
-  {
-    name: "setup_env",
-    description: "Setup or update environment variables in a .env file for an application. Creates .env if it doesn't exist. Optionally creates a .env.example template. Use this when building any application that needs API keys or configuration.",
-    input_schema: {
-      type: "object",
-      properties: {
-        path: {
-          type: "string",
-          description: 'Path to the .env file (default: ".env" in current directory)'
-        },
-        variables: {
-          type: "object",
-          description: 'Object with environment variable names as keys and values. Example: { "QUANTISH_API_KEY": "abc123", "TOKEN_ID": "xyz" }',
-          additionalProperties: { type: "string" }
-        },
-        overwrite: {
-          type: "boolean",
-          description: "If true, overwrite existing variables. Default false (skip existing)."
-        },
-        create_example: {
-          type: "boolean",
-          description: "If true, also create a .env.example template file with placeholder values."
-        }
-      },
-      required: ["variables"]
-    }
   }
 ];
-async function setupEnv(envPath = ".env", variables, options) {
-  try {
-    const resolvedPath = path.resolve(envPath);
-    let content = "";
-    const existingVars = {};
-    if (existsSync(resolvedPath)) {
-      content = await fs.readFile(resolvedPath, "utf-8");
-      for (const line of content.split("\n")) {
-        const trimmed = line.trim();
-        if (trimmed && !trimmed.startsWith("#")) {
-          const eqIndex = trimmed.indexOf("=");
-          if (eqIndex > 0) {
-            const key = trimmed.slice(0, eqIndex);
-            const value = trimmed.slice(eqIndex + 1);
-            existingVars[key] = value;
-          }
-        }
-      }
-    }
-    const updatedVars = [];
-    const addedVars = [];
-    const skippedVars = [];
-    for (const [key, value] of Object.entries(variables)) {
-      if (existingVars[key] !== void 0) {
-        if (options?.overwrite) {
-          const regex = new RegExp(`^${key}=.*$`, "m");
-          content = content.replace(regex, `${key}=${value}`);
-          updatedVars.push(key);
-        } else {
-          skippedVars.push(key);
-        }
-      } else {
-        if (content && !content.endsWith("\n")) {
-          content += "\n";
-        }
-        content += `${key}=${value}
-`;
-        addedVars.push(key);
-      }
-    }
-    await fs.writeFile(resolvedPath, content, "utf-8");
-    if (options?.createExample) {
-      const examplePath = resolvedPath.replace(/\.env$/, ".env.example");
-      let exampleContent = "# Environment variables for this application\n";
-      exampleContent += "# Copy this file to .env and fill in your values\n\n";
-      for (const key of Object.keys({ ...existingVars, ...variables })) {
-        if (key === "QUANTISH_API_KEY") {
-          exampleContent += `# Get your API key at https://quantish.live
-`;
-          exampleContent += `${key}=your_api_key_here
-
-`;
-        } else {
-          exampleContent += `${key}=
-`;
-        }
-      }
-      await fs.writeFile(examplePath, exampleContent, "utf-8");
-    }
-    return {
-      success: true,
-      data: {
-        path: resolvedPath,
-        added: addedVars,
-        updated: updatedVars,
-        skipped: skippedVars,
-        exampleCreated: options?.createExample || false
-      }
-    };
-  } catch (error2) {
-    return {
-      success: false,
-      error: `Failed to setup env: ${error2 instanceof Error ? error2.message : String(error2)}`
-    };
-  }
-}
 async function executeFilesystemTool(name, args) {
   switch (name) {
     case "read_file":
@@ -1103,13 +1012,6 @@ async function executeFilesystemTool(name, args) {
       return deleteFile(args.path);
     case "file_exists":
       return fileExists(args.path);
-    case "edit_lines":
-      return editLines(
-        args.path,
-        args.start_line,
-        args.end_line,
-        args.new_content
-      );
     case "edit_file":
       return editFile(
         args.path,
@@ -1117,15 +1019,6 @@ async function executeFilesystemTool(name, args) {
         args.new_string,
         { replaceAll: args.replace_all }
       );
-    case "setup_env":
-      return setupEnv(
-        args.path || ".env",
-        args.variables,
-        {
-          overwrite: args.overwrite,
-          createExample: args.create_example
-        }
-      );
     default:
       return { success: false, error: `Unknown filesystem tool: ${name}` };
   }
@@ -2425,16 +2318,16 @@ async function compactConversation(anthropic, history, model, systemPrompt, tool
 
 // src/agent/pricing.ts
 var MODELS = {
-  "claude-opus-4-5-
-  id: "claude-opus-4-5-
+  "claude-opus-4-5-20250929": {
+    id: "claude-opus-4-5-20250929",
     name: "opus-4.5",
     displayName: "Claude Opus 4.5",
     pricing: {
-      inputPerMTok:
-      outputPerMTok:
-      cacheWritePerMTok:
+      inputPerMTok: 5,
+      outputPerMTok: 25,
+      cacheWritePerMTok: 6.25,
       // 1.25x input
-      cacheReadPerMTok:
+      cacheReadPerMTok: 0.5
       // 0.1x input
     },
     contextWindow: 2e5,
@@ -2455,8 +2348,8 @@ var MODELS = {
     contextWindow: 2e5,
     description: "Balanced performance and cost. Great for most coding and trading tasks."
   },
-  "claude-haiku-4-5-
-  id: "claude-haiku-4-5-
+  "claude-haiku-4-5-20250929": {
+    id: "claude-haiku-4-5-20250929",
     name: "haiku-4.5",
     displayName: "Claude Haiku 4.5",
     pricing: {
@@ -2473,12 +2366,12 @@ var MODELS = {
 };
 var DEFAULT_MODEL = "claude-sonnet-4-5-20250929";
 var MODEL_ALIASES = {
-  "opus": "claude-opus-4-5-
-  "opus-4.5": "claude-opus-4-5-
+  "opus": "claude-opus-4-5-20250929",
+  "opus-4.5": "claude-opus-4-5-20250929",
   "sonnet": "claude-sonnet-4-5-20250929",
   "sonnet-4.5": "claude-sonnet-4-5-20250929",
-  "haiku": "claude-haiku-4-5-
-  "haiku-4.5": "claude-haiku-4-5-
+  "haiku": "claude-haiku-4-5-20250929",
+  "haiku-4.5": "claude-haiku-4-5-20250929"
 };
 function resolveModelId(nameOrAlias) {
   const lower = nameOrAlias.toLowerCase();
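
With the corrected Opus 4.5 pricing above ($5/M input, $25/M output, $6.25/M cache write, $0.50/M cache read), a quick sanity check, assuming calculateCost applies the per-million-token rates directly (as the OpenRouter cost variant shown later in this diff does):

    // 20,000 input + 1,500 output tokens on claude-opus-4-5-20250929:
    //   input:  20000 / 1e6 * 5  = $0.10
    //   output:  1500 / 1e6 * 25 = $0.0375   -> total ~ $0.1375
    const cost = calculateCost("claude-opus-4-5-20250929", 20000, 1500);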
@@ -2493,14 +2386,35 @@ function resolveModelId(nameOrAlias) {
       return id;
     }
   }
+  if (OPENROUTER_MODELS[lower]) {
+    return lower;
+  }
+  if (OPENROUTER_MODEL_ALIASES[lower]) {
+    return OPENROUTER_MODEL_ALIASES[lower];
+  }
+  for (const [id, config] of Object.entries(OPENROUTER_MODELS)) {
+    if (config.name.toLowerCase() === lower) {
+      return id;
+    }
+  }
+  if (nameOrAlias.includes("/")) {
+    return nameOrAlias;
+  }
   return null;
 }
 function getModelPricing(modelId) {
-  const
-
+  const anthropicModel = MODELS[modelId];
+  if (anthropicModel?.pricing) {
+    return anthropicModel.pricing;
+  }
+  const openrouterModel = OPENROUTER_MODELS[modelId];
+  if (openrouterModel?.pricing) {
+    return openrouterModel.pricing;
+  }
+  return null;
 }
 function getModelConfig(modelId) {
-  return MODELS[modelId] ?? null;
+  return MODELS[modelId] ?? OPENROUTER_MODELS[modelId] ?? null;
 }
 function calculateCost(modelId, inputTokens, outputTokens, cacheCreationTokens = 0, cacheReadTokens = 0) {
   const pricing = getModelPricing(modelId);
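
The extended resolveModelId above now falls through to the OpenRouter tables and finally accepts any slash-qualified ID verbatim. For example (aliases taken from the OPENROUTER_MODEL_ALIASES map added in the next hunk; the unknown names are made up):

    resolveModelId("opus");              // "claude-opus-4-5-20250929"  (Anthropic alias table)
    resolveModelId("deepseek");          // "deepseek/deepseek-chat"    (OpenRouter alias table)
    resolveModelId("some-vendor/model"); // returned as-is because it contains "/"
    resolveModelId("no-such-model");     // null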
@@ -2544,60 +2458,875 @@ function formatCost(cost) {
|
|
|
2544
2458
|
function listModels() {
|
|
2545
2459
|
return Object.values(MODELS);
|
|
2546
2460
|
}
|
|
2461
|
+
var OPENROUTER_MODELS = {
|
|
2462
|
+
"z-ai/glm-4.7": {
|
|
2463
|
+
id: "z-ai/glm-4.7",
|
|
2464
|
+
name: "glm-4.7",
|
|
2465
|
+
displayName: "GLM 4.7",
|
|
2466
|
+
pricing: {
|
|
2467
|
+
inputPerMTok: 0.4,
|
|
2468
|
+
outputPerMTok: 1.5,
|
|
2469
|
+
cacheWritePerMTok: 0,
|
|
2470
|
+
cacheReadPerMTok: 0
|
|
2471
|
+
},
|
|
2472
|
+
contextWindow: 202752,
|
|
2473
|
+
description: "Z.AI flagship. Enhanced programming, multi-step reasoning, agent tasks."
|
|
2474
|
+
},
|
|
2475
|
+
"minimax/minimax-m2.1": {
|
|
2476
|
+
id: "minimax/minimax-m2.1",
|
|
2477
|
+
name: "minimax-m2.1",
|
|
2478
|
+
displayName: "MiniMax M2.1",
|
|
2479
|
+
pricing: {
|
|
2480
|
+
inputPerMTok: 0.3,
|
|
2481
|
+
outputPerMTok: 1.2,
|
|
2482
|
+
cacheWritePerMTok: 0,
|
|
2483
|
+
cacheReadPerMTok: 0
|
|
2484
|
+
},
|
|
2485
|
+
contextWindow: 204800,
|
|
2486
|
+
description: "Lightweight, optimized for coding and agentic workflows."
|
|
2487
|
+
},
|
|
2488
|
+
"deepseek/deepseek-chat": {
|
|
2489
|
+
id: "deepseek/deepseek-chat",
|
|
2490
|
+
name: "deepseek-chat",
|
|
2491
|
+
displayName: "DeepSeek Chat",
|
|
2492
|
+
pricing: {
|
|
2493
|
+
inputPerMTok: 0.14,
|
|
2494
|
+
outputPerMTok: 0.28,
|
|
2495
|
+
cacheWritePerMTok: 0,
|
|
2496
|
+
cacheReadPerMTok: 0
|
|
2497
|
+
},
|
|
2498
|
+
contextWindow: 128e3,
|
|
2499
|
+
description: "Ultra-cheap, strong coding and reasoning. Great for high-volume."
|
|
2500
|
+
},
|
|
2501
|
+
"google/gemini-2.0-flash-001": {
|
|
2502
|
+
id: "google/gemini-2.0-flash-001",
|
|
2503
|
+
name: "gemini-2.0-flash",
|
|
2504
|
+
displayName: "Gemini 2.0 Flash",
|
|
2505
|
+
pricing: {
|
|
2506
|
+
inputPerMTok: 0.1,
|
|
2507
|
+
outputPerMTok: 0.4,
|
|
2508
|
+
cacheWritePerMTok: 0,
|
|
2509
|
+
cacheReadPerMTok: 0
|
|
2510
|
+
},
|
|
2511
|
+
contextWindow: 1e6,
|
|
2512
|
+
description: "Google's fast multimodal model. 1M context window."
|
|
2513
|
+
},
|
|
2514
|
+
"qwen/qwen-2.5-coder-32b-instruct": {
|
|
2515
|
+
id: "qwen/qwen-2.5-coder-32b-instruct",
|
|
2516
|
+
name: "qwen-coder-32b",
|
|
2517
|
+
displayName: "Qwen 2.5 Coder 32B",
|
|
2518
|
+
pricing: {
|
|
2519
|
+
inputPerMTok: 0.18,
|
|
2520
|
+
outputPerMTok: 0.18,
|
|
2521
|
+
cacheWritePerMTok: 0,
|
|
2522
|
+
cacheReadPerMTok: 0
|
|
2523
|
+
},
|
|
2524
|
+
contextWindow: 32768,
|
|
2525
|
+
description: "Alibaba's coding specialist. Excellent for code generation."
|
|
2526
|
+
}
|
|
2527
|
+
};
|
|
2528
|
+
var OPENROUTER_MODEL_ALIASES = {
|
|
2529
|
+
"glm": "z-ai/glm-4.7",
|
|
2530
|
+
"glm-4.7": "z-ai/glm-4.7",
|
|
2531
|
+
"minimax": "minimax/minimax-m2.1",
|
|
2532
|
+
"deepseek": "deepseek/deepseek-chat",
|
|
2533
|
+
"gemini": "google/gemini-2.0-flash-001",
|
|
2534
|
+
"gemini-flash": "google/gemini-2.0-flash-001",
|
|
2535
|
+
"qwen": "qwen/qwen-2.5-coder-32b-instruct",
|
|
2536
|
+
"qwen-coder": "qwen/qwen-2.5-coder-32b-instruct"
|
|
2537
|
+
};
|
|
2547
2538
|
|
|
2548
|
-
// src/agent/
|
|
2549
|
-
|
|
2550
|
-
|
|
2551
|
-
|
|
2552
|
-
|
|
2553
|
-
|
|
2539
|
+
// src/agent/provider.ts
|
|
2540
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
2541
|
+
|
|
2542
|
+
// src/agent/openrouter.ts
|
|
2543
|
+
var OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
|
|
2544
|
+
var OPENROUTER_MODELS2 = {
|
|
2545
|
+
// MiniMax models - very cost effective
|
|
2546
|
+
"minimax/minimax-m2.1": {
|
|
2547
|
+
id: "minimax/minimax-m2.1",
|
|
2548
|
+
name: "minimax-m2.1",
|
|
2549
|
+
displayName: "MiniMax M2.1",
|
|
2550
|
+
provider: "MiniMax",
|
|
2551
|
+
pricing: {
|
|
2552
|
+
inputPerMTok: 0.3,
|
|
2553
|
+
// $0.0000003 * 1M
|
|
2554
|
+
outputPerMTok: 1.2,
|
|
2555
|
+
// $0.0000012 * 1M
|
|
2556
|
+
cacheReadPerMTok: 0.03,
|
|
2557
|
+
cacheWritePerMTok: 0.375
|
|
2558
|
+
},
|
|
2559
|
+
contextWindow: 204800,
|
|
2560
|
+
maxOutputTokens: 131072,
|
|
2561
|
+
supportsTools: true,
|
|
2562
|
+
supportsReasoning: true,
|
|
2563
|
+
description: "10B active params, state-of-the-art for coding and agentic workflows. Very cost efficient."
|
|
2564
|
+
},
|
|
2565
|
+
"minimax/minimax-m2": {
|
|
2566
|
+
id: "minimax/minimax-m2",
|
|
2567
|
+
name: "minimax-m2",
|
|
2568
|
+
displayName: "MiniMax M2",
|
|
2569
|
+
provider: "MiniMax",
|
|
2570
|
+
pricing: {
|
|
2571
|
+
inputPerMTok: 0.2,
|
|
2572
|
+
outputPerMTok: 1,
|
|
2573
|
+
cacheReadPerMTok: 0.03
|
|
2574
|
+
},
|
|
2575
|
+
contextWindow: 196608,
|
|
2576
|
+
maxOutputTokens: 131072,
|
|
2577
|
+
supportsTools: true,
|
|
2578
|
+
supportsReasoning: true,
|
|
2579
|
+
description: "Compact model optimized for end-to-end coding and agentic workflows."
|
|
2580
|
+
},
|
|
2581
|
+
// DeepSeek models - very cheap
|
|
2582
|
+
"deepseek/deepseek-v3.2": {
|
|
2583
|
+
id: "deepseek/deepseek-v3.2",
|
|
2584
|
+
name: "deepseek-v3.2",
|
|
2585
|
+
displayName: "DeepSeek V3.2",
|
|
2586
|
+
provider: "DeepSeek",
|
|
2587
|
+
pricing: {
|
|
2588
|
+
inputPerMTok: 0.224,
|
|
2589
|
+
outputPerMTok: 0.32
|
|
2590
|
+
},
|
|
2591
|
+
contextWindow: 163840,
|
|
2592
|
+
supportsTools: true,
|
|
2593
|
+
supportsReasoning: true,
|
|
2594
|
+
description: "High efficiency with strong reasoning. GPT-5 class performance."
|
|
2595
|
+
},
|
|
2596
|
+
// Mistral models
|
|
2597
|
+
"mistralai/devstral-2512": {
|
|
2598
|
+
id: "mistralai/devstral-2512",
|
|
2599
|
+
name: "devstral-2512",
|
|
2600
|
+
displayName: "Devstral 2 2512",
|
|
2601
|
+
provider: "Mistral",
|
|
2602
|
+
pricing: {
|
|
2603
|
+
inputPerMTok: 0.05,
|
|
2604
|
+
outputPerMTok: 0.22
|
|
2605
|
+
},
|
|
2606
|
+
contextWindow: 262144,
|
|
2607
|
+
supportsTools: true,
|
|
2608
|
+
description: "State-of-the-art open model for agentic coding. 123B params."
|
|
2609
|
+
},
|
|
2610
|
+
"mistralai/mistral-large-2512": {
|
|
2611
|
+
id: "mistralai/mistral-large-2512",
|
|
2612
|
+
name: "mistral-large-2512",
|
|
2613
|
+
displayName: "Mistral Large 3",
|
|
2614
|
+
provider: "Mistral",
|
|
2615
|
+
pricing: {
|
|
2616
|
+
inputPerMTok: 0.5,
|
|
2617
|
+
outputPerMTok: 1.5
|
|
2618
|
+
},
|
|
2619
|
+
contextWindow: 262144,
|
|
2620
|
+
supportsTools: true,
|
|
2621
|
+
description: "Most capable Mistral model. 675B total params (41B active)."
|
|
2622
|
+
},
|
|
2623
|
+
// Google Gemini
|
|
2624
|
+
"google/gemini-3-flash-preview": {
|
|
2625
|
+
id: "google/gemini-3-flash-preview",
|
|
2626
|
+
name: "gemini-3-flash",
|
|
2627
|
+
displayName: "Gemini 3 Flash Preview",
|
|
2628
|
+
provider: "Google",
|
|
2629
|
+
pricing: {
|
|
2630
|
+
inputPerMTok: 0.5,
|
|
2631
|
+
outputPerMTok: 3,
|
|
2632
|
+
cacheReadPerMTok: 0.05
|
|
2633
|
+
},
|
|
2634
|
+
contextWindow: 1048576,
|
|
2635
|
+
supportsTools: true,
|
|
2636
|
+
supportsReasoning: true,
|
|
2637
|
+
description: "High speed thinking model for agentic workflows. 1M context."
|
|
2638
|
+
},
|
|
2639
|
+
"google/gemini-3-pro-preview": {
|
|
2640
|
+
id: "google/gemini-3-pro-preview",
|
|
2641
|
+
name: "gemini-3-pro",
|
|
2642
|
+
displayName: "Gemini 3 Pro Preview",
|
|
2643
|
+
provider: "Google",
|
|
2644
|
+
pricing: {
|
|
2645
|
+
inputPerMTok: 2,
|
|
2646
|
+
outputPerMTok: 12,
|
|
2647
|
+
cacheReadPerMTok: 0.2,
|
|
2648
|
+
cacheWritePerMTok: 2.375
|
|
2649
|
+
},
|
|
2650
|
+
contextWindow: 1048576,
|
|
2651
|
+
supportsTools: true,
|
|
2652
|
+
supportsReasoning: true,
|
|
2653
|
+
description: "Flagship frontier model for high-precision multimodal reasoning."
|
|
2654
|
+
},
|
|
2655
|
+
// xAI Grok
|
|
2656
|
+
"x-ai/grok-4.1-fast": {
|
|
2657
|
+
id: "x-ai/grok-4.1-fast",
|
|
2658
|
+
name: "grok-4.1-fast",
|
|
2659
|
+
displayName: "Grok 4.1 Fast",
|
|
2660
|
+
provider: "xAI",
|
|
2661
|
+
pricing: {
|
|
2662
|
+
inputPerMTok: 0.2,
|
|
2663
|
+
outputPerMTok: 0.5,
|
|
2664
|
+
cacheReadPerMTok: 0.05
|
|
2665
|
+
},
|
|
2666
|
+
contextWindow: 2e6,
|
|
2667
|
+
maxOutputTokens: 3e4,
|
|
2668
|
+
supportsTools: true,
|
|
2669
|
+
supportsReasoning: true,
|
|
2670
|
+
description: "Best agentic tool calling model. 2M context window."
|
|
2671
|
+
},
|
|
2672
|
+
// Anthropic via OpenRouter (for fallback/comparison)
|
|
2673
|
+
"anthropic/claude-opus-4.5": {
|
|
2674
|
+
id: "anthropic/claude-opus-4.5",
|
|
2675
|
+
name: "claude-opus-4.5-or",
|
|
2676
|
+
displayName: "Claude Opus 4.5 (OR)",
|
|
2677
|
+
provider: "Anthropic",
|
|
2678
|
+
pricing: {
|
|
2679
|
+
inputPerMTok: 5,
|
|
2680
|
+
outputPerMTok: 25,
|
|
2681
|
+
cacheReadPerMTok: 0.5,
|
|
2682
|
+
cacheWritePerMTok: 6.25
|
|
2683
|
+
},
|
|
2684
|
+
contextWindow: 2e5,
|
|
2685
|
+
maxOutputTokens: 32e3,
|
|
2686
|
+
supportsTools: true,
|
|
2687
|
+
supportsReasoning: true,
|
|
2688
|
+
description: "Anthropic Opus 4.5 via OpenRouter."
|
|
2689
|
+
},
|
|
2690
|
+
"anthropic/claude-haiku-4.5": {
|
|
2691
|
+
id: "anthropic/claude-haiku-4.5",
|
|
2692
|
+
name: "claude-haiku-4.5-or",
|
|
2693
|
+
displayName: "Claude Haiku 4.5 (OR)",
|
|
2694
|
+
provider: "Anthropic",
|
|
2695
|
+
pricing: {
|
|
2696
|
+
inputPerMTok: 1,
|
|
2697
|
+
outputPerMTok: 5,
|
|
2698
|
+
cacheReadPerMTok: 0.1,
|
|
2699
|
+
cacheWritePerMTok: 1.25
|
|
2700
|
+
},
|
|
2701
|
+
contextWindow: 2e5,
|
|
2702
|
+
maxOutputTokens: 64e3,
|
|
2703
|
+
supportsTools: true,
|
|
2704
|
+
supportsReasoning: true,
|
|
2705
|
+
description: "Anthropic Haiku 4.5 via OpenRouter. Fast and efficient."
|
|
2706
|
+
},
|
|
2707
|
+
// Free models (for testing/experimentation)
|
|
2708
|
+
"mistralai/devstral-2512:free": {
|
|
2709
|
+
id: "mistralai/devstral-2512:free",
|
|
2710
|
+
name: "devstral-free",
|
|
2711
|
+
displayName: "Devstral 2 (Free)",
|
|
2712
|
+
provider: "Mistral",
|
|
2713
|
+
pricing: {
|
|
2714
|
+
inputPerMTok: 0,
|
|
2715
|
+
outputPerMTok: 0
|
|
2716
|
+
},
|
|
2717
|
+
contextWindow: 262144,
|
|
2718
|
+
supportsTools: true,
|
|
2719
|
+
description: "Free tier Devstral for testing. Limited capacity."
|
|
2720
|
+
},
|
|
2721
|
+
"xiaomi/mimo-v2-flash:free": {
|
|
2722
|
+
id: "xiaomi/mimo-v2-flash:free",
|
|
2723
|
+
name: "mimo-v2-flash-free",
|
|
2724
|
+
displayName: "MiMo V2 Flash (Free)",
|
|
2725
|
+
provider: "Xiaomi",
|
|
2726
|
+
pricing: {
|
|
2727
|
+
inputPerMTok: 0,
|
|
2728
|
+
outputPerMTok: 0
|
|
2729
|
+
},
|
|
2730
|
+
contextWindow: 262144,
|
|
2731
|
+
supportsTools: true,
|
|
2732
|
+
supportsReasoning: true,
|
|
2733
|
+
description: "Free MoE model. Top open-source on SWE-bench."
|
|
2554
2734
|
}
|
|
2555
|
-
|
|
2556
|
-
|
|
2557
|
-
|
|
2558
|
-
|
|
2559
|
-
|
|
2560
|
-
|
|
2561
|
-
|
|
2562
|
-
|
|
2563
|
-
|
|
2564
|
-
|
|
2565
|
-
|
|
2566
|
-
|
|
2567
|
-
|
|
2568
|
-
|
|
2569
|
-
|
|
2570
|
-
|
|
2571
|
-
|
|
2572
|
-
|
|
2573
|
-
|
|
2574
|
-
|
|
2575
|
-
|
|
2576
|
-
|
|
2577
|
-
|
|
2578
|
-
|
|
2579
|
-
|
|
2735
|
+
};
|
|
2736
|
+
var OPENROUTER_ALIASES = {
|
|
2737
|
+
// MiniMax
|
|
2738
|
+
"minimax": "minimax/minimax-m2.1",
|
|
2739
|
+
"m2": "minimax/minimax-m2",
|
|
2740
|
+
"m2.1": "minimax/minimax-m2.1",
|
|
2741
|
+
// DeepSeek
|
|
2742
|
+
"deepseek": "deepseek/deepseek-v3.2",
|
|
2743
|
+
"ds": "deepseek/deepseek-v3.2",
|
|
2744
|
+
// Mistral
|
|
2745
|
+
"devstral": "mistralai/devstral-2512",
|
|
2746
|
+
"mistral": "mistralai/mistral-large-2512",
|
|
2747
|
+
"mistral-large": "mistralai/mistral-large-2512",
|
|
2748
|
+
// Google
|
|
2749
|
+
"gemini": "google/gemini-3-flash-preview",
|
|
2750
|
+
"gemini-flash": "google/gemini-3-flash-preview",
|
|
2751
|
+
"gemini-pro": "google/gemini-3-pro-preview",
|
|
2752
|
+
// xAI
|
|
2753
|
+
"grok": "x-ai/grok-4.1-fast",
|
|
2754
|
+
// Anthropic via OR
|
|
2755
|
+
"opus-or": "anthropic/claude-opus-4.5",
|
|
2756
|
+
"haiku-or": "anthropic/claude-haiku-4.5",
|
|
2757
|
+
// Free
|
|
2758
|
+
"free": "mistralai/devstral-2512:free",
|
|
2759
|
+
"mimo": "xiaomi/mimo-v2-flash:free"
|
|
2760
|
+
};
|
|
2761
|
+
function resolveOpenRouterModelId(nameOrAlias) {
|
|
2762
|
+
const lower = nameOrAlias.toLowerCase();
|
|
2763
|
+
if (OPENROUTER_MODELS2[lower]) {
|
|
2764
|
+
return lower;
|
|
2580
2765
|
}
|
|
2581
|
-
if (
|
|
2582
|
-
return
|
|
2766
|
+
if (OPENROUTER_ALIASES[lower]) {
|
|
2767
|
+
return OPENROUTER_ALIASES[lower];
|
|
2583
2768
|
}
|
|
2584
|
-
|
|
2585
|
-
|
|
2769
|
+
for (const [id, config] of Object.entries(OPENROUTER_MODELS2)) {
|
|
2770
|
+
if (config.name.toLowerCase() === lower) {
|
|
2771
|
+
return id;
|
|
2772
|
+
}
|
|
2586
2773
|
}
|
|
2587
|
-
if (
|
|
2588
|
-
return
|
|
2774
|
+
if (nameOrAlias.includes("/")) {
|
|
2775
|
+
return nameOrAlias;
|
|
2589
2776
|
}
|
|
2590
|
-
return
|
|
2777
|
+
return null;
|
|
2591
2778
|
}
|
|
2592
|
-
function
|
|
2593
|
-
|
|
2594
|
-
|
|
2595
|
-
|
|
2596
|
-
)
|
|
2597
|
-
|
|
2598
|
-
|
|
2599
|
-
|
|
2600
|
-
|
|
2779
|
+
function getOpenRouterModelConfig(modelId) {
|
|
2780
|
+
return OPENROUTER_MODELS2[modelId] ?? null;
|
|
2781
|
+
}
|
|
2782
|
+
function convertToOpenAITools(anthropicTools) {
|
|
2783
|
+
return anthropicTools.map((tool) => ({
|
|
2784
|
+
type: "function",
|
|
2785
|
+
function: {
|
|
2786
|
+
name: tool.name,
|
|
2787
|
+
description: tool.description ?? "",
|
|
2788
|
+
parameters: tool.input_schema
|
|
2789
|
+
}
|
|
2790
|
+
}));
|
|
2791
|
+
}
|
|
2792
|
+
var OpenRouterClient = class {
|
|
2793
|
+
apiKey;
|
|
2794
|
+
baseUrl;
|
|
2795
|
+
appName;
|
|
2796
|
+
appUrl;
|
|
2797
|
+
constructor(config) {
|
|
2798
|
+
this.apiKey = config.apiKey;
|
|
2799
|
+
this.baseUrl = config.baseUrl ?? OPENROUTER_BASE_URL;
|
|
2800
|
+
this.appName = config.appName ?? "Quantish Agent";
|
|
2801
|
+
this.appUrl = config.appUrl ?? "https://quantish.ai";
|
|
2802
|
+
}
|
|
2803
|
+
/**
|
|
2804
|
+
* Create a chat completion (non-streaming)
|
|
2805
|
+
*/
|
|
2806
|
+
async createChatCompletion(options) {
|
|
2807
|
+
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
2808
|
+
method: "POST",
|
|
2809
|
+
headers: {
|
|
2810
|
+
"Authorization": `Bearer ${this.apiKey}`,
|
|
2811
|
+
"Content-Type": "application/json",
|
|
2812
|
+
"HTTP-Referer": this.appUrl,
|
|
2813
|
+
"X-Title": this.appName
|
|
2814
|
+
},
|
|
2815
|
+
body: JSON.stringify({
|
|
2816
|
+
model: options.model,
|
|
2817
|
+
messages: options.messages,
|
|
2818
|
+
tools: options.tools,
|
|
2819
|
+
tool_choice: options.tool_choice ?? (options.tools ? "auto" : void 0),
|
|
2820
|
+
max_tokens: options.max_tokens,
|
|
2821
|
+
temperature: options.temperature,
|
|
2822
|
+
top_p: options.top_p,
|
|
2823
|
+
stream: false
|
|
2824
|
+
})
|
|
2825
|
+
});
|
|
2826
|
+
if (!response.ok) {
|
|
2827
|
+
const errorText = await response.text();
|
|
2828
|
+
throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
|
|
2829
|
+
}
|
|
2830
|
+
return response.json();
|
|
2831
|
+
}
|
|
2832
|
+
/**
|
|
2833
|
+
* Create a streaming chat completion
|
|
2834
|
+
*/
|
|
2835
|
+
async *createStreamingChatCompletion(options) {
|
|
2836
|
+
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
2837
|
+
method: "POST",
|
|
2838
|
+
headers: {
|
|
2839
|
+
"Authorization": `Bearer ${this.apiKey}`,
|
|
2840
|
+
"Content-Type": "application/json",
|
|
2841
|
+
"HTTP-Referer": this.appUrl,
|
|
2842
|
+
"X-Title": this.appName
|
|
2843
|
+
},
|
|
2844
|
+
body: JSON.stringify({
|
|
2845
|
+
model: options.model,
|
|
2846
|
+
messages: options.messages,
|
|
2847
|
+
tools: options.tools,
|
|
2848
|
+
tool_choice: options.tool_choice ?? (options.tools ? "auto" : void 0),
|
|
2849
|
+
max_tokens: options.max_tokens,
|
|
2850
|
+
temperature: options.temperature,
|
|
2851
|
+
top_p: options.top_p,
|
|
2852
|
+
stream: true
|
|
2853
|
+
})
|
|
2854
|
+
});
|
|
2855
|
+
if (!response.ok) {
|
|
2856
|
+
const errorText = await response.text();
|
|
2857
|
+
throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
|
|
2858
|
+
}
|
|
2859
|
+
if (!response.body) {
|
|
2860
|
+
throw new Error("No response body for streaming request");
|
|
2861
|
+
}
|
|
2862
|
+
const reader = response.body.getReader();
|
|
2863
|
+
const decoder = new TextDecoder();
|
|
2864
|
+
let buffer = "";
|
|
2865
|
+
try {
|
|
2866
|
+
while (true) {
|
|
2867
|
+
const { done, value } = await reader.read();
|
|
2868
|
+
if (done) break;
|
|
2869
|
+
buffer += decoder.decode(value, { stream: true });
|
|
2870
|
+
const lines = buffer.split("\n");
|
|
2871
|
+
buffer = lines.pop() ?? "";
|
|
2872
|
+
for (const line of lines) {
|
|
2873
|
+
const trimmed = line.trim();
|
|
2874
|
+
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
2875
|
+
if (!trimmed.startsWith("data: ")) continue;
|
|
2876
|
+
try {
|
|
2877
|
+
const json = JSON.parse(trimmed.slice(6));
|
|
2878
|
+
yield json;
|
|
2879
|
+
} catch {
|
|
2880
|
+
}
|
|
2881
|
+
}
|
|
2882
|
+
}
|
|
2883
|
+
} finally {
|
|
2884
|
+
reader.releaseLock();
|
|
2885
|
+
}
|
|
2886
|
+
}
|
|
2887
|
+
/**
|
|
2888
|
+
* Get generation details including exact cost
|
|
2889
|
+
*/
|
|
2890
|
+
async getGenerationDetails(generationId) {
|
|
2891
|
+
const response = await fetch(`${this.baseUrl}/generation?id=${generationId}`, {
|
|
2892
|
+
method: "GET",
|
|
2893
|
+
headers: {
|
|
2894
|
+
"Authorization": `Bearer ${this.apiKey}`
|
|
2895
|
+
}
|
|
2896
|
+
});
|
|
2897
|
+
if (!response.ok) {
|
|
2898
|
+
const errorText = await response.text();
|
|
2899
|
+
throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
|
|
2900
|
+
}
|
|
2901
|
+
return response.json();
|
|
2902
|
+
}
|
|
2903
|
+
/**
|
|
2904
|
+
* List available models
|
|
2905
|
+
*/
|
|
2906
|
+
async listModels() {
|
|
2907
|
+
const response = await fetch(`${this.baseUrl}/models`, {
|
|
2908
|
+
method: "GET",
|
|
2909
|
+
headers: {
|
|
2910
|
+
"Authorization": `Bearer ${this.apiKey}`
|
|
2911
|
+
}
|
|
2912
|
+
});
|
|
2913
|
+
if (!response.ok) {
|
|
2914
|
+
const errorText = await response.text();
|
|
2915
|
+
throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
|
|
2916
|
+
}
|
|
2917
|
+
return response.json();
|
|
2918
|
+
}
|
|
2919
|
+
};
|
|
2920
|
+
function calculateOpenRouterCost(modelId, inputTokens, outputTokens, cacheReadTokens = 0, cacheWriteTokens = 0) {
|
|
2921
|
+
const config = getOpenRouterModelConfig(modelId);
|
|
2922
|
+
const pricing = config?.pricing ?? {
|
|
2923
|
+
inputPerMTok: 1,
|
|
2924
|
+
outputPerMTok: 3,
|
|
2925
|
+
cacheReadPerMTok: 0.1,
|
|
2926
|
+
cacheWritePerMTok: 1.25
|
|
2927
|
+
};
|
|
2928
|
+
const inputCost = inputTokens / 1e6 * pricing.inputPerMTok;
|
|
2929
|
+
const outputCost = outputTokens / 1e6 * pricing.outputPerMTok;
|
|
2930
|
+
const cacheReadCost = cacheReadTokens / 1e6 * (pricing.cacheReadPerMTok ?? pricing.inputPerMTok * 0.1);
|
|
2931
|
+
const cacheWriteCost = cacheWriteTokens / 1e6 * (pricing.cacheWritePerMTok ?? pricing.inputPerMTok * 1.25);
|
|
2932
|
+
return {
|
|
2933
|
+
inputCost,
|
|
2934
|
+
outputCost,
|
|
2935
|
+
cacheReadCost,
|
|
2936
|
+
cacheWriteCost,
|
|
2937
|
+
totalCost: inputCost + outputCost + cacheReadCost + cacheWriteCost
|
|
2938
|
+
};
|
|
2939
|
+
}
|
|
2940
|
+
function listOpenRouterModels() {
|
|
2941
|
+
return Object.values(OPENROUTER_MODELS2);
|
|
2942
|
+
}
|
|
2943
|
+
|
|
2944
|
+
// src/agent/provider.ts
|
|
2945
|
+
var AnthropicProvider = class {
|
|
2946
|
+
client;
|
|
2947
|
+
config;
|
|
2948
|
+
constructor(config) {
|
|
2949
|
+
this.config = config;
|
|
2950
|
+
const headers = {};
|
|
2951
|
+
if (config.contextEditing && config.contextEditing.length > 0) {
|
|
2952
|
+
headers["anthropic-beta"] = "context-management-2025-06-27";
|
|
2953
|
+
}
|
|
2954
|
+
this.client = new Anthropic({
|
|
2955
|
+
apiKey: config.apiKey,
|
|
2956
|
+
defaultHeaders: Object.keys(headers).length > 0 ? headers : void 0
|
|
2957
|
+
});
|
|
2958
|
+
}
|
|
2959
|
+
getModel() {
|
|
2960
|
+
return this.config.model;
|
|
2961
|
+
}
|
|
2962
|
+
async countTokens(messages) {
|
|
2963
|
+
try {
|
|
2964
|
+
const response = await this.client.messages.countTokens({
|
|
2965
|
+
model: this.config.model,
|
|
2966
|
+
system: this.config.systemPrompt,
|
|
2967
|
+
tools: this.config.tools,
|
|
2968
|
+
messages
|
|
2969
|
+
});
|
|
2970
|
+
return response.input_tokens;
|
|
2971
|
+
} catch {
|
|
2972
|
+
return 0;
|
|
2973
|
+
}
|
|
2974
|
+
}
|
|
2975
|
+
async chat(messages) {
|
|
2976
|
+
const systemWithCache = [
|
|
2977
|
+
{
|
|
2978
|
+
type: "text",
|
|
2979
|
+
text: this.config.systemPrompt,
|
|
2980
|
+
cache_control: { type: "ephemeral" }
|
|
2981
|
+
}
|
|
2982
|
+
];
|
|
2983
|
+
const response = await this.client.messages.create({
|
|
2984
|
+
model: this.config.model,
|
|
2985
|
+
max_tokens: this.config.maxTokens,
|
|
2986
|
+
system: systemWithCache,
|
|
2987
|
+
tools: this.config.tools,
|
|
2988
|
+
messages
|
|
2989
|
+
});
|
|
2990
|
+
const usage = response.usage;
|
|
2991
|
+
const cost = calculateCost(
|
|
2992
|
+
this.config.model,
|
|
2993
|
+
usage.input_tokens,
|
|
2994
|
+
usage.output_tokens,
|
|
2995
|
+
usage.cache_creation_input_tokens ?? 0,
|
|
2996
|
+
usage.cache_read_input_tokens ?? 0
|
|
2997
|
+
);
|
|
2998
|
+
const textBlocks = response.content.filter(
|
|
2999
|
+
(block) => block.type === "text"
|
|
3000
|
+
);
|
|
3001
|
+
const toolUses = response.content.filter(
|
|
3002
|
+
(block) => block.type === "tool_use"
|
|
3003
|
+
);
|
|
3004
|
+
return {
|
|
3005
|
+
text: textBlocks.map((b) => b.text).join(""),
|
|
3006
|
+
toolCalls: toolUses.map((t) => ({
|
|
3007
|
+
id: t.id,
|
|
3008
|
+
name: t.name,
|
|
3009
|
+
input: t.input
|
|
3010
|
+
})),
|
|
3011
|
+
usage: {
|
|
3012
|
+
inputTokens: usage.input_tokens,
|
|
3013
|
+
outputTokens: usage.output_tokens,
|
|
3014
|
+
cacheCreationTokens: usage.cache_creation_input_tokens ?? 0,
|
|
3015
|
+
cacheReadTokens: usage.cache_read_input_tokens ?? 0
|
|
3016
|
+
},
|
|
3017
|
+
cost,
|
|
3018
|
+
stopReason: response.stop_reason === "tool_use" ? "tool_use" : "end_turn",
|
|
3019
|
+
rawResponse: response
|
|
3020
|
+
};
|
|
3021
|
+
}
|
|
3022
|
+
async streamChat(messages, callbacks) {
|
|
3023
|
+
const systemWithCache = [
|
|
3024
|
+
{
|
|
3025
|
+
type: "text",
|
|
3026
|
+
text: this.config.systemPrompt,
|
|
3027
|
+
cache_control: { type: "ephemeral" }
|
|
3028
|
+
}
|
|
3029
|
+
];
|
|
3030
|
+
const stream = this.client.messages.stream({
|
|
3031
|
+
model: this.config.model,
|
|
3032
|
+
max_tokens: this.config.maxTokens,
|
|
3033
|
+
system: systemWithCache,
|
|
3034
|
+
tools: this.config.tools,
|
|
3035
|
+
messages
|
|
3036
|
+
});
|
|
3037
|
+
let fullText = "";
|
|
3038
|
+
for await (const event of stream) {
|
|
3039
|
+
if (event.type === "content_block_delta") {
|
|
3040
|
+
const delta = event.delta;
|
|
3041
|
+
if (delta.type === "text_delta" && delta.text) {
|
|
3042
|
+
fullText += delta.text;
|
|
3043
|
+
callbacks.onText?.(delta.text);
|
|
3044
|
+
} else if (delta.type === "thinking_delta" && delta.thinking) {
|
|
3045
|
+
callbacks.onThinking?.(delta.thinking);
|
|
3046
|
+
}
|
|
3047
|
+
}
|
|
3048
|
+
}
|
|
3049
|
+
const response = await stream.finalMessage();
|
|
3050
|
+
const usage = response.usage;
|
|
3051
|
+
const cost = calculateCost(
|
|
3052
|
+
this.config.model,
|
|
3053
|
+
usage.input_tokens,
|
|
3054
|
+
usage.output_tokens,
|
|
3055
|
+
usage.cache_creation_input_tokens ?? 0,
|
|
3056
|
+
usage.cache_read_input_tokens ?? 0
|
|
3057
|
+
);
|
|
3058
|
+
const toolUses = response.content.filter(
|
|
3059
|
+
(block) => block.type === "tool_use"
|
|
3060
|
+
);
|
|
3061
|
+
for (const tool of toolUses) {
|
|
3062
|
+
callbacks.onToolCall?.(tool.id, tool.name, tool.input);
|
|
3063
|
+
}
|
|
3064
|
+
return {
|
|
3065
|
+
text: fullText,
|
|
3066
|
+
toolCalls: toolUses.map((t) => ({
|
|
3067
|
+
id: t.id,
|
|
3068
|
+
name: t.name,
|
|
3069
|
+
input: t.input
|
|
3070
|
+
})),
|
|
3071
|
+
usage: {
|
|
3072
|
+
inputTokens: usage.input_tokens,
|
|
3073
|
+
outputTokens: usage.output_tokens,
|
|
3074
|
+
cacheCreationTokens: usage.cache_creation_input_tokens ?? 0,
|
|
3075
|
+
cacheReadTokens: usage.cache_read_input_tokens ?? 0
|
|
3076
|
+
},
|
|
3077
|
+
cost,
|
|
3078
|
+
stopReason: response.stop_reason === "tool_use" ? "tool_use" : "end_turn",
|
|
3079
|
+
rawResponse: response
|
|
3080
|
+
};
|
|
3081
|
+
}
|
|
3082
|
+
};
|
|
3083
|
+
var OpenRouterProvider = class {
|
|
3084
|
+
client;
|
|
3085
|
+
config;
|
|
3086
|
+
openaiTools;
|
|
3087
|
+
constructor(config) {
|
|
3088
|
+
this.config = config;
|
|
3089
|
+
this.client = new OpenRouterClient({
|
|
3090
|
+
apiKey: config.apiKey
|
|
3091
|
+
});
|
|
3092
|
+
this.openaiTools = convertToOpenAITools(config.tools);
|
|
3093
|
+
}
|
|
3094
|
+
getModel() {
|
|
3095
|
+
return this.config.model;
|
|
3096
|
+
}
|
|
3097
|
+
async countTokens(_messages) {
|
|
3098
|
+
const text = JSON.stringify(_messages);
|
|
3099
|
+
return Math.ceil(text.length / 4);
|
|
3100
|
+
}
|
|
3101
|
+
/**
|
|
3102
|
+
* Convert Anthropic message format to OpenAI format
|
|
3103
|
+
*/
|
|
3104
|
+
convertMessages(messages) {
|
|
3105
|
+
const result = [];
|
|
3106
|
+
result.push({
|
|
3107
|
+
role: "system",
|
|
3108
|
+
content: this.config.systemPrompt
|
|
3109
|
+
});
|
|
3110
|
+
for (const msg of messages) {
|
|
3111
|
+
if (msg.role === "user") {
|
|
3112
|
+
if (typeof msg.content === "string") {
|
|
3113
|
+
result.push({ role: "user", content: msg.content });
|
|
3114
|
+
} else if (Array.isArray(msg.content)) {
|
|
3115
|
+
const toolResults = msg.content.filter(
|
|
3116
|
+
(block) => block.type === "tool_result"
|
|
3117
|
+
);
|
|
3118
|
+
if (toolResults.length > 0) {
|
|
3119
|
+
for (const tr of toolResults) {
|
|
3120
|
+
const toolResult = tr;
|
|
3121
|
+
result.push({
|
|
3122
|
+
role: "tool",
|
|
3123
|
+
tool_call_id: toolResult.tool_use_id,
|
|
3124
|
+
content: typeof toolResult.content === "string" ? toolResult.content : JSON.stringify(toolResult.content)
|
|
3125
|
+
});
|
|
3126
|
+
}
|
|
3127
|
+
} else {
|
|
3128
|
+
const textContent = msg.content.filter((block) => block.type === "text").map((block) => block.text).join("");
|
|
3129
|
+
if (textContent) {
|
|
3130
|
+
result.push({ role: "user", content: textContent });
|
|
3131
|
+
}
|
|
3132
|
+
}
|
|
3133
|
+
}
|
|
3134
|
+
} else if (msg.role === "assistant") {
|
|
3135
|
+
if (typeof msg.content === "string") {
|
|
3136
|
+
result.push({ role: "assistant", content: msg.content });
|
|
3137
|
+
} else if (Array.isArray(msg.content)) {
|
|
3138
|
+
const textBlocks = msg.content.filter(
|
|
3139
|
+
(block) => block.type === "text"
|
|
3140
|
+
);
|
|
3141
|
+
const toolUses = msg.content.filter(
|
|
3142
|
+
(block) => block.type === "tool_use"
|
|
3143
|
+
);
|
|
3144
|
+
const textContent = textBlocks.map((b) => b.text).join("");
|
|
3145
|
+
if (toolUses.length > 0) {
|
|
3146
|
+
result.push({
|
|
3147
|
+
role: "assistant",
|
|
3148
|
+
content: textContent || null,
|
|
3149
|
+
tool_calls: toolUses.map((t) => ({
|
|
3150
|
+
id: t.id,
|
|
3151
|
+
type: "function",
|
|
3152
|
+
function: {
|
|
3153
|
+
name: t.name,
|
|
3154
|
+
arguments: JSON.stringify(t.input)
|
|
3155
|
+
}
|
|
3156
|
+
}))
|
|
3157
|
+
});
|
|
3158
|
+
} else {
|
|
3159
|
+
result.push({ role: "assistant", content: textContent });
|
|
3160
|
+
}
|
|
3161
|
+
}
|
|
3162
|
+
}
|
|
3163
|
+
}
|
|
3164
|
+
return result;
|
|
3165
|
+
}
|
|
3166
|
+
async chat(messages) {
|
|
3167
|
+
const openaiMessages = this.convertMessages(messages);
|
|
3168
|
+
const response = await this.client.createChatCompletion({
|
|
3169
|
+
model: this.config.model,
|
|
3170
|
+
messages: openaiMessages,
|
|
3171
|
+
tools: this.openaiTools.length > 0 ? this.openaiTools : void 0,
|
|
3172
|
+
max_tokens: this.config.maxTokens
|
|
3173
|
+
});
|
|
3174
|
+
const choice = response.choices[0];
|
|
3175
|
+
const usage = response.usage ?? { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
|
|
3176
|
+
const cost = calculateOpenRouterCost(
|
|
3177
|
+
this.config.model,
|
|
3178
|
+
usage.prompt_tokens,
|
|
3179
|
+
usage.completion_tokens
|
|
3180
|
+
);
|
|
3181
|
+
const toolCalls = choice.message.tool_calls ?? [];
|
|
3182
|
+
return {
|
|
3183
|
+
text: choice.message.content ?? "",
|
|
3184
|
+
toolCalls: toolCalls.map((tc) => ({
|
|
3185
|
+
id: tc.id,
|
|
3186
|
+
name: tc.function.name,
|
|
3187
|
+
input: JSON.parse(tc.function.arguments)
|
|
3188
|
+
})),
|
|
3189
|
+
usage: {
|
|
3190
|
+
inputTokens: usage.prompt_tokens,
|
|
3191
|
+
outputTokens: usage.completion_tokens,
|
|
3192
|
+
cacheCreationTokens: 0,
|
|
3193
|
+
cacheReadTokens: 0
|
|
3194
|
+
},
|
|
3195
|
+
cost,
|
|
3196
|
+
stopReason: choice.finish_reason === "tool_calls" ? "tool_use" : "end_turn",
|
|
3197
|
+
rawResponse: response
|
|
3198
|
+
};
|
|
3199
|
+
}
|
|
3200
|
+
async streamChat(messages, callbacks) {
|
|
3201
|
+
const openaiMessages = this.convertMessages(messages);
|
|
3202
|
+
let fullText = "";
|
|
3203
|
+
const toolCallsInProgress = /* @__PURE__ */ new Map();
|
|
3204
|
+
let finishReason = null;
|
|
3205
|
+
let usage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
|
|
3206
|
+
const stream = this.client.createStreamingChatCompletion({
|
|
3207
|
+
model: this.config.model,
|
|
3208
|
+
messages: openaiMessages,
|
|
3209
|
+
tools: this.openaiTools.length > 0 ? this.openaiTools : void 0,
|
|
3210
|
+
max_tokens: this.config.maxTokens
|
|
3211
|
+
});
|
|
3212
|
+
for await (const chunk of stream) {
|
|
3213
|
+
const choice = chunk.choices[0];
|
|
3214
|
+
if (!choice) continue;
|
|
3215
|
+
if (choice.delta.content) {
|
|
3216
|
+
fullText += choice.delta.content;
|
|
3217
|
+
callbacks.onText?.(choice.delta.content);
|
|
3218
|
+
}
|
|
3219
|
+
if (choice.delta.tool_calls) {
|
|
3220
|
+
for (const tcDelta of choice.delta.tool_calls) {
|
|
3221
|
+
const existing = toolCallsInProgress.get(tcDelta.index);
|
|
3222
|
+
if (!existing) {
|
|
3223
|
+
toolCallsInProgress.set(tcDelta.index, {
|
|
3224
|
+
id: tcDelta.id ?? "",
|
|
3225
|
+
name: tcDelta.function?.name ?? "",
|
|
3226
|
+
arguments: tcDelta.function?.arguments ?? ""
|
|
3227
|
+
});
|
|
3228
|
+
} else {
|
|
3229
|
+
if (tcDelta.id) existing.id = tcDelta.id;
|
|
3230
|
+
if (tcDelta.function?.name) existing.name = tcDelta.function.name;
|
|
3231
|
+
if (tcDelta.function?.arguments) existing.arguments += tcDelta.function.arguments;
|
|
3232
|
+
}
|
|
3233
|
+
}
|
|
3234
|
+
}
|
|
3235
|
+
if (choice.finish_reason) {
|
|
3236
|
+
finishReason = choice.finish_reason;
|
|
3237
|
+
}
|
|
3238
|
+
if (chunk.usage) {
|
|
3239
|
+
usage = chunk.usage;
|
|
3240
|
+
}
|
|
3241
|
+
}
|
|
3242
|
+
const toolCalls = [];
|
|
3243
|
+
for (const [, tc] of toolCallsInProgress) {
|
|
3244
|
+
try {
|
|
3245
|
+
const input = JSON.parse(tc.arguments || "{}");
|
|
3246
|
+
toolCalls.push({ id: tc.id, name: tc.name, input });
|
|
3247
|
+
callbacks.onToolCall?.(tc.id, tc.name, input);
|
|
3248
|
+
} catch {
|
|
3249
|
+
}
|
|
3250
|
+
}
|
|
3251
|
+
const cost = calculateOpenRouterCost(
|
|
3252
|
+
this.config.model,
|
|
3253
|
+
usage.prompt_tokens,
|
|
3254
|
+
usage.completion_tokens
|
|
3255
|
+
);
|
|
3256
|
+
return {
|
|
3257
|
+
text: fullText,
|
|
3258
|
+
toolCalls,
|
|
3259
|
+
usage: {
|
|
3260
|
+
inputTokens: usage.prompt_tokens,
|
|
3261
|
+
outputTokens: usage.completion_tokens,
|
|
3262
|
+
cacheCreationTokens: 0,
|
|
3263
|
+
cacheReadTokens: 0
|
|
3264
|
+
},
|
|
3265
|
+
cost,
|
|
3266
|
+
stopReason: finishReason === "tool_calls" ? "tool_use" : "end_turn"
|
|
3267
|
+
};
|
|
3268
|
+
}
|
|
3269
|
+
};
|
|
3270
|
+
function createLLMProvider(config) {
|
|
3271
|
+
if (config.provider === "openrouter") {
|
|
3272
|
+
return new OpenRouterProvider(config);
|
|
3273
|
+
}
|
|
3274
|
+
return new AnthropicProvider(config);
|
|
3275
|
+
}
|
|
3276
|
+
|
|
3277
|
+
// src/agent/loop.ts
|
|
3278
|
+
var MAX_TOOL_RESULT_CHARS = 8e3;
|
|
3279
|
+
function truncateToolResult(result, toolName) {
|
|
3280
|
+
const resultStr = JSON.stringify(result);
|
|
3281
|
+
if (resultStr.length <= MAX_TOOL_RESULT_CHARS) {
|
|
3282
|
+
return result;
|
|
3283
|
+
}
|
|
3284
|
+
if (typeof result === "object" && result !== null) {
|
|
3285
|
+
const obj = result;
|
|
3286
|
+
if (Array.isArray(obj.content) && obj.content.length > 0) {
|
|
3287
|
+
const firstContent = obj.content[0];
|
|
3288
|
+
if (firstContent?.type === "text" && typeof firstContent.text === "string") {
|
|
3289
|
+
try {
|
|
3290
|
+
const innerData = JSON.parse(firstContent.text);
|
|
3291
|
+
const truncatedInner = truncateDataObject(innerData);
|
|
3292
|
+
return {
|
|
3293
|
+
content: [{
|
|
3294
|
+
type: "text",
|
|
3295
|
+
text: JSON.stringify(truncatedInner)
|
|
3296
|
+
}]
|
|
3297
|
+
};
|
|
3298
|
+
} catch {
|
|
3299
|
+
const truncatedText = firstContent.text.length > MAX_TOOL_RESULT_CHARS ? firstContent.text.substring(0, MAX_TOOL_RESULT_CHARS) + "... [truncated]" : firstContent.text;
|
|
3300
|
+
return {
|
|
3301
|
+
content: [{
|
|
3302
|
+
type: "text",
|
|
3303
|
+
text: truncatedText
|
|
3304
|
+
}]
|
|
3305
|
+
};
|
|
3306
|
+
}
|
|
3307
|
+
}
|
|
3308
|
+
}
|
|
3309
|
+
}
|
|
3310
|
+
if (Array.isArray(result)) {
|
|
3311
|
+
return truncateArray(result);
|
|
3312
|
+
}
|
|
3313
|
+
if (typeof result === "object" && result !== null) {
|
|
3314
|
+
return truncateDataObject(result);
|
|
3315
|
+
}
|
|
3316
|
+
if (typeof result === "string" && result.length > MAX_TOOL_RESULT_CHARS) {
|
|
3317
|
+
return result.substring(0, MAX_TOOL_RESULT_CHARS) + "... [truncated]";
|
|
3318
|
+
}
|
|
3319
|
+
return result;
|
|
3320
|
+
}
|
|
3321
|
+
function truncateArray(arr) {
|
|
3322
|
+
const MAX_ITEMS = 5;
|
|
3323
|
+
const truncated = arr.slice(0, MAX_ITEMS).map(
|
|
3324
|
+
(item) => typeof item === "object" && item !== null ? truncateObject(item) : item
|
|
3325
|
+
);
|
|
3326
|
+
return {
|
|
3327
|
+
_truncated: arr.length > MAX_ITEMS,
|
|
3328
|
+
_originalCount: arr.length,
|
|
3329
|
+
_note: arr.length > MAX_ITEMS ? `Showing ${MAX_ITEMS} of ${arr.length} items.` : void 0,
|
|
2601
3330
|
items: truncated
|
|
2602
3331
|
};
|
|
2603
3332
|
}
|
|
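
The OpenRouter path above prices usage with calculateOpenRouterCost, which falls back to $1/M input and $3/M output when a model is missing from the table, and createLLMProvider picks the provider class from the config. A minimal sketch of both, using only names visible in this diff (the maxTokens value and user message are placeholders):

    // Cost check for minimax/minimax-m2.1 ($0.30/M input, $1.20/M output):
    //   50000 / 1e6 * 0.3 = $0.015; 4000 / 1e6 * 1.2 = $0.0048 -> totalCost ~ $0.0198
    const { totalCost } = calculateOpenRouterCost("minimax/minimax-m2.1", 50000, 4000);

    // Both providers expose the same chat/streamChat/countTokens surface.
    const config = getConfigManager();
    const provider = createLLMProvider({
      provider: config.getProvider(),   // "anthropic" | "openrouter"
      apiKey: config.getLLMApiKey(),
      model: config.getModel(),
      systemPrompt: DEFAULT_SYSTEM_PROMPT,
      maxTokens: 8192,                  // placeholder
      tools: filesystemTools
    });
    const reply = await provider.chat([{ role: "user", content: "Summarize my open positions" }]);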
@@ -2729,82 +3458,36 @@ function extractTokenInfo(token) {
  price: token.price ?? token.probability
  };
  }
- var DEFAULT_SYSTEM_PROMPT = `You are Quantish, an AI coding and trading agent.
+ var DEFAULT_SYSTEM_PROMPT = `You are Quantish, an AI coding and trading agent.

-
+ You have two sets of capabilities:

-
-
- -
- -
+ ## Trading Tools (via MCP)
+ You can interact with Polymarket prediction markets:
+ - Check wallet balances and positions
+ - Place, cancel, and manage orders
+ - Transfer funds and claim winnings
+ - Get market prices and orderbook data

-
-
- -
- -
- -
+ ## Coding Tools (local)
+ You can work with the local filesystem:
+ - Read and write files
+ - List directories and search with grep
+ - Run shell commands
+ - Use git for version control

- ##
+ ## Guidelines
+ - Be concise and helpful
+ - When making trades, always confirm details before proceeding
+ - Prices on Polymarket are between 0.01 and 0.99 (probabilities)
+ - Minimum order value is $1
+ - When writing code, follow existing patterns and conventions
+ - For dangerous operations (rm, sudo), explain what you're doing

-
- {
- "found": N,
- "markets": [{ "platform", "id", "title", "markets": [{ "marketId", "question", "outcomes": [{ "name", "price" }], "clobTokenIds": "[json_array]", "conditionId" }] }]
- }
-
- get_market_details returns:
- {
- "platform": "polymarket",
- "id": "12345",
- "conditionId": "0x...",
- "title": "Market Title",
- "clobTokenIds": "["TOKEN_YES","TOKEN_NO"]",
- "markets": [{
- "marketId": "67890",
- "question": "Question?",
- "outcomes": [{ "name": "Yes", "price": 0.55 }, { "name": "No", "price": 0.45 }],
- "clobTokenIds": "["TOKEN_YES","TOKEN_NO"]"
- }]
- }
-
- KEY FIELDS:
- - market.id = top-level ID for get_market_details
- - market.markets[0].marketId = sub-market ID
- - market.markets[0].outcomes[].name = "Yes"/"No" or outcome name
- - market.markets[0].outcomes[].price = decimal 0-1
- - JSON.parse(market.clobTokenIds || market.markets[0].clobTokenIds) = token IDs array
- - market.conditionId = condition ID for trading
-
- ## Standalone App Code
-
- Trading helper:
- async function callTradingTool(name, args = {}) {
- const res = await fetch('https://quantish-sdk-production.up.railway.app/mcp/execute', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json', 'x-api-key': process.env.QUANTISH_API_KEY },
- body: JSON.stringify({ jsonrpc: '2.0', method: 'tools/call', params: { name, arguments: args }, id: Date.now() })
- });
- return JSON.parse((await res.json()).result.content[0].text);
- }
-
- Discovery helper:
- async function callDiscoveryTool(name, args = {}) {
- const res = await fetch('https://quantish.live/mcp/execute', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json', 'X-API-Key': 'qm_ueQeqrmvZyHtR1zuVbLYkhx0fKyVAuV8' },
- body: JSON.stringify({ name, arguments: args })
- });
- return JSON.parse((await res.json()).result.content[0].text);
- }
-
- ## Rules
- 1. Never use @modelcontextprotocol/sdk - use fetch()
- 2. Always create .env.example and use dotenv
- 3. Never hardcode/mock data - always fetch real data
- 4. Check logs before restarting servers
- 5. PREFER edit_lines over edit_file - uses line numbers, saves tokens`;
+ You help users build trading bots and agents by combining coding skills with trading capabilities.`;
  var Agent = class {
  anthropic;
+ llmProvider;
  mcpClient;
  mcpClientManager;
  config;
@@ -2825,6 +3508,8 @@ var Agent = class {
  this.config = {
  enableLocalTools: true,
  enableMCPTools: true,
+ provider: "anthropic",
+ // Default to Anthropic
  // Default context editing: clear old tool uses when context exceeds 100k tokens
  contextEditing: config.contextEditing || [
  {
@@ -2839,14 +3524,176 @@ var Agent = class {
  if (this.config.contextEditing && this.config.contextEditing.length > 0) {
  headers["anthropic-beta"] = "context-management-2025-06-27";
  }
-
-
+ const anthropicKey = config.anthropicApiKey || "placeholder";
+ this.anthropic = new Anthropic2({
+ apiKey: anthropicKey,
  defaultHeaders: Object.keys(headers).length > 0 ? headers : void 0
  });
  this.mcpClient = config.mcpClient;
  this.mcpClientManager = config.mcpClientManager;
  this.workingDirectory = config.workingDirectory || process.cwd();
  }
+ /**
+ * Get the API key for the current provider
+ */
+ getApiKey() {
+ if (this.config.provider === "openrouter") {
+ return this.config.openrouterApiKey || "";
+ }
+ return this.config.anthropicApiKey || "";
+ }
+ /**
+ * Check if using OpenRouter provider
+ */
+ isOpenRouter() {
+ return this.config.provider === "openrouter";
+ }
+ /**
+ * Get the current provider name
+ */
+ getProvider() {
+ return this.config.provider || "anthropic";
+ }
+ /**
+ * Set the LLM provider
+ */
+ setProvider(provider) {
+ this.config.provider = provider;
+ this.llmProvider = void 0;
+ }
+ /**
+ * Get or create the LLM provider instance
+ */
+ async getOrCreateProvider() {
+ if (this.llmProvider) {
+ return this.llmProvider;
+ }
+ const allTools = await this.getAllTools();
+ const systemPrompt = this.config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;
+ const model = this.config.model ?? DEFAULT_MODEL;
+ const maxTokens = this.config.maxTokens ?? 8192;
+ this.llmProvider = createLLMProvider({
+ provider: this.config.provider || "anthropic",
+ apiKey: this.getApiKey(),
+ model,
+ maxTokens,
+ systemPrompt,
+ tools: allTools,
+ contextEditing: this.config.contextEditing
+ });
+ return this.llmProvider;
+ }
+ /**
+ * Run the agent using the provider abstraction (for OpenRouter and future providers)
+ */
+ async runWithProvider(userMessage) {
+ const maxIterations = this.config.maxIterations ?? 200;
+ const useStreaming = this.config.streaming ?? true;
+ const provider = await this.getOrCreateProvider();
+ const contextMessage = `[Working directory: ${this.workingDirectory}]
+
+ ${userMessage}`;
+ this.conversationHistory.push({
+ role: "user",
+ content: contextMessage
+ });
+ const toolCalls = [];
+ let iterations = 0;
+ let finalText = "";
+ while (iterations < maxIterations) {
+ iterations++;
+ this.config.onStreamStart?.();
+ let response;
+ if (useStreaming) {
+ response = await provider.streamChat(this.conversationHistory, {
+ onText: (text) => {
+ finalText += text;
+ this.config.onText?.(text, false);
+ },
+ onThinking: (text) => {
+ this.config.onThinking?.(text);
+ },
+ onToolCall: (id, name, input) => {
+ this.config.onToolCall?.(name, input);
+ }
+ });
+ if (response.text) {
+ this.config.onText?.("", true);
+ }
+ } else {
+ response = await provider.chat(this.conversationHistory);
+ if (response.text) {
+ finalText += response.text;
+ this.config.onText?.(response.text, true);
+ }
+ }
+ this.config.onStreamEnd?.();
+ this.updateTokenUsage({
+ input_tokens: response.usage.inputTokens,
+ output_tokens: response.usage.outputTokens,
+ cache_creation_input_tokens: response.usage.cacheCreationTokens,
+ cache_read_input_tokens: response.usage.cacheReadTokens
+ });
+ const responseContent = [];
+ if (response.text) {
+ responseContent.push({ type: "text", text: response.text });
+ }
+ for (const tc of response.toolCalls) {
+ responseContent.push({
+ type: "tool_use",
+ id: tc.id,
+ name: tc.name,
+ input: tc.input
+ });
+ }
+ if (response.toolCalls.length === 0) {
+ this.conversationHistory.push({
+ role: "assistant",
+ content: responseContent
+ });
+ break;
+ }
+ const toolResults = [];
+ for (const toolCall2 of response.toolCalls) {
+ await new Promise((resolve2) => setImmediate(resolve2));
+ const { result, source } = await this.executeTool(
+ toolCall2.name,
+ toolCall2.input
+ );
+ const success2 = !(result && typeof result === "object" && "error" in result);
+ this.config.onToolResult?.(toolCall2.name, result, success2);
+ toolCalls.push({
+ name: toolCall2.name,
+ input: toolCall2.input,
+ result,
+ source
+ });
+ toolResults.push({
+ type: "tool_result",
+ tool_use_id: toolCall2.id,
+ content: JSON.stringify(result)
+ });
+ }
+ this.conversationHistory.push({
+ role: "assistant",
+ content: responseContent
+ });
+ this.conversationHistory.push({
+ role: "user",
+ content: toolResults
+ });
+ this.truncateLastToolResults();
+ if (response.stopReason === "end_turn" && response.toolCalls.length === 0) {
+ break;
+ }
+ }
+ return {
+ text: finalText,
+ toolCalls,
+ iterations,
+ tokenUsage: { ...this.cumulativeTokenUsage }
+ };
+ }
  /**
  * Get all available tools
  */
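The runWithProvider loop above only touches a small provider surface: streamChat(messages, callbacks) or chat(messages), returning text, toolCalls ({ id, name, input }), stopReason, and a usage object with token counts. A minimal stub shaped to that contract, inferred from the calls above (this is a sketch, not the package's actual OpenRouterProvider):

    // Sketch only: the shape runWithProvider appears to expect from createLLMProvider().
    const stubProvider = {
      async chat(messages) {
        return {
          text: "stub reply",
          toolCalls: [],   // e.g. [{ id: "t1", name: "get_wallet", input: {} }]
          stopReason: "end_turn",
          usage: { inputTokens: 0, outputTokens: 0, cacheCreationTokens: 0, cacheReadTokens: 0 }
        };
      },
      async streamChat(messages, { onText, onThinking, onToolCall } = {}) {
        onText?.("stub ");
        onText?.("reply");
        return this.chat(messages);
      }
    };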
@@ -2899,16 +3746,16 @@ var Agent = class {
  }
  /**
  * Run the agent with a user message (supports streaming)
- * @param userMessage - The user's input message
- * @param options - Optional configuration including abort signal
  */
- async run(userMessage
-
+ async run(userMessage) {
+ if (this.config.provider === "openrouter") {
+ return this.runWithProvider(userMessage);
+ }
+ const maxIterations = this.config.maxIterations ?? 15;
  const model = this.config.model ?? "claude-sonnet-4-5-20250929";
  const maxTokens = this.config.maxTokens ?? 8192;
  const systemPrompt = this.config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;
  const useStreaming = this.config.streaming ?? true;
- const signal = options?.signal;
  const allTools = await this.getAllTools();
  const contextManagement = this.config.contextEditing && this.config.contextEditing.length > 0 ? { edits: this.config.contextEditing } : void 0;
  const contextMessage = `[Working directory: ${this.workingDirectory}]
@@ -2922,9 +3769,6 @@ ${userMessage}`;
  let iterations = 0;
  let finalText = "";
  while (iterations < maxIterations) {
- if (signal?.aborted) {
- throw new Error("Operation aborted by user");
- }
  iterations++;
  this.config.onStreamStart?.();
  let response;
@@ -2949,12 +3793,8 @@ ${userMessage}`;
  if (contextManagement) {
  streamOptions.context_management = contextManagement;
  }
- const stream = this.anthropic.messages.stream(streamOptions
+ const stream = this.anthropic.messages.stream(streamOptions);
  for await (const event of stream) {
- if (signal?.aborted) {
- stream.controller.abort();
- throw new Error("Operation aborted by user");
- }
  if (event.type === "content_block_delta") {
  const delta = event.delta;
  if (delta.type === "text_delta" && delta.text) {
@@ -3019,11 +3859,7 @@ ${userMessage}`;
  }
  const toolResults = [];
  for (const toolUse of toolUses) {
- if (signal?.aborted) {
- throw new Error("Operation aborted by user");
- }
  this.config.onToolCall?.(toolUse.name, toolUse.input);
- await new Promise((resolve2) => setImmediate(resolve2));
  const { result, source } = await this.executeTool(
  toolUse.name,
  toolUse.input
@@ -3204,19 +4040,34 @@ ${userMessage}`;
  * Set the model to use for future requests
  */
  setModel(modelIdOrAlias) {
-
+ let resolvedId = resolveModelId(modelIdOrAlias);
+ let displayName;
+ if (resolvedId) {
+ const modelConfig = getModelConfig(resolvedId);
+ displayName = modelConfig?.displayName;
+ } else {
+ resolvedId = resolveOpenRouterModelId(modelIdOrAlias);
+ if (resolvedId) {
+ const orConfig = getOpenRouterModelConfig(resolvedId);
+ displayName = orConfig?.displayName ?? resolvedId;
+ if (!this.isOpenRouter() && resolvedId.includes("/")) {
+ this.config.provider = "openrouter";
+ }
+ }
+ }
  if (!resolvedId) {
- const
+ const anthropicModels = Object.values(MODELS).map((m) => m.name).join(", ");
+ const orModels = Object.values(OPENROUTER_MODELS2).slice(0, 5).map((m) => m.name).join(", ");
  return {
  success: false,
- error: `Unknown model: "${modelIdOrAlias}".
+ error: `Unknown model: "${modelIdOrAlias}". Anthropic: ${anthropicModels}. OpenRouter: ${orModels}, ...`
  };
  }
  this.config.model = resolvedId;
-
+ this.llmProvider = void 0;
  return {
  success: true,
- model:
+ model: displayName ?? resolvedId
  };
  }
  /**
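setModel now resolves Anthropic aliases first and falls back to the OpenRouter catalog; a resolved ID containing a slash also flips the provider to openrouter and clears the cached llmProvider. Roughly (outcomes depend on the bundled MODELS / OPENROUTER_MODELS tables, which are not shown in this diff):

    // Illustrative only.
    agent.setModel("haiku");                // Anthropic alias -> provider unchanged
    agent.setModel("minimax/minimax-m2.1"); // slash-style OpenRouter ID -> provider becomes "openrouter"
    agent.setModel("no-such-model");        // -> { success: false, error: 'Unknown model: "no-such-model". Anthropic: ..., OpenRouter: ..., ...' }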
@@ -3342,7 +4193,7 @@ import { useState, useCallback, useRef, useEffect } from "react";
  import { Box, Text, useApp, useInput } from "ink";
  import TextInput from "ink-text-input";
  import Spinner from "ink-spinner";
- import {
+ import { jsx, jsxs } from "react/jsx-runtime";
  function formatTokenCount(count) {
  if (count < 1e3) return String(count);
  if (count < 1e5) return `${(count / 1e3).toFixed(1)}k`;
@@ -3357,7 +4208,8 @@ var SLASH_COMMANDS = [
  { cmd: "/help", desc: "Show available commands" },
  { cmd: "/clear", desc: "Clear conversation history" },
  { cmd: "/compact", desc: "Summarize conversation to save tokens" },
- { cmd: "/model", desc: "Switch model (opus, sonnet, haiku)" },
+ { cmd: "/model", desc: "Switch model (opus, sonnet, haiku, minimax, etc.)" },
+ { cmd: "/provider", desc: "Switch LLM provider (anthropic, openrouter)" },
  { cmd: "/cost", desc: "Show session cost breakdown" },
  { cmd: "/tools", desc: "List available tools" },
  { cmd: "/config", desc: "Show configuration info" },
@@ -3423,7 +4275,8 @@ function App({ agent, onExit }) {
  content: `\u{1F4DA} Available Commands:
  /clear - Clear conversation history
  /compact - Summarize conversation (keeps context, saves tokens)
- /model - Switch model (opus, sonnet, haiku)
+ /model - Switch model (opus, sonnet, haiku, minimax, deepseek, etc.)
+ /provider - Switch LLM provider (anthropic, openrouter)
  /cost - Show session cost breakdown
  /help - Show this help message
  /tools - List available tools
@@ -3576,30 +4429,46 @@ Use /stop <id> to stop a process.`
  case "model":
  if (!args) {
  const currentModel = agent.getModel();
+ const currentProvider = agent.getProvider();
  const modelConfig = getModelConfig(currentModel);
- const
- const
+ const orModelConfig = getOpenRouterModelConfig(currentModel);
+ const displayName = modelConfig?.displayName || orModelConfig?.displayName || currentModel;
+ const anthropicModels = listModels();
+ const anthropicList = anthropicModels.map((m) => {
  const isCurrent = m.id === currentModel ? " (current)" : "";
  return ` ${m.name}${isCurrent} - ${m.description}`;
  }).join("\n");
+ const orModels = listOpenRouterModels().slice(0, 8);
+ const orList = orModels.map((m) => {
+ const isCurrent = m.id === currentModel ? " (current)" : "";
+ return ` ${m.name}${isCurrent} - ${m.description.slice(0, 50)}...`;
+ }).join("\n");
  setMessages((prev) => [...prev, {
  role: "system",
- content: `\u{1F916} Current
+ content: `\u{1F916} Current: ${displayName} (${currentProvider})
+
+ Anthropic Models:
+ ${anthropicList}

-
- ${
+ OpenRouter Models (selection):
+ ${orList}
+ ... and many more! Use any OpenRouter model ID like 'minimax/minimax-m2.1'

- Usage: /model <name> (e.g., /model haiku, /model
+ Usage: /model <name> (e.g., /model haiku, /model minimax)
+ Using an OpenRouter model auto-switches to OpenRouter provider.`
  }]);
  return true;
  }
  const result = agent.setModel(args);
  if (result.success) {
- const
+ const anthropicConfig = getModelConfig(agent.getModel());
+ const orConfig = getOpenRouterModelConfig(agent.getModel());
+ const description = anthropicConfig?.description || orConfig?.description || "";
+ const providerInfo = agent.isOpenRouter() ? " (OpenRouter)" : " (Anthropic)";
  setMessages((prev) => [...prev, {
  role: "system",
- content: `\u2705 Switched to ${result.model}
- ${
+ content: `\u2705 Switched to ${result.model}${providerInfo}
+ ${description}`
  }]);
  } else {
  setMessages((prev) => [...prev, {
@@ -3608,6 +4477,43 @@ Usage: /model <name> (e.g., /model haiku, /model opus)`
  }]);
  }
  return true;
+ case "provider":
+ if (!args) {
+ const currentProvider = agent.getProvider();
+ setMessages((prev) => [...prev, {
+ role: "system",
+ content: `\u{1F527} LLM Provider
+
+ Current: ${currentProvider}
+
+ Available providers:
+ anthropic - Claude models (Opus, Sonnet, Haiku)
+ openrouter - Multi-provider access (MiniMax, DeepSeek, Gemini, etc.)
+
+ Usage: /provider <name> (e.g., /provider openrouter)
+
+ Note: When switching to OpenRouter, make sure OPENROUTER_API_KEY is set.
+ You can also just use /model with an OpenRouter model name.`
+ }]);
+ return true;
+ }
+ const providerArg = args.toLowerCase();
+ if (providerArg !== "anthropic" && providerArg !== "openrouter") {
+ setMessages((prev) => [...prev, {
+ role: "system",
+ content: `\u274C Unknown provider: "${args}". Use: anthropic, openrouter`
+ }]);
+ return true;
+ }
+ agent.setProvider(providerArg);
+ const providerModels = providerArg === "openrouter" ? "minimax, deepseek, gemini, grok, devstral" : "opus, sonnet, haiku";
+ setMessages((prev) => [...prev, {
+ role: "system",
+ content: `\u2705 Switched to ${providerArg} provider
+ Available models: ${providerModels}
+ Use /model to select a model.`
+ }]);
+ return true;
  case "cost":
  const usage = agent.getTokenUsage();
  const sessionCost = agent.getSessionCost();
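Programmatically, the new /provider command boils down to the Agent calls added earlier in this diff (sketch only; assumes OPENROUTER_API_KEY or the stored openrouterApiKey is set):

    // Equivalent of typing "/provider openrouter" followed by "/model minimax" in the chat UI.
    agent.setProvider("openrouter"); // resets the cached llmProvider
    agent.setModel("minimax");       // resolved against the OpenRouter model table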
@@ -3680,7 +4586,7 @@ Last API Call Cost:
  completedToolCalls.current = [];
  abortController.current = new AbortController();
  try {
- const result = await agent.run(trimmed
+ const result = await agent.run(trimmed);
  if (isInterrupted) {
  setMessages((prev) => [...prev, {
  role: "system",
@@ -3819,22 +4725,18 @@ Stopped ${count} background process${count > 1 ? "es" : ""}.`
  msg.role === "system" && /* @__PURE__ */ jsx(Box, { children: /* @__PURE__ */ jsx(Text, { color: "gray", italic: true, children: msg.content }) })
  ] }, i)) }),
  currentToolCalls.length > 0 && /* @__PURE__ */ jsx(Box, { flexDirection: "column", marginBottom: 1, marginLeft: 2, children: currentToolCalls.map((tc, i) => /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
- /* @__PURE__ */
- /* @__PURE__ */
-
+ /* @__PURE__ */ jsxs(Box, { children: [
+ tc.pending ? /* @__PURE__ */ jsxs(Text, { color: "cyan", children: [
+ /* @__PURE__ */ jsx(Spinner, { type: "dots" }),
  " ",
  tc.name
- ] }),
- /* @__PURE__ */ jsx(Text, { color: "gray", children: formatArgs(tc.args) }),
- /* @__PURE__ */ jsx(Text, { color: "yellow", dimColor: true, children: " Running..." })
- ] }) : /* @__PURE__ */ jsxs(Fragment, { children: [
- /* @__PURE__ */ jsxs(Text, { color: tc.success ? "green" : "red", children: [
+ ] }) : /* @__PURE__ */ jsxs(Text, { color: tc.success ? "blue" : "red", children: [
  tc.success ? "\u2713" : "\u2717",
  " ",
  tc.name
  ] }),
  /* @__PURE__ */ jsx(Text, { color: "gray", children: formatArgs(tc.args) })
- ] })
+ ] }),
  !tc.pending && tc.result && /* @__PURE__ */ jsx(Box, { marginLeft: 2, children: /* @__PURE__ */ jsxs(Text, { color: "gray", dimColor: true, children: [
  "\u2192 ",
  formatResult(tc.result, 100)
@@ -3936,19 +4838,8 @@ program.name("quantish").description("AI coding & trading agent for Polymarket")
  program.command("init").description("Configure Quantish CLI with your API keys").action(async () => {
  await runSetup();
  });
- program.command("config").description("View or edit configuration").option("-s, --show", "Show current configuration").option("-c, --clear", "Clear all configuration").option("--path", "Show config file path").option("--export", "Export configuration as .env format").option("--show-keys", "Show full API keys (use with caution)").
+ program.command("config").description("View or edit configuration").option("-s, --show", "Show current configuration").option("-c, --clear", "Clear all configuration").option("--path", "Show config file path").option("--export", "Export configuration as .env format").option("--show-keys", "Show full API keys (use with caution)").action(async (options) => {
  const config = getConfigManager();
- if (options.server) {
- try {
- new URL(options.server);
- } catch {
- error("Invalid URL format. Please provide a valid URL (e.g., https://your-server.com/mcp)");
- return;
- }
- config.set("mcpServerUrl", options.server);
- success(`Trading MCP server URL set to: ${options.server}`);
- return;
- }
  if (options.path) {
  console.log(config.getConfigPath());
  return;
@@ -3967,11 +4858,15 @@ program.command("config").description("View or edit configuration").option("-s,
  if (all2.anthropicApiKey) {
  console.log(`ANTHROPIC_API_KEY=${all2.anthropicApiKey}`);
  }
+ if (all2.openrouterApiKey) {
+ console.log(`OPENROUTER_API_KEY=${all2.openrouterApiKey}`);
+ }
  if (all2.quantishApiKey) {
  console.log(`QUANTISH_API_KEY=${all2.quantishApiKey}`);
  }
  console.log(`QUANTISH_MCP_URL=${all2.mcpServerUrl}`);
  console.log(`QUANTISH_MODEL=${all2.model || "claude-sonnet-4-5-20250929"}`);
+ console.log(`QUANTISH_PROVIDER=${all2.provider || "anthropic"}`);
  console.log();
  console.log(chalk3.dim("# Discovery MCP (public, read-only market data)"));
  console.log(`QUANTISH_DISCOVERY_URL=https://quantish.live/mcp`);
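With the new keys, quantish config --export prints output along these lines (placeholders shown; the API key lines appear only when the corresponding key is set):

    ANTHROPIC_API_KEY=<your key, if set>
    OPENROUTER_API_KEY=<your key, if set>
    QUANTISH_API_KEY=<your key, if set>
    QUANTISH_MCP_URL=<configured trading MCP URL>
    QUANTISH_MODEL=claude-sonnet-4-5-20250929
    QUANTISH_PROVIDER=anthropic

    # Discovery MCP (public, read-only market data)
    QUANTISH_DISCOVERY_URL=https://quantish.live/mcp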
@@ -3986,11 +4881,14 @@ program.command("config").description("View or edit configuration").option("-s,
  printDivider();
  if (options.showKeys) {
  tableRow("Anthropic API Key", all.anthropicApiKey || chalk3.dim("Not set"));
+ tableRow("OpenRouter API Key", all.openrouterApiKey || chalk3.dim("Not set"));
  tableRow("Quantish API Key", all.quantishApiKey || chalk3.dim("Not set"));
  } else {
  tableRow("Anthropic API Key", all.anthropicApiKey ? `${all.anthropicApiKey.slice(0, 10)}...` : chalk3.dim("Not set"));
+ tableRow("OpenRouter API Key", all.openrouterApiKey ? `${all.openrouterApiKey.slice(0, 10)}...` : chalk3.dim("Not set"));
  tableRow("Quantish API Key", all.quantishApiKey ? `${all.quantishApiKey.slice(0, 12)}...` : chalk3.dim("Not set"));
  }
+ tableRow("Provider", all.provider || "anthropic");
  tableRow("MCP Server URL", all.mcpServerUrl);
  tableRow("Model", all.model || "claude-sonnet-4-5-20250929");
  printDivider();
@@ -4123,7 +5021,9 @@ async function runInteractiveChat(options = {}) {
  const config = getConfigManager();
  const mcpClientManager = createMCPManager(options);
  const agent = createAgent({
+ provider: config.getProvider(),
  anthropicApiKey: config.getAnthropicApiKey(),
+ openrouterApiKey: config.getOpenRouterApiKey(),
  mcpClientManager,
  model: config.getModel(),
  enableLocalTools: options.enableLocal !== false,
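runInteractiveChat (above) and runOneShotPrompt (below) now thread provider and openrouterApiKey into createAgent. A sketch of the same wiring pointed at OpenRouter, using only the config keys shown in these hunks (assumes createAgent is reachable from your code; the model ID is the one the /model help text suggests):

    // Sketch only - mirrors the config keys passed by the CLI entry points.
    const agent = createAgent({
      provider: "openrouter",
      openrouterApiKey: process.env.OPENROUTER_API_KEY,
      model: "minimax/minimax-m2.1",
      enableLocalTools: true
    });
    // const result = await agent.run("What are my open Polymarket positions?");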
@@ -4256,7 +5156,9 @@ async function runOneShotPrompt(message, options = {}) {
  const config = getConfigManager();
  const mcpClientManager = createMCPManager(options);
  const agent = createAgent({
+ provider: config.getProvider(),
  anthropicApiKey: config.getAnthropicApiKey(),
+ openrouterApiKey: config.getOpenRouterApiKey(),
  mcpClientManager,
  model: config.getModel(),
  enableLocalTools: options.enableLocal !== false,