@quantish/agent 0.1.14 → 0.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/LICENSE +2 -0
  2. package/README.md +140 -152
  3. package/dist/index.js +1156 -283
  4. package/package.json +2 -1
package/dist/index.js CHANGED
@@ -18,6 +18,9 @@ var schema = {
18
18
  anthropicApiKey: {
19
19
  type: "string"
20
20
  },
21
+ openrouterApiKey: {
22
+ type: "string"
23
+ },
21
24
  quantishApiKey: {
22
25
  type: "string"
23
26
  },
@@ -28,6 +31,10 @@ var schema = {
28
31
  model: {
29
32
  type: "string",
30
33
  default: "claude-sonnet-4-5-20250929"
34
+ },
35
+ provider: {
36
+ type: "string",
37
+ default: "anthropic"
31
38
  }
32
39
  };
33
40
  var ConfigManager = class {
@@ -54,6 +61,20 @@ var ConfigManager = class {
54
61
  setAnthropicApiKey(key) {
55
62
  this.conf.set("anthropicApiKey", key);
56
63
  }
64
+ /**
65
+ * Get the OpenRouter API key
66
+ */
67
+ getOpenRouterApiKey() {
68
+ const envKey = process.env.OPENROUTER_API_KEY;
69
+ if (envKey) return envKey;
70
+ return this.conf.get("openrouterApiKey");
71
+ }
72
+ /**
73
+ * Set the OpenRouter API key
74
+ */
75
+ setOpenRouterApiKey(key) {
76
+ this.conf.set("openrouterApiKey", key);
77
+ }
57
78
  /**
58
79
  * Get the Quantish API key
59
80
  */
@@ -68,13 +89,22 @@ var ConfigManager = class {
68
89
  setQuantishApiKey(key) {
69
90
  this.conf.set("quantishApiKey", key);
70
91
  }
92
+ /**
93
+ * Get the current LLM provider
94
+ */
95
+ getProvider() {
96
+ return this.conf.get("provider") ?? "anthropic";
97
+ }
98
+ /**
99
+ * Set the LLM provider
100
+ */
101
+ setProvider(provider) {
102
+ this.conf.set("provider", provider);
103
+ }
71
104
  /**
72
105
  * Get the Trading MCP server URL (user's wallet/orders)
73
- * Priority: MCP_SERVER_URL env var > config file > default
74
106
  */
75
107
  getMcpServerUrl() {
76
- const envUrl = process.env.MCP_SERVER_URL;
77
- if (envUrl) return envUrl;
78
108
  return this.conf.get("mcpServerUrl") ?? DEFAULT_MCP_URL;
79
109
  }
80
110
  /**
@@ -101,12 +131,6 @@ var ConfigManager = class {
101
131
  setMcpServerUrl(url) {
102
132
  this.conf.set("mcpServerUrl", url);
103
133
  }
104
- /**
105
- * Generic setter for any config key
106
- */
107
- set(key, value) {
108
- this.conf.set(key, value);
109
- }
110
134
  /**
111
135
  * Get the model to use
112
136
  */
@@ -120,13 +144,27 @@ var ConfigManager = class {
120
144
  this.conf.set("model", model);
121
145
  }
122
146
  /**
123
- * Check if the CLI is configured (has at least Anthropic key)
147
+ * Check if the CLI is configured (has required LLM API key)
124
148
  * Discovery MCP works without any user key (embedded public key)
125
149
  * Trading MCP requires a user key
126
150
  */
127
151
  isConfigured() {
152
+ const provider = this.getProvider();
153
+ if (provider === "openrouter") {
154
+ return !!this.getOpenRouterApiKey();
155
+ }
128
156
  return !!this.getAnthropicApiKey();
129
157
  }
158
+ /**
159
+ * Get the appropriate LLM API key based on current provider
160
+ */
161
+ getLLMApiKey() {
162
+ const provider = this.getProvider();
163
+ if (provider === "openrouter") {
164
+ return this.getOpenRouterApiKey();
165
+ }
166
+ return this.getAnthropicApiKey();
167
+ }
130
168
  /**
131
169
  * Check if trading is enabled (has Quantish API key)
132
170
  */
@@ -139,9 +177,11 @@ var ConfigManager = class {
139
177
  getAll() {
140
178
  return {
141
179
  anthropicApiKey: this.getAnthropicApiKey(),
180
+ openrouterApiKey: this.getOpenRouterApiKey(),
142
181
  quantishApiKey: this.getQuantishApiKey(),
143
182
  mcpServerUrl: this.getMcpServerUrl(),
144
- model: this.getModel()
183
+ model: this.getModel(),
184
+ provider: this.getProvider()
145
185
  };
146
186
  }
147
187
  /**
@@ -527,27 +567,61 @@ async function runSetup() {
527
567
  return false;
528
568
  }
529
569
  console.log();
530
- console.log(chalk.bold("Step 1: Anthropic API Key"));
531
- console.log(chalk.dim("Powers the AI agent. Get yours at https://console.anthropic.com/"));
532
- let anthropicKey = config.getAnthropicApiKey();
533
- if (anthropicKey) {
534
- console.log(chalk.dim(`Current: ${anthropicKey.slice(0, 10)}...`));
535
- const newKey = await prompt("Enter new key (or press Enter to keep current): ", true);
536
- if (newKey) {
537
- anthropicKey = newKey;
570
+ console.log(chalk.bold("Step 1: Choose your LLM Provider"));
571
+ console.log(chalk.dim("The AI that powers the agent.\n"));
572
+ console.log(" 1. " + chalk.cyan("Anthropic") + chalk.dim(" (Claude models - Opus, Sonnet, Haiku)"));
573
+ console.log(" 2. " + chalk.green("OpenRouter") + chalk.dim(" (Access 100+ models - MiniMax, DeepSeek, etc.)\n"));
574
+ const providerChoice = await prompt("Choose (1 or 2): ");
575
+ const useOpenRouter = providerChoice === "2";
576
+ if (useOpenRouter) {
577
+ config.setProvider("openrouter");
578
+ console.log();
579
+ console.log(chalk.bold("OpenRouter API Key"));
580
+ console.log(chalk.dim("Get yours at https://openrouter.ai/keys\n"));
581
+ let openrouterKey = config.getOpenRouterApiKey();
582
+ if (openrouterKey) {
583
+ console.log(chalk.dim(`Current: ${openrouterKey.slice(0, 10)}...`));
584
+ const newKey = await prompt("Enter new key (or press Enter to keep current): ", true);
585
+ if (newKey) {
586
+ openrouterKey = newKey;
587
+ }
588
+ } else {
589
+ openrouterKey = await prompt("Enter your OpenRouter API key: ", true);
590
+ }
591
+ if (!openrouterKey) {
592
+ console.log(chalk.red("OpenRouter API key is required."));
593
+ return false;
538
594
  }
595
+ if (!openrouterKey.startsWith("sk-or-")) {
596
+ console.log(chalk.yellow("Warning: Key doesn't look like an OpenRouter key (should start with sk-or-)"));
597
+ }
598
+ config.setOpenRouterApiKey(openrouterKey);
599
+ console.log(chalk.green("\u2713 OpenRouter API key saved\n"));
539
600
  } else {
540
- anthropicKey = await prompt("Enter your Anthropic API key: ", true);
541
- }
542
- if (!anthropicKey) {
543
- console.log(chalk.red("Anthropic API key is required."));
544
- return false;
545
- }
546
- if (!anthropicKey.startsWith("sk-ant-")) {
547
- console.log(chalk.yellow("Warning: Key doesn't look like an Anthropic key (should start with sk-ant-)"));
601
+ config.setProvider("anthropic");
602
+ console.log();
603
+ console.log(chalk.bold("Anthropic API Key"));
604
+ console.log(chalk.dim("Get yours at https://console.anthropic.com/\n"));
605
+ let anthropicKey = config.getAnthropicApiKey();
606
+ if (anthropicKey) {
607
+ console.log(chalk.dim(`Current: ${anthropicKey.slice(0, 10)}...`));
608
+ const newKey = await prompt("Enter new key (or press Enter to keep current): ", true);
609
+ if (newKey) {
610
+ anthropicKey = newKey;
611
+ }
612
+ } else {
613
+ anthropicKey = await prompt("Enter your Anthropic API key: ", true);
614
+ }
615
+ if (!anthropicKey) {
616
+ console.log(chalk.red("Anthropic API key is required."));
617
+ return false;
618
+ }
619
+ if (!anthropicKey.startsWith("sk-ant-")) {
620
+ console.log(chalk.yellow("Warning: Key doesn't look like an Anthropic key (should start with sk-ant-)"));
621
+ }
622
+ config.setAnthropicApiKey(anthropicKey);
623
+ console.log(chalk.green("\u2713 Anthropic API key saved\n"));
548
624
  }
549
- config.setAnthropicApiKey(anthropicKey);
550
- console.log(chalk.green("\u2713 Anthropic API key saved\n"));
551
625
  console.log(chalk.bold("Step 2: Polymarket Trading (Optional)"));
552
626
  console.log(chalk.dim("Enable trading on Polymarket with your own managed wallet."));
553
627
  console.log(chalk.dim("Skip this if you only want to search/discover markets.\n"));
@@ -681,15 +755,14 @@ async function runSetup() {
681
755
  async function ensureConfigured() {
682
756
  const config = getConfigManager();
683
757
  if (!config.isConfigured()) {
684
- console.log(chalk.yellow("Quantish CLI is not configured yet."));
685
- console.log("Run " + chalk.yellow("quantish init") + " to set up.\n");
686
- return false;
758
+ console.log(chalk.yellow("Quantish CLI is not configured yet.\n"));
759
+ return await runSetup();
687
760
  }
688
761
  return true;
689
762
  }
690
763
 
691
764
  // src/agent/loop.ts
692
- import Anthropic from "@anthropic-ai/sdk";
765
+ import Anthropic2 from "@anthropic-ai/sdk";
693
766
 
694
767
  // src/tools/filesystem.ts
695
768
  import * as fs from "fs/promises";
@@ -922,111 +995,8 @@ var filesystemTools = [
922
995
  },
923
996
  required: ["path", "old_string", "new_string"]
924
997
  }
925
- },
926
- {
927
- name: "setup_env",
928
- description: "Setup or update environment variables in a .env file for an application. Creates .env if it doesn't exist. Optionally creates a .env.example template. Use this when building any application that needs API keys or configuration.",
929
- input_schema: {
930
- type: "object",
931
- properties: {
932
- path: {
933
- type: "string",
934
- description: 'Path to the .env file (default: ".env" in current directory)'
935
- },
936
- variables: {
937
- type: "object",
938
- description: 'Object with environment variable names as keys and values. Example: { "QUANTISH_API_KEY": "abc123", "TOKEN_ID": "xyz" }',
939
- additionalProperties: { type: "string" }
940
- },
941
- overwrite: {
942
- type: "boolean",
943
- description: "If true, overwrite existing variables. Default false (skip existing)."
944
- },
945
- create_example: {
946
- type: "boolean",
947
- description: "If true, also create a .env.example template file with placeholder values."
948
- }
949
- },
950
- required: ["variables"]
951
- }
952
998
  }
953
999
  ];
954
- async function setupEnv(envPath = ".env", variables, options) {
955
- try {
956
- const resolvedPath = path.resolve(envPath);
957
- let content = "";
958
- const existingVars = {};
959
- if (existsSync(resolvedPath)) {
960
- content = await fs.readFile(resolvedPath, "utf-8");
961
- for (const line of content.split("\n")) {
962
- const trimmed = line.trim();
963
- if (trimmed && !trimmed.startsWith("#")) {
964
- const eqIndex = trimmed.indexOf("=");
965
- if (eqIndex > 0) {
966
- const key = trimmed.slice(0, eqIndex);
967
- const value = trimmed.slice(eqIndex + 1);
968
- existingVars[key] = value;
969
- }
970
- }
971
- }
972
- }
973
- const updatedVars = [];
974
- const addedVars = [];
975
- const skippedVars = [];
976
- for (const [key, value] of Object.entries(variables)) {
977
- if (existingVars[key] !== void 0) {
978
- if (options?.overwrite) {
979
- const regex = new RegExp(`^${key}=.*$`, "m");
980
- content = content.replace(regex, `${key}=${value}`);
981
- updatedVars.push(key);
982
- } else {
983
- skippedVars.push(key);
984
- }
985
- } else {
986
- if (content && !content.endsWith("\n")) {
987
- content += "\n";
988
- }
989
- content += `${key}=${value}
990
- `;
991
- addedVars.push(key);
992
- }
993
- }
994
- await fs.writeFile(resolvedPath, content, "utf-8");
995
- if (options?.createExample) {
996
- const examplePath = resolvedPath.replace(/\.env$/, ".env.example");
997
- let exampleContent = "# Environment variables for this application\n";
998
- exampleContent += "# Copy this file to .env and fill in your values\n\n";
999
- for (const key of Object.keys({ ...existingVars, ...variables })) {
1000
- if (key === "QUANTISH_API_KEY") {
1001
- exampleContent += `# Get your API key at https://quantish.live
1002
- `;
1003
- exampleContent += `${key}=your_api_key_here
1004
-
1005
- `;
1006
- } else {
1007
- exampleContent += `${key}=
1008
- `;
1009
- }
1010
- }
1011
- await fs.writeFile(examplePath, exampleContent, "utf-8");
1012
- }
1013
- return {
1014
- success: true,
1015
- data: {
1016
- path: resolvedPath,
1017
- added: addedVars,
1018
- updated: updatedVars,
1019
- skipped: skippedVars,
1020
- exampleCreated: options?.createExample || false
1021
- }
1022
- };
1023
- } catch (error2) {
1024
- return {
1025
- success: false,
1026
- error: `Failed to setup env: ${error2 instanceof Error ? error2.message : String(error2)}`
1027
- };
1028
- }
1029
- }
1030
1000
  async function executeFilesystemTool(name, args) {
1031
1001
  switch (name) {
1032
1002
  case "read_file":
@@ -1049,15 +1019,6 @@ async function executeFilesystemTool(name, args) {
1049
1019
  args.new_string,
1050
1020
  { replaceAll: args.replace_all }
1051
1021
  );
1052
- case "setup_env":
1053
- return setupEnv(
1054
- args.path || ".env",
1055
- args.variables,
1056
- {
1057
- overwrite: args.overwrite,
1058
- createExample: args.create_example
1059
- }
1060
- );
1061
1022
  default:
1062
1023
  return { success: false, error: `Unknown filesystem tool: ${name}` };
1063
1024
  }
@@ -2357,16 +2318,16 @@ async function compactConversation(anthropic, history, model, systemPrompt, tool
2357
2318
 
2358
2319
  // src/agent/pricing.ts
2359
2320
  var MODELS = {
2360
- "claude-opus-4-5-20251101": {
2361
- id: "claude-opus-4-5-20251101",
2321
+ "claude-opus-4-5-20250929": {
2322
+ id: "claude-opus-4-5-20250929",
2362
2323
  name: "opus-4.5",
2363
2324
  displayName: "Claude Opus 4.5",
2364
2325
  pricing: {
2365
- inputPerMTok: 15,
2366
- outputPerMTok: 75,
2367
- cacheWritePerMTok: 18.75,
2326
+ inputPerMTok: 5,
2327
+ outputPerMTok: 25,
2328
+ cacheWritePerMTok: 6.25,
2368
2329
  // 1.25x input
2369
- cacheReadPerMTok: 1.5
2330
+ cacheReadPerMTok: 0.5
2370
2331
  // 0.1x input
2371
2332
  },
2372
2333
  contextWindow: 2e5,
@@ -2387,8 +2348,8 @@ var MODELS = {
2387
2348
  contextWindow: 2e5,
2388
2349
  description: "Balanced performance and cost. Great for most coding and trading tasks."
2389
2350
  },
2390
- "claude-haiku-4-5-20251001": {
2391
- id: "claude-haiku-4-5-20251001",
2351
+ "claude-haiku-4-5-20250929": {
2352
+ id: "claude-haiku-4-5-20250929",
2392
2353
  name: "haiku-4.5",
2393
2354
  displayName: "Claude Haiku 4.5",
2394
2355
  pricing: {
@@ -2405,12 +2366,12 @@ var MODELS = {
2405
2366
  };
2406
2367
  var DEFAULT_MODEL = "claude-sonnet-4-5-20250929";
2407
2368
  var MODEL_ALIASES = {
2408
- "opus": "claude-opus-4-5-20251101",
2409
- "opus-4.5": "claude-opus-4-5-20251101",
2369
+ "opus": "claude-opus-4-5-20250929",
2370
+ "opus-4.5": "claude-opus-4-5-20250929",
2410
2371
  "sonnet": "claude-sonnet-4-5-20250929",
2411
2372
  "sonnet-4.5": "claude-sonnet-4-5-20250929",
2412
- "haiku": "claude-haiku-4-5-20251001",
2413
- "haiku-4.5": "claude-haiku-4-5-20251001"
2373
+ "haiku": "claude-haiku-4-5-20250929",
2374
+ "haiku-4.5": "claude-haiku-4-5-20250929"
2414
2375
  };
2415
2376
  function resolveModelId(nameOrAlias) {
2416
2377
  const lower = nameOrAlias.toLowerCase();
@@ -2477,6 +2438,744 @@ function listModels() {
2477
2438
  return Object.values(MODELS);
2478
2439
  }
2479
2440
 
2441
+ // src/agent/provider.ts
2442
+ import Anthropic from "@anthropic-ai/sdk";
2443
+
2444
+ // src/agent/openrouter.ts
2445
+ var OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
2446
+ var OPENROUTER_MODELS = {
2447
+ // MiniMax models - very cost effective
2448
+ "minimax/minimax-m2.1": {
2449
+ id: "minimax/minimax-m2.1",
2450
+ name: "minimax-m2.1",
2451
+ displayName: "MiniMax M2.1",
2452
+ provider: "MiniMax",
2453
+ pricing: {
2454
+ inputPerMTok: 0.3,
2455
+ // $0.0000003 * 1M
2456
+ outputPerMTok: 1.2,
2457
+ // $0.0000012 * 1M
2458
+ cacheReadPerMTok: 0.03,
2459
+ cacheWritePerMTok: 0.375
2460
+ },
2461
+ contextWindow: 204800,
2462
+ maxOutputTokens: 131072,
2463
+ supportsTools: true,
2464
+ supportsReasoning: true,
2465
+ description: "10B active params, state-of-the-art for coding and agentic workflows. Very cost efficient."
2466
+ },
2467
+ "minimax/minimax-m2": {
2468
+ id: "minimax/minimax-m2",
2469
+ name: "minimax-m2",
2470
+ displayName: "MiniMax M2",
2471
+ provider: "MiniMax",
2472
+ pricing: {
2473
+ inputPerMTok: 0.2,
2474
+ outputPerMTok: 1,
2475
+ cacheReadPerMTok: 0.03
2476
+ },
2477
+ contextWindow: 196608,
2478
+ maxOutputTokens: 131072,
2479
+ supportsTools: true,
2480
+ supportsReasoning: true,
2481
+ description: "Compact model optimized for end-to-end coding and agentic workflows."
2482
+ },
2483
+ // DeepSeek models - very cheap
2484
+ "deepseek/deepseek-v3.2": {
2485
+ id: "deepseek/deepseek-v3.2",
2486
+ name: "deepseek-v3.2",
2487
+ displayName: "DeepSeek V3.2",
2488
+ provider: "DeepSeek",
2489
+ pricing: {
2490
+ inputPerMTok: 0.224,
2491
+ outputPerMTok: 0.32
2492
+ },
2493
+ contextWindow: 163840,
2494
+ supportsTools: true,
2495
+ supportsReasoning: true,
2496
+ description: "High efficiency with strong reasoning. GPT-5 class performance."
2497
+ },
2498
+ // Mistral models
2499
+ "mistralai/devstral-2512": {
2500
+ id: "mistralai/devstral-2512",
2501
+ name: "devstral-2512",
2502
+ displayName: "Devstral 2 2512",
2503
+ provider: "Mistral",
2504
+ pricing: {
2505
+ inputPerMTok: 0.05,
2506
+ outputPerMTok: 0.22
2507
+ },
2508
+ contextWindow: 262144,
2509
+ supportsTools: true,
2510
+ description: "State-of-the-art open model for agentic coding. 123B params."
2511
+ },
2512
+ "mistralai/mistral-large-2512": {
2513
+ id: "mistralai/mistral-large-2512",
2514
+ name: "mistral-large-2512",
2515
+ displayName: "Mistral Large 3",
2516
+ provider: "Mistral",
2517
+ pricing: {
2518
+ inputPerMTok: 0.5,
2519
+ outputPerMTok: 1.5
2520
+ },
2521
+ contextWindow: 262144,
2522
+ supportsTools: true,
2523
+ description: "Most capable Mistral model. 675B total params (41B active)."
2524
+ },
2525
+ // Google Gemini
2526
+ "google/gemini-3-flash-preview": {
2527
+ id: "google/gemini-3-flash-preview",
2528
+ name: "gemini-3-flash",
2529
+ displayName: "Gemini 3 Flash Preview",
2530
+ provider: "Google",
2531
+ pricing: {
2532
+ inputPerMTok: 0.5,
2533
+ outputPerMTok: 3,
2534
+ cacheReadPerMTok: 0.05
2535
+ },
2536
+ contextWindow: 1048576,
2537
+ supportsTools: true,
2538
+ supportsReasoning: true,
2539
+ description: "High speed thinking model for agentic workflows. 1M context."
2540
+ },
2541
+ "google/gemini-3-pro-preview": {
2542
+ id: "google/gemini-3-pro-preview",
2543
+ name: "gemini-3-pro",
2544
+ displayName: "Gemini 3 Pro Preview",
2545
+ provider: "Google",
2546
+ pricing: {
2547
+ inputPerMTok: 2,
2548
+ outputPerMTok: 12,
2549
+ cacheReadPerMTok: 0.2,
2550
+ cacheWritePerMTok: 2.375
2551
+ },
2552
+ contextWindow: 1048576,
2553
+ supportsTools: true,
2554
+ supportsReasoning: true,
2555
+ description: "Flagship frontier model for high-precision multimodal reasoning."
2556
+ },
2557
+ // xAI Grok
2558
+ "x-ai/grok-4.1-fast": {
2559
+ id: "x-ai/grok-4.1-fast",
2560
+ name: "grok-4.1-fast",
2561
+ displayName: "Grok 4.1 Fast",
2562
+ provider: "xAI",
2563
+ pricing: {
2564
+ inputPerMTok: 0.2,
2565
+ outputPerMTok: 0.5,
2566
+ cacheReadPerMTok: 0.05
2567
+ },
2568
+ contextWindow: 2e6,
2569
+ maxOutputTokens: 3e4,
2570
+ supportsTools: true,
2571
+ supportsReasoning: true,
2572
+ description: "Best agentic tool calling model. 2M context window."
2573
+ },
2574
+ // Anthropic via OpenRouter (for fallback/comparison)
2575
+ "anthropic/claude-opus-4.5": {
2576
+ id: "anthropic/claude-opus-4.5",
2577
+ name: "claude-opus-4.5-or",
2578
+ displayName: "Claude Opus 4.5 (OR)",
2579
+ provider: "Anthropic",
2580
+ pricing: {
2581
+ inputPerMTok: 5,
2582
+ outputPerMTok: 25,
2583
+ cacheReadPerMTok: 0.5,
2584
+ cacheWritePerMTok: 6.25
2585
+ },
2586
+ contextWindow: 2e5,
2587
+ maxOutputTokens: 32e3,
2588
+ supportsTools: true,
2589
+ supportsReasoning: true,
2590
+ description: "Anthropic Opus 4.5 via OpenRouter."
2591
+ },
2592
+ "anthropic/claude-haiku-4.5": {
2593
+ id: "anthropic/claude-haiku-4.5",
2594
+ name: "claude-haiku-4.5-or",
2595
+ displayName: "Claude Haiku 4.5 (OR)",
2596
+ provider: "Anthropic",
2597
+ pricing: {
2598
+ inputPerMTok: 1,
2599
+ outputPerMTok: 5,
2600
+ cacheReadPerMTok: 0.1,
2601
+ cacheWritePerMTok: 1.25
2602
+ },
2603
+ contextWindow: 2e5,
2604
+ maxOutputTokens: 64e3,
2605
+ supportsTools: true,
2606
+ supportsReasoning: true,
2607
+ description: "Anthropic Haiku 4.5 via OpenRouter. Fast and efficient."
2608
+ },
2609
+ // Free models (for testing/experimentation)
2610
+ "mistralai/devstral-2512:free": {
2611
+ id: "mistralai/devstral-2512:free",
2612
+ name: "devstral-free",
2613
+ displayName: "Devstral 2 (Free)",
2614
+ provider: "Mistral",
2615
+ pricing: {
2616
+ inputPerMTok: 0,
2617
+ outputPerMTok: 0
2618
+ },
2619
+ contextWindow: 262144,
2620
+ supportsTools: true,
2621
+ description: "Free tier Devstral for testing. Limited capacity."
2622
+ },
2623
+ "xiaomi/mimo-v2-flash:free": {
2624
+ id: "xiaomi/mimo-v2-flash:free",
2625
+ name: "mimo-v2-flash-free",
2626
+ displayName: "MiMo V2 Flash (Free)",
2627
+ provider: "Xiaomi",
2628
+ pricing: {
2629
+ inputPerMTok: 0,
2630
+ outputPerMTok: 0
2631
+ },
2632
+ contextWindow: 262144,
2633
+ supportsTools: true,
2634
+ supportsReasoning: true,
2635
+ description: "Free MoE model. Top open-source on SWE-bench."
2636
+ }
2637
+ };
2638
+ var OPENROUTER_ALIASES = {
2639
+ // MiniMax
2640
+ "minimax": "minimax/minimax-m2.1",
2641
+ "m2": "minimax/minimax-m2",
2642
+ "m2.1": "minimax/minimax-m2.1",
2643
+ // DeepSeek
2644
+ "deepseek": "deepseek/deepseek-v3.2",
2645
+ "ds": "deepseek/deepseek-v3.2",
2646
+ // Mistral
2647
+ "devstral": "mistralai/devstral-2512",
2648
+ "mistral": "mistralai/mistral-large-2512",
2649
+ "mistral-large": "mistralai/mistral-large-2512",
2650
+ // Google
2651
+ "gemini": "google/gemini-3-flash-preview",
2652
+ "gemini-flash": "google/gemini-3-flash-preview",
2653
+ "gemini-pro": "google/gemini-3-pro-preview",
2654
+ // xAI
2655
+ "grok": "x-ai/grok-4.1-fast",
2656
+ // Anthropic via OR
2657
+ "opus-or": "anthropic/claude-opus-4.5",
2658
+ "haiku-or": "anthropic/claude-haiku-4.5",
2659
+ // Free
2660
+ "free": "mistralai/devstral-2512:free",
2661
+ "mimo": "xiaomi/mimo-v2-flash:free"
2662
+ };
2663
+ function resolveOpenRouterModelId(nameOrAlias) {
2664
+ const lower = nameOrAlias.toLowerCase();
2665
+ if (OPENROUTER_MODELS[lower]) {
2666
+ return lower;
2667
+ }
2668
+ if (OPENROUTER_ALIASES[lower]) {
2669
+ return OPENROUTER_ALIASES[lower];
2670
+ }
2671
+ for (const [id, config] of Object.entries(OPENROUTER_MODELS)) {
2672
+ if (config.name.toLowerCase() === lower) {
2673
+ return id;
2674
+ }
2675
+ }
2676
+ if (nameOrAlias.includes("/")) {
2677
+ return nameOrAlias;
2678
+ }
2679
+ return null;
2680
+ }
2681
+ function getOpenRouterModelConfig(modelId) {
2682
+ return OPENROUTER_MODELS[modelId] ?? null;
2683
+ }
2684
+ function convertToOpenAITools(anthropicTools) {
2685
+ return anthropicTools.map((tool) => ({
2686
+ type: "function",
2687
+ function: {
2688
+ name: tool.name,
2689
+ description: tool.description ?? "",
2690
+ parameters: tool.input_schema
2691
+ }
2692
+ }));
2693
+ }
2694
+ var OpenRouterClient = class {
2695
+ apiKey;
2696
+ baseUrl;
2697
+ appName;
2698
+ appUrl;
2699
+ constructor(config) {
2700
+ this.apiKey = config.apiKey;
2701
+ this.baseUrl = config.baseUrl ?? OPENROUTER_BASE_URL;
2702
+ this.appName = config.appName ?? "Quantish Agent";
2703
+ this.appUrl = config.appUrl ?? "https://quantish.ai";
2704
+ }
2705
+ /**
2706
+ * Create a chat completion (non-streaming)
2707
+ */
2708
+ async createChatCompletion(options) {
2709
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
2710
+ method: "POST",
2711
+ headers: {
2712
+ "Authorization": `Bearer ${this.apiKey}`,
2713
+ "Content-Type": "application/json",
2714
+ "HTTP-Referer": this.appUrl,
2715
+ "X-Title": this.appName
2716
+ },
2717
+ body: JSON.stringify({
2718
+ model: options.model,
2719
+ messages: options.messages,
2720
+ tools: options.tools,
2721
+ tool_choice: options.tool_choice ?? (options.tools ? "auto" : void 0),
2722
+ max_tokens: options.max_tokens,
2723
+ temperature: options.temperature,
2724
+ top_p: options.top_p,
2725
+ stream: false
2726
+ })
2727
+ });
2728
+ if (!response.ok) {
2729
+ const errorText = await response.text();
2730
+ throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
2731
+ }
2732
+ return response.json();
2733
+ }
2734
+ /**
2735
+ * Create a streaming chat completion
2736
+ */
2737
+ async *createStreamingChatCompletion(options) {
2738
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
2739
+ method: "POST",
2740
+ headers: {
2741
+ "Authorization": `Bearer ${this.apiKey}`,
2742
+ "Content-Type": "application/json",
2743
+ "HTTP-Referer": this.appUrl,
2744
+ "X-Title": this.appName
2745
+ },
2746
+ body: JSON.stringify({
2747
+ model: options.model,
2748
+ messages: options.messages,
2749
+ tools: options.tools,
2750
+ tool_choice: options.tool_choice ?? (options.tools ? "auto" : void 0),
2751
+ max_tokens: options.max_tokens,
2752
+ temperature: options.temperature,
2753
+ top_p: options.top_p,
2754
+ stream: true
2755
+ })
2756
+ });
2757
+ if (!response.ok) {
2758
+ const errorText = await response.text();
2759
+ throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
2760
+ }
2761
+ if (!response.body) {
2762
+ throw new Error("No response body for streaming request");
2763
+ }
2764
+ const reader = response.body.getReader();
2765
+ const decoder = new TextDecoder();
2766
+ let buffer = "";
2767
+ try {
2768
+ while (true) {
2769
+ const { done, value } = await reader.read();
2770
+ if (done) break;
2771
+ buffer += decoder.decode(value, { stream: true });
2772
+ const lines = buffer.split("\n");
2773
+ buffer = lines.pop() ?? "";
2774
+ for (const line of lines) {
2775
+ const trimmed = line.trim();
2776
+ if (!trimmed || trimmed === "data: [DONE]") continue;
2777
+ if (!trimmed.startsWith("data: ")) continue;
2778
+ try {
2779
+ const json = JSON.parse(trimmed.slice(6));
2780
+ yield json;
2781
+ } catch {
2782
+ }
2783
+ }
2784
+ }
2785
+ } finally {
2786
+ reader.releaseLock();
2787
+ }
2788
+ }
2789
+ /**
2790
+ * Get generation details including exact cost
2791
+ */
2792
+ async getGenerationDetails(generationId) {
2793
+ const response = await fetch(`${this.baseUrl}/generation?id=${generationId}`, {
2794
+ method: "GET",
2795
+ headers: {
2796
+ "Authorization": `Bearer ${this.apiKey}`
2797
+ }
2798
+ });
2799
+ if (!response.ok) {
2800
+ const errorText = await response.text();
2801
+ throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
2802
+ }
2803
+ return response.json();
2804
+ }
2805
+ /**
2806
+ * List available models
2807
+ */
2808
+ async listModels() {
2809
+ const response = await fetch(`${this.baseUrl}/models`, {
2810
+ method: "GET",
2811
+ headers: {
2812
+ "Authorization": `Bearer ${this.apiKey}`
2813
+ }
2814
+ });
2815
+ if (!response.ok) {
2816
+ const errorText = await response.text();
2817
+ throw new Error(`OpenRouter API error (${response.status}): ${errorText}`);
2818
+ }
2819
+ return response.json();
2820
+ }
2821
+ };
2822
+ function calculateOpenRouterCost(modelId, inputTokens, outputTokens, cacheReadTokens = 0, cacheWriteTokens = 0) {
2823
+ const config = getOpenRouterModelConfig(modelId);
2824
+ const pricing = config?.pricing ?? {
2825
+ inputPerMTok: 1,
2826
+ outputPerMTok: 3,
2827
+ cacheReadPerMTok: 0.1,
2828
+ cacheWritePerMTok: 1.25
2829
+ };
2830
+ const inputCost = inputTokens / 1e6 * pricing.inputPerMTok;
2831
+ const outputCost = outputTokens / 1e6 * pricing.outputPerMTok;
2832
+ const cacheReadCost = cacheReadTokens / 1e6 * (pricing.cacheReadPerMTok ?? pricing.inputPerMTok * 0.1);
2833
+ const cacheWriteCost = cacheWriteTokens / 1e6 * (pricing.cacheWritePerMTok ?? pricing.inputPerMTok * 1.25);
2834
+ return {
2835
+ inputCost,
2836
+ outputCost,
2837
+ cacheReadCost,
2838
+ cacheWriteCost,
2839
+ totalCost: inputCost + outputCost + cacheReadCost + cacheWriteCost
2840
+ };
2841
+ }
2842
+ function listOpenRouterModels() {
2843
+ return Object.values(OPENROUTER_MODELS);
2844
+ }
2845
+
2846
+ // src/agent/provider.ts
2847
+ var AnthropicProvider = class {
2848
+ client;
2849
+ config;
2850
+ constructor(config) {
2851
+ this.config = config;
2852
+ const headers = {};
2853
+ if (config.contextEditing && config.contextEditing.length > 0) {
2854
+ headers["anthropic-beta"] = "context-management-2025-06-27";
2855
+ }
2856
+ this.client = new Anthropic({
2857
+ apiKey: config.apiKey,
2858
+ defaultHeaders: Object.keys(headers).length > 0 ? headers : void 0
2859
+ });
2860
+ }
2861
+ getModel() {
2862
+ return this.config.model;
2863
+ }
2864
+ async countTokens(messages) {
2865
+ try {
2866
+ const response = await this.client.messages.countTokens({
2867
+ model: this.config.model,
2868
+ system: this.config.systemPrompt,
2869
+ tools: this.config.tools,
2870
+ messages
2871
+ });
2872
+ return response.input_tokens;
2873
+ } catch {
2874
+ return 0;
2875
+ }
2876
+ }
2877
+ async chat(messages) {
2878
+ const systemWithCache = [
2879
+ {
2880
+ type: "text",
2881
+ text: this.config.systemPrompt,
2882
+ cache_control: { type: "ephemeral" }
2883
+ }
2884
+ ];
2885
+ const response = await this.client.messages.create({
2886
+ model: this.config.model,
2887
+ max_tokens: this.config.maxTokens,
2888
+ system: systemWithCache,
2889
+ tools: this.config.tools,
2890
+ messages
2891
+ });
2892
+ const usage = response.usage;
2893
+ const cost = calculateCost(
2894
+ this.config.model,
2895
+ usage.input_tokens,
2896
+ usage.output_tokens,
2897
+ usage.cache_creation_input_tokens ?? 0,
2898
+ usage.cache_read_input_tokens ?? 0
2899
+ );
2900
+ const textBlocks = response.content.filter(
2901
+ (block) => block.type === "text"
2902
+ );
2903
+ const toolUses = response.content.filter(
2904
+ (block) => block.type === "tool_use"
2905
+ );
2906
+ return {
2907
+ text: textBlocks.map((b) => b.text).join(""),
2908
+ toolCalls: toolUses.map((t) => ({
2909
+ id: t.id,
2910
+ name: t.name,
2911
+ input: t.input
2912
+ })),
2913
+ usage: {
2914
+ inputTokens: usage.input_tokens,
2915
+ outputTokens: usage.output_tokens,
2916
+ cacheCreationTokens: usage.cache_creation_input_tokens ?? 0,
2917
+ cacheReadTokens: usage.cache_read_input_tokens ?? 0
2918
+ },
2919
+ cost,
2920
+ stopReason: response.stop_reason === "tool_use" ? "tool_use" : "end_turn",
2921
+ rawResponse: response
2922
+ };
2923
+ }
2924
+ async streamChat(messages, callbacks) {
2925
+ const systemWithCache = [
2926
+ {
2927
+ type: "text",
2928
+ text: this.config.systemPrompt,
2929
+ cache_control: { type: "ephemeral" }
2930
+ }
2931
+ ];
2932
+ const stream = this.client.messages.stream({
2933
+ model: this.config.model,
2934
+ max_tokens: this.config.maxTokens,
2935
+ system: systemWithCache,
2936
+ tools: this.config.tools,
2937
+ messages
2938
+ });
2939
+ let fullText = "";
2940
+ for await (const event of stream) {
2941
+ if (event.type === "content_block_delta") {
2942
+ const delta = event.delta;
2943
+ if (delta.type === "text_delta" && delta.text) {
2944
+ fullText += delta.text;
2945
+ callbacks.onText?.(delta.text);
2946
+ } else if (delta.type === "thinking_delta" && delta.thinking) {
2947
+ callbacks.onThinking?.(delta.thinking);
2948
+ }
2949
+ }
2950
+ }
2951
+ const response = await stream.finalMessage();
2952
+ const usage = response.usage;
2953
+ const cost = calculateCost(
2954
+ this.config.model,
2955
+ usage.input_tokens,
2956
+ usage.output_tokens,
2957
+ usage.cache_creation_input_tokens ?? 0,
2958
+ usage.cache_read_input_tokens ?? 0
2959
+ );
2960
+ const toolUses = response.content.filter(
2961
+ (block) => block.type === "tool_use"
2962
+ );
2963
+ for (const tool of toolUses) {
2964
+ callbacks.onToolCall?.(tool.id, tool.name, tool.input);
2965
+ }
2966
+ return {
2967
+ text: fullText,
2968
+ toolCalls: toolUses.map((t) => ({
2969
+ id: t.id,
2970
+ name: t.name,
2971
+ input: t.input
2972
+ })),
2973
+ usage: {
2974
+ inputTokens: usage.input_tokens,
2975
+ outputTokens: usage.output_tokens,
2976
+ cacheCreationTokens: usage.cache_creation_input_tokens ?? 0,
2977
+ cacheReadTokens: usage.cache_read_input_tokens ?? 0
2978
+ },
2979
+ cost,
2980
+ stopReason: response.stop_reason === "tool_use" ? "tool_use" : "end_turn",
2981
+ rawResponse: response
2982
+ };
2983
+ }
2984
+ };
2985
+ var OpenRouterProvider = class {
2986
+ client;
2987
+ config;
2988
+ openaiTools;
2989
+ constructor(config) {
2990
+ this.config = config;
2991
+ this.client = new OpenRouterClient({
2992
+ apiKey: config.apiKey
2993
+ });
2994
+ this.openaiTools = convertToOpenAITools(config.tools);
2995
+ }
2996
+ getModel() {
2997
+ return this.config.model;
2998
+ }
2999
+ async countTokens(_messages) {
3000
+ const text = JSON.stringify(_messages);
3001
+ return Math.ceil(text.length / 4);
3002
+ }
3003
+ /**
3004
+ * Convert Anthropic message format to OpenAI format
3005
+ */
3006
+ convertMessages(messages) {
3007
+ const result = [];
3008
+ result.push({
3009
+ role: "system",
3010
+ content: this.config.systemPrompt
3011
+ });
3012
+ for (const msg of messages) {
3013
+ if (msg.role === "user") {
3014
+ if (typeof msg.content === "string") {
3015
+ result.push({ role: "user", content: msg.content });
3016
+ } else if (Array.isArray(msg.content)) {
3017
+ const toolResults = msg.content.filter(
3018
+ (block) => block.type === "tool_result"
3019
+ );
3020
+ if (toolResults.length > 0) {
3021
+ for (const tr of toolResults) {
3022
+ const toolResult = tr;
3023
+ result.push({
3024
+ role: "tool",
3025
+ tool_call_id: toolResult.tool_use_id,
3026
+ content: typeof toolResult.content === "string" ? toolResult.content : JSON.stringify(toolResult.content)
3027
+ });
3028
+ }
3029
+ } else {
3030
+ const textContent = msg.content.filter((block) => block.type === "text").map((block) => block.text).join("");
3031
+ if (textContent) {
3032
+ result.push({ role: "user", content: textContent });
3033
+ }
3034
+ }
3035
+ }
3036
+ } else if (msg.role === "assistant") {
3037
+ if (typeof msg.content === "string") {
3038
+ result.push({ role: "assistant", content: msg.content });
3039
+ } else if (Array.isArray(msg.content)) {
3040
+ const textBlocks = msg.content.filter(
3041
+ (block) => block.type === "text"
3042
+ );
3043
+ const toolUses = msg.content.filter(
3044
+ (block) => block.type === "tool_use"
3045
+ );
3046
+ const textContent = textBlocks.map((b) => b.text).join("");
3047
+ if (toolUses.length > 0) {
3048
+ result.push({
3049
+ role: "assistant",
3050
+ content: textContent || null,
3051
+ tool_calls: toolUses.map((t) => ({
3052
+ id: t.id,
3053
+ type: "function",
3054
+ function: {
3055
+ name: t.name,
3056
+ arguments: JSON.stringify(t.input)
3057
+ }
3058
+ }))
3059
+ });
3060
+ } else {
3061
+ result.push({ role: "assistant", content: textContent });
3062
+ }
3063
+ }
3064
+ }
3065
+ }
3066
+ return result;
3067
+ }
3068
+ async chat(messages) {
3069
+ const openaiMessages = this.convertMessages(messages);
3070
+ const response = await this.client.createChatCompletion({
3071
+ model: this.config.model,
3072
+ messages: openaiMessages,
3073
+ tools: this.openaiTools.length > 0 ? this.openaiTools : void 0,
3074
+ max_tokens: this.config.maxTokens
3075
+ });
3076
+ const choice = response.choices[0];
3077
+ const usage = response.usage ?? { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
3078
+ const cost = calculateOpenRouterCost(
3079
+ this.config.model,
3080
+ usage.prompt_tokens,
3081
+ usage.completion_tokens
3082
+ );
3083
+ const toolCalls = choice.message.tool_calls ?? [];
3084
+ return {
3085
+ text: choice.message.content ?? "",
3086
+ toolCalls: toolCalls.map((tc) => ({
3087
+ id: tc.id,
3088
+ name: tc.function.name,
3089
+ input: JSON.parse(tc.function.arguments)
3090
+ })),
3091
+ usage: {
3092
+ inputTokens: usage.prompt_tokens,
3093
+ outputTokens: usage.completion_tokens,
3094
+ cacheCreationTokens: 0,
3095
+ cacheReadTokens: 0
3096
+ },
3097
+ cost,
3098
+ stopReason: choice.finish_reason === "tool_calls" ? "tool_use" : "end_turn",
3099
+ rawResponse: response
3100
+ };
3101
+ }
3102
+ async streamChat(messages, callbacks) {
3103
+ const openaiMessages = this.convertMessages(messages);
3104
+ let fullText = "";
3105
+ const toolCallsInProgress = /* @__PURE__ */ new Map();
3106
+ let finishReason = null;
3107
+ let usage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
3108
+ const stream = this.client.createStreamingChatCompletion({
3109
+ model: this.config.model,
3110
+ messages: openaiMessages,
3111
+ tools: this.openaiTools.length > 0 ? this.openaiTools : void 0,
3112
+ max_tokens: this.config.maxTokens
3113
+ });
3114
+ for await (const chunk of stream) {
3115
+ const choice = chunk.choices[0];
3116
+ if (!choice) continue;
3117
+ if (choice.delta.content) {
3118
+ fullText += choice.delta.content;
3119
+ callbacks.onText?.(choice.delta.content);
3120
+ }
3121
+ if (choice.delta.tool_calls) {
3122
+ for (const tcDelta of choice.delta.tool_calls) {
3123
+ const existing = toolCallsInProgress.get(tcDelta.index);
3124
+ if (!existing) {
3125
+ toolCallsInProgress.set(tcDelta.index, {
3126
+ id: tcDelta.id ?? "",
3127
+ name: tcDelta.function?.name ?? "",
3128
+ arguments: tcDelta.function?.arguments ?? ""
3129
+ });
3130
+ } else {
3131
+ if (tcDelta.id) existing.id = tcDelta.id;
3132
+ if (tcDelta.function?.name) existing.name = tcDelta.function.name;
3133
+ if (tcDelta.function?.arguments) existing.arguments += tcDelta.function.arguments;
3134
+ }
3135
+ }
3136
+ }
3137
+ if (choice.finish_reason) {
3138
+ finishReason = choice.finish_reason;
3139
+ }
3140
+ if (chunk.usage) {
3141
+ usage = chunk.usage;
3142
+ }
3143
+ }
3144
+ const toolCalls = [];
3145
+ for (const [, tc] of toolCallsInProgress) {
3146
+ try {
3147
+ const input = JSON.parse(tc.arguments || "{}");
3148
+ toolCalls.push({ id: tc.id, name: tc.name, input });
3149
+ callbacks.onToolCall?.(tc.id, tc.name, input);
3150
+ } catch {
3151
+ }
3152
+ }
3153
+ const cost = calculateOpenRouterCost(
3154
+ this.config.model,
3155
+ usage.prompt_tokens,
3156
+ usage.completion_tokens
3157
+ );
3158
+ return {
3159
+ text: fullText,
3160
+ toolCalls,
3161
+ usage: {
3162
+ inputTokens: usage.prompt_tokens,
3163
+ outputTokens: usage.completion_tokens,
3164
+ cacheCreationTokens: 0,
3165
+ cacheReadTokens: 0
3166
+ },
3167
+ cost,
3168
+ stopReason: finishReason === "tool_calls" ? "tool_use" : "end_turn"
3169
+ };
3170
+ }
3171
+ };
3172
+ function createLLMProvider(config) {
3173
+ if (config.provider === "openrouter") {
3174
+ return new OpenRouterProvider(config);
3175
+ }
3176
+ return new AnthropicProvider(config);
3177
+ }
3178
+
2480
3179
  // src/agent/loop.ts
2481
3180
  var MAX_TOOL_RESULT_CHARS = 8e3;
2482
3181
  function truncateToolResult(result, toolName) {
@@ -2661,81 +3360,36 @@ function extractTokenInfo(token) {
2661
3360
  price: token.price ?? token.probability
2662
3361
  };
2663
3362
  }
2664
- var DEFAULT_SYSTEM_PROMPT = `You are Quantish, an AI coding and trading agent. Be concise.
2665
-
2666
- ## APIs
2667
-
2668
- TRADING (requires QUANTISH_API_KEY):
2669
- - URL: https://quantish-sdk-production.up.railway.app/mcp/execute
2670
- - Format: JSON-RPC 2.0 { jsonrpc: '2.0', method: 'tools/call', params: { name, arguments }, id }
2671
- - Tools: get_balances, get_positions, place_order, cancel_order, get_orders, get_orderbook, get_price
2672
-
2673
- DISCOVERY (free):
2674
- - URL: https://quantish.live/mcp/execute
2675
- - Format: { name, arguments }
2676
- - Key: qm_ueQeqrmvZyHtR1zuVbLYkhx0fKyVAuV8
2677
- - Tools: search_markets, get_market_details, get_trending_markets
2678
-
2679
- ## Response Structures (IMPORTANT - use these field paths)
2680
-
2681
- search_markets / get_trending_markets returns:
2682
- {
2683
- "found": N,
2684
- "markets": [{ "platform", "id", "title", "markets": [{ "marketId", "question", "outcomes": [{ "name", "price" }], "clobTokenIds": "[json_array]", "conditionId" }] }]
2685
- }
3363
+ var DEFAULT_SYSTEM_PROMPT = `You are Quantish, an AI coding and trading agent.
2686
3364
 
2687
- get_market_details returns:
2688
- {
2689
- "platform": "polymarket",
2690
- "id": "12345",
2691
- "conditionId": "0x...",
2692
- "title": "Market Title",
2693
- "clobTokenIds": "["TOKEN_YES","TOKEN_NO"]",
2694
- "markets": [{
2695
- "marketId": "67890",
2696
- "question": "Question?",
2697
- "outcomes": [{ "name": "Yes", "price": 0.55 }, { "name": "No", "price": 0.45 }],
2698
- "clobTokenIds": "["TOKEN_YES","TOKEN_NO"]"
2699
- }]
2700
- }
3365
+ You have two sets of capabilities:
2701
3366
 
2702
- KEY FIELDS:
2703
- - market.id = top-level ID for get_market_details
2704
- - market.markets[0].marketId = sub-market ID
2705
- - market.markets[0].outcomes[].name = "Yes"/"No" or outcome name
2706
- - market.markets[0].outcomes[].price = decimal 0-1
2707
- - JSON.parse(market.clobTokenIds || market.markets[0].clobTokenIds) = token IDs array
2708
- - market.conditionId = condition ID for trading
3367
+ ## Trading Tools (via MCP)
3368
+ You can interact with Polymarket prediction markets:
3369
+ - Check wallet balances and positions
3370
+ - Place, cancel, and manage orders
3371
+ - Transfer funds and claim winnings
3372
+ - Get market prices and orderbook data
2709
3373
 
2710
- ## Standalone App Code
3374
+ ## Coding Tools (local)
3375
+ You can work with the local filesystem:
3376
+ - Read and write files
3377
+ - List directories and search with grep
3378
+ - Run shell commands
3379
+ - Use git for version control
2711
3380
 
2712
- Trading helper:
2713
- async function callTradingTool(name, args = {}) {
2714
- const res = await fetch('https://quantish-sdk-production.up.railway.app/mcp/execute', {
2715
- method: 'POST',
2716
- headers: { 'Content-Type': 'application/json', 'x-api-key': process.env.QUANTISH_API_KEY },
2717
- body: JSON.stringify({ jsonrpc: '2.0', method: 'tools/call', params: { name, arguments: args }, id: Date.now() })
2718
- });
2719
- return JSON.parse((await res.json()).result.content[0].text);
2720
- }
3381
+ ## Guidelines
3382
+ - Be concise and helpful
3383
+ - When making trades, always confirm details before proceeding
3384
+ - Prices on Polymarket are between 0.01 and 0.99 (probabilities)
3385
+ - Minimum order value is $1
3386
+ - When writing code, follow existing patterns and conventions
3387
+ - For dangerous operations (rm, sudo), explain what you're doing
2721
3388
 
2722
- Discovery helper:
2723
- async function callDiscoveryTool(name, args = {}) {
2724
- const res = await fetch('https://quantish.live/mcp/execute', {
2725
- method: 'POST',
2726
- headers: { 'Content-Type': 'application/json', 'X-API-Key': 'qm_ueQeqrmvZyHtR1zuVbLYkhx0fKyVAuV8' },
2727
- body: JSON.stringify({ name, arguments: args })
2728
- });
2729
- return JSON.parse((await res.json()).result.content[0].text);
2730
- }
2731
-
2732
- ## Rules
2733
- 1. Never use @modelcontextprotocol/sdk - use fetch()
2734
- 2. Always create .env.example and use dotenv
2735
- 3. Never hardcode/mock data - always fetch real data
2736
- 4. Check logs before restarting servers`;
3389
+ You help users build trading bots and agents by combining coding skills with trading capabilities.`;
2737
3390
  var Agent = class {
2738
3391
  anthropic;
3392
+ llmProvider;
2739
3393
  mcpClient;
2740
3394
  mcpClientManager;
2741
3395
  config;
@@ -2756,6 +3410,8 @@ var Agent = class {
2756
3410
  this.config = {
2757
3411
  enableLocalTools: true,
2758
3412
  enableMCPTools: true,
3413
+ provider: "anthropic",
3414
+ // Default to Anthropic
2759
3415
  // Default context editing: clear old tool uses when context exceeds 100k tokens
2760
3416
  contextEditing: config.contextEditing || [
2761
3417
  {
@@ -2770,14 +3426,176 @@ var Agent = class {
2770
3426
  if (this.config.contextEditing && this.config.contextEditing.length > 0) {
2771
3427
  headers["anthropic-beta"] = "context-management-2025-06-27";
2772
3428
  }
2773
- this.anthropic = new Anthropic({
2774
- apiKey: config.anthropicApiKey,
3429
+ const anthropicKey = config.anthropicApiKey || "placeholder";
3430
+ this.anthropic = new Anthropic2({
3431
+ apiKey: anthropicKey,
2775
3432
  defaultHeaders: Object.keys(headers).length > 0 ? headers : void 0
2776
3433
  });
2777
3434
  this.mcpClient = config.mcpClient;
2778
3435
  this.mcpClientManager = config.mcpClientManager;
2779
3436
  this.workingDirectory = config.workingDirectory || process.cwd();
2780
3437
  }
3438
+ /**
3439
+ * Get the API key for the current provider
3440
+ */
3441
+ getApiKey() {
3442
+ if (this.config.provider === "openrouter") {
3443
+ return this.config.openrouterApiKey || "";
3444
+ }
3445
+ return this.config.anthropicApiKey || "";
3446
+ }
3447
+ /**
3448
+ * Check if using OpenRouter provider
3449
+ */
3450
+ isOpenRouter() {
3451
+ return this.config.provider === "openrouter";
3452
+ }
3453
+ /**
3454
+ * Get the current provider name
3455
+ */
3456
+ getProvider() {
3457
+ return this.config.provider || "anthropic";
3458
+ }
3459
+ /**
3460
+ * Set the LLM provider
3461
+ */
3462
+ setProvider(provider) {
3463
+ this.config.provider = provider;
3464
+ this.llmProvider = void 0;
3465
+ }
3466
+ /**
3467
+ * Get or create the LLM provider instance
3468
+ */
3469
+ async getOrCreateProvider() {
3470
+ if (this.llmProvider) {
3471
+ return this.llmProvider;
3472
+ }
3473
+ const allTools = await this.getAllTools();
3474
+ const systemPrompt = this.config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;
3475
+ const model = this.config.model ?? DEFAULT_MODEL;
3476
+ const maxTokens = this.config.maxTokens ?? 8192;
3477
+ this.llmProvider = createLLMProvider({
3478
+ provider: this.config.provider || "anthropic",
3479
+ apiKey: this.getApiKey(),
3480
+ model,
3481
+ maxTokens,
3482
+ systemPrompt,
3483
+ tools: allTools,
3484
+ contextEditing: this.config.contextEditing
3485
+ });
3486
+ return this.llmProvider;
3487
+ }
3488
+ /**
3489
+ * Run the agent using the provider abstraction (for OpenRouter and future providers)
3490
+ */
3491
+ async runWithProvider(userMessage) {
3492
+ const maxIterations = this.config.maxIterations ?? 200;
3493
+ const useStreaming = this.config.streaming ?? true;
3494
+ const provider = await this.getOrCreateProvider();
3495
+ const contextMessage = `[Working directory: ${this.workingDirectory}]
3496
+
3497
+ ${userMessage}`;
3498
+ this.conversationHistory.push({
3499
+ role: "user",
3500
+ content: contextMessage
3501
+ });
3502
+ const toolCalls = [];
3503
+ let iterations = 0;
3504
+ let finalText = "";
3505
+ while (iterations < maxIterations) {
3506
+ iterations++;
3507
+ this.config.onStreamStart?.();
3508
+ let response;
3509
+ if (useStreaming) {
3510
+ response = await provider.streamChat(this.conversationHistory, {
3511
+ onText: (text) => {
3512
+ finalText += text;
3513
+ this.config.onText?.(text, false);
3514
+ },
3515
+ onThinking: (text) => {
3516
+ this.config.onThinking?.(text);
3517
+ },
3518
+ onToolCall: (id, name, input) => {
3519
+ this.config.onToolCall?.(name, input);
3520
+ }
3521
+ });
3522
+ if (response.text) {
3523
+ this.config.onText?.("", true);
3524
+ }
3525
+ } else {
3526
+ response = await provider.chat(this.conversationHistory);
3527
+ if (response.text) {
3528
+ finalText += response.text;
3529
+ this.config.onText?.(response.text, true);
3530
+ }
3531
+ }
3532
+ this.config.onStreamEnd?.();
3533
+ this.updateTokenUsage({
3534
+ input_tokens: response.usage.inputTokens,
3535
+ output_tokens: response.usage.outputTokens,
3536
+ cache_creation_input_tokens: response.usage.cacheCreationTokens,
3537
+ cache_read_input_tokens: response.usage.cacheReadTokens
3538
+ });
3539
+ const responseContent = [];
3540
+ if (response.text) {
3541
+ responseContent.push({ type: "text", text: response.text });
3542
+ }
3543
+ for (const tc of response.toolCalls) {
3544
+ responseContent.push({
3545
+ type: "tool_use",
3546
+ id: tc.id,
3547
+ name: tc.name,
3548
+ input: tc.input
3549
+ });
3550
+ }
3551
+ if (response.toolCalls.length === 0) {
3552
+ this.conversationHistory.push({
3553
+ role: "assistant",
3554
+ content: responseContent
3555
+ });
3556
+ break;
3557
+ }
3558
+ const toolResults = [];
3559
+ for (const toolCall2 of response.toolCalls) {
3560
+ await new Promise((resolve2) => setImmediate(resolve2));
3561
+ const { result, source } = await this.executeTool(
3562
+ toolCall2.name,
3563
+ toolCall2.input
3564
+ );
3565
+ const success2 = !(result && typeof result === "object" && "error" in result);
3566
+ this.config.onToolResult?.(toolCall2.name, result, success2);
3567
+ toolCalls.push({
3568
+ name: toolCall2.name,
3569
+ input: toolCall2.input,
3570
+ result,
3571
+ source
3572
+ });
3573
+ toolResults.push({
3574
+ type: "tool_result",
3575
+ tool_use_id: toolCall2.id,
3576
+ content: JSON.stringify(result)
3577
+ });
3578
+ }
3579
+ this.conversationHistory.push({
3580
+ role: "assistant",
3581
+ content: responseContent
3582
+ });
3583
+ this.conversationHistory.push({
3584
+ role: "user",
3585
+ content: toolResults
3586
+ });
3587
+ this.truncateLastToolResults();
3588
+ if (response.stopReason === "end_turn" && response.toolCalls.length === 0) {
3589
+ break;
3590
+ }
3591
+ }
3592
+ return {
3593
+ text: finalText,
3594
+ toolCalls,
3595
+ iterations,
3596
+ tokenUsage: { ...this.cumulativeTokenUsage }
3597
+ };
3598
+ }
2781
3599
  /**
2782
3600
  * Get all available tools
2783
3601
  */
@@ -2830,16 +3648,16 @@ var Agent = class {
2830
3648
  }
2831
3649
  /**
2832
3650
  * Run the agent with a user message (supports streaming)
2833
- * @param userMessage - The user's input message
2834
- * @param options - Optional configuration including abort signal
2835
3651
  */
2836
- async run(userMessage, options) {
2837
- const maxIterations = this.config.maxIterations ?? 200;
3652
+ async run(userMessage) {
3653
+ if (this.config.provider === "openrouter") {
3654
+ return this.runWithProvider(userMessage);
3655
+ }
3656
+ const maxIterations = this.config.maxIterations ?? 15;
2838
3657
  const model = this.config.model ?? "claude-sonnet-4-5-20250929";
2839
3658
  const maxTokens = this.config.maxTokens ?? 8192;
2840
3659
  const systemPrompt = this.config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;
2841
3660
  const useStreaming = this.config.streaming ?? true;
2842
- const signal = options?.signal;
2843
3661
  const allTools = await this.getAllTools();
2844
3662
  const contextManagement = this.config.contextEditing && this.config.contextEditing.length > 0 ? { edits: this.config.contextEditing } : void 0;
2845
3663
  const contextMessage = `[Working directory: ${this.workingDirectory}]
@@ -2853,9 +3671,6 @@ ${userMessage}`;
2853
3671
  let iterations = 0;
2854
3672
  let finalText = "";
2855
3673
  while (iterations < maxIterations) {
2856
- if (signal?.aborted) {
2857
- throw new Error("Operation aborted by user");
2858
- }
2859
3674
  iterations++;
2860
3675
  this.config.onStreamStart?.();
2861
3676
  let response;
@@ -2880,12 +3695,8 @@ ${userMessage}`;
2880
3695
  if (contextManagement) {
2881
3696
  streamOptions.context_management = contextManagement;
2882
3697
  }
2883
- const stream = this.anthropic.messages.stream(streamOptions, { signal });
3698
+ const stream = this.anthropic.messages.stream(streamOptions);
2884
3699
  for await (const event of stream) {
2885
- if (signal?.aborted) {
2886
- stream.controller.abort();
2887
- throw new Error("Operation aborted by user");
2888
- }
2889
3700
  if (event.type === "content_block_delta") {
2890
3701
  const delta = event.delta;
2891
3702
  if (delta.type === "text_delta" && delta.text) {
@@ -2950,11 +3761,7 @@ ${userMessage}`;
2950
3761
  }
2951
3762
  const toolResults = [];
2952
3763
  for (const toolUse of toolUses) {
2953
- if (signal?.aborted) {
2954
- throw new Error("Operation aborted by user");
2955
- }
2956
3764
  this.config.onToolCall?.(toolUse.name, toolUse.input);
2957
- await new Promise((resolve2) => setImmediate(resolve2));
2958
3765
  const { result, source } = await this.executeTool(
2959
3766
  toolUse.name,
2960
3767
  toolUse.input
@@ -3135,19 +3942,34 @@ ${userMessage}`;
3135
3942
  * Set the model to use for future requests
3136
3943
  */
3137
3944
  setModel(modelIdOrAlias) {
3138
- const resolvedId = resolveModelId(modelIdOrAlias);
3945
+ let resolvedId = resolveModelId(modelIdOrAlias);
3946
+ let displayName;
3947
+ if (resolvedId) {
3948
+ const modelConfig = getModelConfig(resolvedId);
3949
+ displayName = modelConfig?.displayName;
3950
+ } else {
3951
+ resolvedId = resolveOpenRouterModelId(modelIdOrAlias);
3952
+ if (resolvedId) {
3953
+ const orConfig = getOpenRouterModelConfig(resolvedId);
3954
+ displayName = orConfig?.displayName ?? resolvedId;
3955
+ if (!this.isOpenRouter() && resolvedId.includes("/")) {
3956
+ this.config.provider = "openrouter";
3957
+ }
3958
+ }
3959
+ }
3139
3960
  if (!resolvedId) {
3140
- const availableModels = Object.values(MODELS).map((m) => m.name).join(", ");
3961
+ const anthropicModels = Object.values(MODELS).map((m) => m.name).join(", ");
3962
+ const orModels = Object.values(OPENROUTER_MODELS).slice(0, 5).map((m) => m.name).join(", ");
3141
3963
  return {
3142
3964
  success: false,
3143
- error: `Unknown model: "${modelIdOrAlias}". Available: ${availableModels}`
3965
+ error: `Unknown model: "${modelIdOrAlias}". Anthropic: ${anthropicModels}. OpenRouter: ${orModels}, ...`
3144
3966
  };
3145
3967
  }
3146
3968
  this.config.model = resolvedId;
3147
- const modelConfig = getModelConfig(resolvedId);
3969
+ this.llmProvider = void 0;
3148
3970
  return {
3149
3971
  success: true,
3150
- model: modelConfig?.displayName ?? resolvedId
3972
+ model: displayName ?? resolvedId
3151
3973
  };
3152
3974
  }
3153
3975
  /**
@@ -3273,7 +4095,7 @@ import { useState, useCallback, useRef, useEffect } from "react";
3273
4095
  import { Box, Text, useApp, useInput } from "ink";
3274
4096
  import TextInput from "ink-text-input";
3275
4097
  import Spinner from "ink-spinner";
3276
- import { Fragment, jsx, jsxs } from "react/jsx-runtime";
4098
+ import { jsx, jsxs } from "react/jsx-runtime";
3277
4099
  function formatTokenCount(count) {
3278
4100
  if (count < 1e3) return String(count);
3279
4101
  if (count < 1e5) return `${(count / 1e3).toFixed(1)}k`;
@@ -3288,7 +4110,8 @@ var SLASH_COMMANDS = [
3288
4110
  { cmd: "/help", desc: "Show available commands" },
3289
4111
  { cmd: "/clear", desc: "Clear conversation history" },
3290
4112
  { cmd: "/compact", desc: "Summarize conversation to save tokens" },
3291
- { cmd: "/model", desc: "Switch model (opus, sonnet, haiku)" },
4113
+ { cmd: "/model", desc: "Switch model (opus, sonnet, haiku, minimax, etc.)" },
4114
+ { cmd: "/provider", desc: "Switch LLM provider (anthropic, openrouter)" },
3292
4115
  { cmd: "/cost", desc: "Show session cost breakdown" },
3293
4116
  { cmd: "/tools", desc: "List available tools" },
3294
4117
  { cmd: "/config", desc: "Show configuration info" },
@@ -3354,7 +4177,8 @@ function App({ agent, onExit }) {
3354
4177
  content: `\u{1F4DA} Available Commands:
3355
4178
  /clear - Clear conversation history
3356
4179
  /compact - Summarize conversation (keeps context, saves tokens)
3357
- /model - Switch model (opus, sonnet, haiku)
4180
+ /model - Switch model (opus, sonnet, haiku, minimax, deepseek, etc.)
4181
+ /provider - Switch LLM provider (anthropic, openrouter)
3358
4182
  /cost - Show session cost breakdown
3359
4183
  /help - Show this help message
3360
4184
  /tools - List available tools
@@ -3507,30 +4331,46 @@ Use /stop <id> to stop a process.`
3507
4331
  case "model":
3508
4332
  if (!args) {
3509
4333
  const currentModel = agent.getModel();
4334
+ const currentProvider = agent.getProvider();
3510
4335
  const modelConfig = getModelConfig(currentModel);
3511
- const models = listModels();
3512
- const modelList = models.map((m) => {
4336
+ const orModelConfig = getOpenRouterModelConfig(currentModel);
4337
+ const displayName = modelConfig?.displayName || orModelConfig?.displayName || currentModel;
4338
+ const anthropicModels = listModels();
4339
+ const anthropicList = anthropicModels.map((m) => {
3513
4340
  const isCurrent = m.id === currentModel ? " (current)" : "";
3514
4341
  return ` ${m.name}${isCurrent} - ${m.description}`;
3515
4342
  }).join("\n");
4343
+ const orModels = listOpenRouterModels().slice(0, 8);
4344
+ const orList = orModels.map((m) => {
4345
+ const isCurrent = m.id === currentModel ? " (current)" : "";
4346
+ return ` ${m.name}${isCurrent} - ${m.description.slice(0, 50)}...`;
4347
+ }).join("\n");
3516
4348
  setMessages((prev) => [...prev, {
3517
4349
  role: "system",
3518
- content: `\u{1F916} Current model: ${modelConfig?.displayName || currentModel}
4350
+ content: `\u{1F916} Current: ${displayName} (${currentProvider})
4351
+
4352
+ Anthropic Models:
4353
+ ${anthropicList}
3519
4354
 
3520
- Available models:
3521
- ${modelList}
4355
+ OpenRouter Models (selection):
4356
+ ${orList}
4357
+ ... and many more! Use any OpenRouter model ID like 'minimax/minimax-m2.1'
3522
4358
 
3523
- Usage: /model <name> (e.g., /model haiku, /model opus)`
4359
+ Usage: /model <name> (e.g., /model haiku, /model minimax)
4360
+ Using an OpenRouter model auto-switches to OpenRouter provider.`
3524
4361
  }]);
3525
4362
  return true;
3526
4363
  }
3527
4364
  const result = agent.setModel(args);
3528
4365
  if (result.success) {
3529
- const newConfig = getModelConfig(agent.getModel());
4366
+ const anthropicConfig = getModelConfig(agent.getModel());
4367
+ const orConfig = getOpenRouterModelConfig(agent.getModel());
4368
+ const description = anthropicConfig?.description || orConfig?.description || "";
4369
+ const providerInfo = agent.isOpenRouter() ? " (OpenRouter)" : " (Anthropic)";
3530
4370
  setMessages((prev) => [...prev, {
3531
4371
  role: "system",
3532
- content: `\u2705 Switched to ${result.model}
3533
- ${newConfig?.description || ""}`
4372
+ content: `\u2705 Switched to ${result.model}${providerInfo}
4373
+ ${description}`
3534
4374
  }]);
3535
4375
  } else {
3536
4376
  setMessages((prev) => [...prev, {
@@ -3539,6 +4379,43 @@ Usage: /model <name> (e.g., /model haiku, /model opus)`
3539
4379
  }]);
3540
4380
  }
3541
4381
  return true;
4382
+ case "provider":
4383
+ if (!args) {
4384
+ const currentProvider = agent.getProvider();
4385
+ setMessages((prev) => [...prev, {
4386
+ role: "system",
4387
+ content: `\u{1F527} LLM Provider
4388
+
4389
+ Current: ${currentProvider}
4390
+
4391
+ Available providers:
4392
+ anthropic - Claude models (Opus, Sonnet, Haiku)
4393
+ openrouter - Multi-provider access (MiniMax, DeepSeek, Gemini, etc.)
4394
+
4395
+ Usage: /provider <name> (e.g., /provider openrouter)
4396
+
4397
+ Note: When switching to OpenRouter, make sure OPENROUTER_API_KEY is set.
4398
+ You can also just use /model with an OpenRouter model name.`
4399
+ }]);
4400
+ return true;
4401
+ }
4402
+ const providerArg = args.toLowerCase();
4403
+ if (providerArg !== "anthropic" && providerArg !== "openrouter") {
4404
+ setMessages((prev) => [...prev, {
4405
+ role: "system",
4406
+ content: `\u274C Unknown provider: "${args}". Use: anthropic, openrouter`
4407
+ }]);
4408
+ return true;
4409
+ }
4410
+ agent.setProvider(providerArg);
4411
+ const providerModels = providerArg === "openrouter" ? "minimax, deepseek, gemini, grok, devstral" : "opus, sonnet, haiku";
4412
+ setMessages((prev) => [...prev, {
4413
+ role: "system",
4414
+ content: `\u2705 Switched to ${providerArg} provider
4415
+ Available models: ${providerModels}
4416
+ Use /model to select a model.`
4417
+ }]);
4418
+ return true;
3542
4419
  case "cost":
3543
4420
  const usage = agent.getTokenUsage();
3544
4421
  const sessionCost = agent.getSessionCost();
@@ -3611,7 +4488,7 @@ Last API Call Cost:
3611
4488
  completedToolCalls.current = [];
3612
4489
  abortController.current = new AbortController();
3613
4490
  try {
3614
- const result = await agent.run(trimmed, { signal: abortController.current?.signal });
4491
+ const result = await agent.run(trimmed);
3615
4492
  if (isInterrupted) {
3616
4493
  setMessages((prev) => [...prev, {
3617
4494
  role: "system",
@@ -3750,22 +4627,18 @@ Stopped ${count} background process${count > 1 ? "es" : ""}.`);
3750
4627
  msg.role === "system" && /* @__PURE__ */ jsx(Box, { children: /* @__PURE__ */ jsx(Text, { color: "gray", italic: true, children: msg.content }) })
3751
4628
  ] }, i)) }),
3752
4629
  currentToolCalls.length > 0 && /* @__PURE__ */ jsx(Box, { flexDirection: "column", marginBottom: 1, marginLeft: 2, children: currentToolCalls.map((tc, i) => /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
3753
- /* @__PURE__ */ jsx(Box, { children: tc.pending ? /* @__PURE__ */ jsxs(Fragment, { children: [
3754
- /* @__PURE__ */ jsx(Text, { color: "yellow", children: /* @__PURE__ */ jsx(Spinner, { type: "dots" }) }),
3755
- /* @__PURE__ */ jsxs(Text, { color: "cyan", bold: true, children: [
4630
+ /* @__PURE__ */ jsxs(Box, { children: [
4631
+ tc.pending ? /* @__PURE__ */ jsxs(Text, { color: "cyan", children: [
4632
+ /* @__PURE__ */ jsx(Spinner, { type: "dots" }),
3756
4633
  " ",
3757
4634
  tc.name
3758
- ] }),
3759
- /* @__PURE__ */ jsx(Text, { color: "gray", children: formatArgs(tc.args) }),
3760
- /* @__PURE__ */ jsx(Text, { color: "yellow", dimColor: true, children: " Running..." })
3761
- ] }) : /* @__PURE__ */ jsxs(Fragment, { children: [
3762
- /* @__PURE__ */ jsxs(Text, { color: tc.success ? "green" : "red", children: [
4635
+ ] }) : /* @__PURE__ */ jsxs(Text, { color: tc.success ? "blue" : "red", children: [
3763
4636
  tc.success ? "\u2713" : "\u2717",
3764
4637
  " ",
3765
4638
  tc.name
3766
4639
  ] }),
3767
4640
  /* @__PURE__ */ jsx(Text, { color: "gray", children: formatArgs(tc.args) })
3768
- ] }) }),
4641
+ ] }),
3769
4642
  !tc.pending && tc.result && /* @__PURE__ */ jsx(Box, { marginLeft: 2, children: /* @__PURE__ */ jsxs(Text, { color: "gray", dimColor: true, children: [
3770
4643
  "\u2192 ",
3771
4644
  formatResult(tc.result, 100)
@@ -3867,19 +4740,8 @@ program.name("quantish").description("AI coding & trading agent for Polymarket")
3867
4740
  program.command("init").description("Configure Quantish CLI with your API keys").action(async () => {
3868
4741
  await runSetup();
3869
4742
  });
3870
- program.command("config").description("View or edit configuration").option("-s, --show", "Show current configuration").option("-c, --clear", "Clear all configuration").option("--path", "Show config file path").option("--export", "Export configuration as .env format").option("--show-keys", "Show full API keys (use with caution)").option("--server <url>", "Set custom Trading MCP server URL").action(async (options) => {
4743
+ program.command("config").description("View or edit configuration").option("-s, --show", "Show current configuration").option("-c, --clear", "Clear all configuration").option("--path", "Show config file path").option("--export", "Export configuration as .env format").option("--show-keys", "Show full API keys (use with caution)").action(async (options) => {
3871
4744
  const config = getConfigManager();
3872
- if (options.server) {
3873
- try {
3874
- new URL(options.server);
3875
- } catch {
3876
- error("Invalid URL format. Please provide a valid URL (e.g., https://your-server.com/mcp)");
3877
- return;
3878
- }
3879
- config.set("mcpServerUrl", options.server);
3880
- success(`Trading MCP server URL set to: ${options.server}`);
3881
- return;
3882
- }
3883
4745
  if (options.path) {
3884
4746
  console.log(config.getConfigPath());
3885
4747
  return;
@@ -3898,11 +4760,15 @@ program.command("config").description("View or edit configuration").option("-s,
3898
4760
  if (all2.anthropicApiKey) {
3899
4761
  console.log(`ANTHROPIC_API_KEY=${all2.anthropicApiKey}`);
3900
4762
  }
4763
+ if (all2.openrouterApiKey) {
4764
+ console.log(`OPENROUTER_API_KEY=${all2.openrouterApiKey}`);
4765
+ }
3901
4766
  if (all2.quantishApiKey) {
3902
4767
  console.log(`QUANTISH_API_KEY=${all2.quantishApiKey}`);
3903
4768
  }
3904
4769
  console.log(`QUANTISH_MCP_URL=${all2.mcpServerUrl}`);
3905
4770
  console.log(`QUANTISH_MODEL=${all2.model || "claude-sonnet-4-5-20250929"}`);
4771
+ console.log(`QUANTISH_PROVIDER=${all2.provider || "anthropic"}`);
3906
4772
  console.log();
3907
4773
  console.log(chalk3.dim("# Discovery MCP (public, read-only market data)"));
3908
4774
  console.log(`QUANTISH_DISCOVERY_URL=https://quantish.live/mcp`);
@@ -3917,11 +4783,14 @@ program.command("config").description("View or edit configuration").option("-s,
3917
4783
  printDivider();
3918
4784
  if (options.showKeys) {
3919
4785
  tableRow("Anthropic API Key", all.anthropicApiKey || chalk3.dim("Not set"));
4786
+ tableRow("OpenRouter API Key", all.openrouterApiKey || chalk3.dim("Not set"));
3920
4787
  tableRow("Quantish API Key", all.quantishApiKey || chalk3.dim("Not set"));
3921
4788
  } else {
3922
4789
  tableRow("Anthropic API Key", all.anthropicApiKey ? `${all.anthropicApiKey.slice(0, 10)}...` : chalk3.dim("Not set"));
4790
+ tableRow("OpenRouter API Key", all.openrouterApiKey ? `${all.openrouterApiKey.slice(0, 10)}...` : chalk3.dim("Not set"));
3923
4791
  tableRow("Quantish API Key", all.quantishApiKey ? `${all.quantishApiKey.slice(0, 12)}...` : chalk3.dim("Not set"));
3924
4792
  }
4793
+ tableRow("Provider", all.provider || "anthropic");
3925
4794
  tableRow("MCP Server URL", all.mcpServerUrl);
3926
4795
  tableRow("Model", all.model || "claude-sonnet-4-5-20250929");
3927
4796
  printDivider();
@@ -4054,7 +4923,9 @@ async function runInteractiveChat(options = {}) {
4054
4923
  const config = getConfigManager();
4055
4924
  const mcpClientManager = createMCPManager(options);
4056
4925
  const agent = createAgent({
4926
+ provider: config.getProvider(),
4057
4927
  anthropicApiKey: config.getAnthropicApiKey(),
4928
+ openrouterApiKey: config.getOpenRouterApiKey(),
4058
4929
  mcpClientManager,
4059
4930
  model: config.getModel(),
4060
4931
  enableLocalTools: options.enableLocal !== false,
@@ -4187,7 +5058,9 @@ async function runOneShotPrompt(message, options = {}) {
4187
5058
  const config = getConfigManager();
4188
5059
  const mcpClientManager = createMCPManager(options);
4189
5060
  const agent = createAgent({
5061
+ provider: config.getProvider(),
4190
5062
  anthropicApiKey: config.getAnthropicApiKey(),
5063
+ openrouterApiKey: config.getOpenRouterApiKey(),
4191
5064
  mcpClientManager,
4192
5065
  model: config.getModel(),
4193
5066
  enableLocalTools: options.enableLocal !== false,