@yeshwanthyk/ai 0.1.0 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/agent-loop.d.ts.map +1 -1
- package/dist/agent/agent-loop.js +50 -16
- package/dist/agent/agent-loop.js.map +1 -1
- package/dist/agent/tools/calculate.d.ts.map +1 -1
- package/dist/agent/tools/calculate.js +7 -2
- package/dist/agent/tools/calculate.js.map +1 -1
- package/dist/agent/tools/get-current-time.d.ts.map +1 -1
- package/dist/agent/tools/get-current-time.js +7 -2
- package/dist/agent/tools/get-current-time.js.map +1 -1
- package/dist/agent/types.d.ts +4 -4
- package/dist/agent/types.d.ts.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/models.generated.d.ts +160 -98
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +260 -193
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +44 -14
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/google.d.ts.map +1 -1
- package/dist/providers/google.js +47 -13
- package/dist/providers/google.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +41 -12
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses.js +6 -2
- package/dist/providers/openai-responses.js.map +1 -1
- package/dist/providers/transform-messages.d.ts.map +1 -1
- package/dist/providers/transform-messages.js +2 -1
- package/dist/providers/transform-messages.js.map +1 -1
- package/dist/stream.d.ts +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +12 -7
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/utils/event-stream.d.ts.map +1 -1
- package/dist/utils/event-stream.js.map +1 -1
- package/dist/utils/json-parse.d.ts.map +1 -1
- package/dist/utils/json-parse.js.map +1 -1
- package/dist/utils/oauth/anthropic.d.ts.map +1 -1
- package/dist/utils/oauth/anthropic.js.map +1 -1
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js.map +1 -1
- package/dist/utils/sanitize-unicode.d.ts.map +1 -1
- package/dist/utils/sanitize-unicode.js.map +1 -1
- package/dist/utils/validation.d.ts.map +1 -1
- package/dist/utils/validation.js +5 -2
- package/dist/utils/validation.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -1003,7 +1003,7 @@ export const MODELS = {
       cost: {
         input: 0.05,
         output: 0.4,
-        cacheRead: 0.
+        cacheRead: 0.005,
         cacheWrite: 0,
       },
       contextWindow: 400000,
@@ -1105,7 +1105,7 @@ export const MODELS = {
       cost: {
         input: 0.25,
         output: 2,
-        cacheRead: 0.
+        cacheRead: 0.025,
         cacheWrite: 0,
       },
       contextWindow: 400000,
@@ -1224,7 +1224,7 @@ export const MODELS = {
       cost: {
         input: 1.25,
         output: 10,
-        cacheRead: 0.
+        cacheRead: 0.125,
         cacheWrite: 0,
       },
       contextWindow: 400000,
@@ -2037,6 +2037,23 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 98304,
     },
+    "glm-4.7-flash": {
+      id: "glm-4.7-flash",
+      name: "GLM-4.7-Flash",
+      api: "anthropic-messages",
+      provider: "zai",
+      baseUrl: "https://api.z.ai/api/anthropic",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 200000,
+      maxTokens: 131072,
+    },
     "glm-4.5-air": {
       id: "glm-4.5-air",
       name: "GLM-4.5-Air",
@@ -2218,8 +2235,8 @@ export const MODELS = {
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0,
-        output:
+        input: 0.4,
+        output: 2,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -2534,6 +2551,23 @@ export const MODELS = {
     },
   },
   "opencode": {
+    "glm-4.7": {
+      id: "glm-4.7",
+      name: "GLM-4.7",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.6,
+        output: 2.2,
+        cacheRead: 0.1,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
     "qwen3-coder": {
       id: "qwen3-coder",
       name: "Qwen3 Coder",
@@ -2653,39 +2687,39 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 64000,
     },
-    "
-      id: "
-      name: "
+    "kimi-k2.5": {
+      id: "kimi-k2.5",
+      name: "Kimi K2.5",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
       reasoning: true,
       input: ["text", "image"],
       cost: {
-        input:
-        output:
-        cacheRead: 0.
+        input: 0.6,
+        output: 3,
+        cacheRead: 0.08,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 262144,
+      maxTokens: 262144,
     },
-    "
-      id: "
-      name: "
+    "gemini-3-pro": {
+      id: "gemini-3-pro",
+      name: "Gemini 3 Pro",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
       reasoning: true,
-      input: ["text"],
+      input: ["text", "image"],
       cost: {
-        input:
-        output:
-        cacheRead: 0.
+        input: 2,
+        output: 12,
+        cacheRead: 0.2,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 1048576,
+      maxTokens: 65536,
     },
     "claude-sonnet-4-5": {
       id: "claude-sonnet-4-5",
@@ -2721,23 +2755,6 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
-    "alpha-gd4": {
-      id: "alpha-gd4",
-      name: "Alpha GD4",
-      api: "openai-completions",
-      provider: "opencode",
-      baseUrl: "https://opencode.ai/zen/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0.5,
-        output: 2,
-        cacheRead: 0.15,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 32768,
-    },
     "kimi-k2-thinking": {
       id: "kimi-k2-thinking",
       name: "Kimi K2 Thinking",
@@ -2859,7 +2876,7 @@ export const MODELS = {
     },
     "glm-4.7-free": {
       id: "glm-4.7-free",
-      name: "GLM-4.7",
+      name: "GLM-4.7 Free",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
@@ -2929,7 +2946,7 @@ export const MODELS = {
     },
     "minimax-m2.1-free": {
      id: "minimax-m2.1-free",
-      name: "MiniMax M2.1",
+      name: "MiniMax M2.1 Free",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
@@ -2944,6 +2961,23 @@ export const MODELS = {
       contextWindow: 204800,
       maxTokens: 131072,
     },
+    "kimi-k2.5-free": {
+      id: "kimi-k2.5-free",
+      name: "Kimi K2.5 Free",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
     "claude-sonnet-4": {
       id: "claude-sonnet-4",
       name: "Claude Sonnet 4",
@@ -2978,6 +3012,23 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
+    "minimax-m2.1": {
+      id: "minimax-m2.1",
+      name: "MiniMax M2.1",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.3,
+        output: 1.2,
+        cacheRead: 0.1,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
     "gpt-5.2": {
       id: "gpt-5.2",
       name: "GPT-5.2",
@@ -3145,24 +3196,6 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 128000,
     },
-    "gpt-5-codex": {
-      id: "gpt-5-codex",
-      name: "GPT-5-Codex",
-      api: "openai-responses",
-      provider: "github-copilot",
-      baseUrl: "https://api.individual.githubcopilot.com",
-      headers: { "User-Agent": "GitHubCopilotChat/0.35.0", "Editor-Version": "vscode/1.107.0", "Editor-Plugin-Version": "copilot-chat/0.35.0", "Copilot-Integration-Id": "vscode-chat" },
-      reasoning: true,
-      input: ["text", "image"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 128000,
-      maxTokens: 128000,
-    },
     "gpt-4o": {
       id: "gpt-4o",
       name: "GPT-4o",
@@ -3198,7 +3231,7 @@ export const MODELS = {
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 64000,
       maxTokens: 16384,
     },
     "gpt-5-mini": {
@@ -3351,6 +3384,74 @@ export const MODELS = {
     },
   },
   "openrouter": {
+    "arcee-ai/trinity-large-preview:free": {
+      id: "arcee-ai/trinity-large-preview:free",
+      name: "Arcee AI: Trinity Large Preview (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131000,
+      maxTokens: 4096,
+    },
+    "moonshotai/kimi-k2.5": {
+      id: "moonshotai/kimi-k2.5",
+      name: "MoonshotAI: Kimi K2.5",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.5,
+        output: 2.8,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 4096,
+    },
+    "upstage/solar-pro-3:free": {
+      id: "upstage/solar-pro-3:free",
+      name: "Upstage: Solar Pro 3 (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 128000,
+      maxTokens: 4096,
+    },
+    "z-ai/glm-4.7-flash": {
+      id: "z-ai/glm-4.7-flash",
+      name: "Z.AI: GLM 4.7 Flash",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.07,
+        output: 0.39999999999999997,
+        cacheRead: 0.01,
+        cacheWrite: 0,
+      },
+      contextWindow: 200000,
+      maxTokens: 131072,
+    },
     "openai/gpt-5.2-codex": {
       id: "openai/gpt-5.2-codex",
       name: "OpenAI: GPT-5.2-Codex",
@@ -3400,7 +3501,7 @@ export const MODELS = {
       cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 32768,
     },
     "bytedance-seed/seed-1.6": {
       id: "bytedance-seed/seed-1.6",
@@ -3429,12 +3530,12 @@ export const MODELS = {
       input: ["text"],
       cost: {
         input: 0.27,
-        output: 1.
+        output: 1.1,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 196608,
-      maxTokens:
+      maxTokens: 196608,
     },
     "z-ai/glm-4.7": {
       id: "z-ai/glm-4.7",
@@ -3465,7 +3566,7 @@ export const MODELS = {
         input: 0.5,
         output: 3,
         cacheRead: 0.049999999999999996,
-        cacheWrite: 0,
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
       maxTokens: 65535,
@@ -3487,23 +3588,6 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "xiaomi/mimo-v2-flash:free": {
-      id: "xiaomi/mimo-v2-flash:free",
-      name: "Xiaomi: MiMo-V2-Flash (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 65536,
-    },
     "xiaomi/mimo-v2-flash": {
       id: "xiaomi/mimo-v2-flash",
       name: "Xiaomi: MiMo-V2-Flash",
@@ -3547,13 +3631,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.049999999999999996,
+        output: 0.19999999999999998,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 4096,
     },
     "openai/gpt-5.2-chat": {
       id: "openai/gpt-5.2-chat",
@@ -3606,23 +3690,6 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
-    "mistralai/devstral-2512:free": {
-      id: "mistralai/devstral-2512:free",
-      name: "Mistral: Devstral 2 2512 (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 4096,
-    },
     "mistralai/devstral-2512": {
       id: "mistralai/devstral-2512",
       name: "Mistral: Devstral 2 2512",
@@ -4314,7 +4381,7 @@ export const MODELS = {
       cost: {
         input: 0.15,
         output: 0.6,
-        cacheRead: 0,
+        cacheRead: 0.075,
         cacheWrite: 0,
       },
       contextWindow: 262144,
@@ -4365,7 +4432,7 @@ export const MODELS = {
       cost: {
         input: 0.44,
         output: 1.76,
-        cacheRead: 0,
+        cacheRead: 0.11,
         cacheWrite: 0,
       },
       contextWindow: 204800,
@@ -4399,7 +4466,7 @@ export const MODELS = {
       cost: {
         input: 0.21,
         output: 0.32,
-        cacheRead: 0,
+        cacheRead: 0.21,
         cacheWrite: 0,
       },
       contextWindow: 163840,
@@ -4416,11 +4483,11 @@ export const MODELS = {
       cost: {
         input: 0.3,
         output: 2.5,
-        cacheRead: 0.
-        cacheWrite: 0.
+        cacheRead: 0.03,
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
-      maxTokens:
+      maxTokens: 65536,
     },
     "google/gemini-2.5-flash-lite-preview-09-2025": {
       id: "google/gemini-2.5-flash-lite-preview-09-2025",
@@ -4434,10 +4501,27 @@ export const MODELS = {
         input: 0.09999999999999999,
         output: 0.39999999999999997,
         cacheRead: 0.01,
-        cacheWrite: 0.
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
-      maxTokens:
+      maxTokens: 65535,
+    },
+    "qwen/qwen3-vl-235b-a22b-thinking": {
+      id: "qwen/qwen3-vl-235b-a22b-thinking",
+      name: "Qwen: Qwen3 VL 235B A22B Thinking",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.44999999999999996,
+        output: 3.5,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
     },
     "qwen/qwen3-vl-235b-a22b-instruct": {
       id: "qwen/qwen3-vl-235b-a22b-instruct",
@@ -5317,7 +5401,7 @@ export const MODELS = {
       cost: {
         input: 0.22,
         output: 1.7999999999999998,
-        cacheRead: 0,
+        cacheRead: 0.022,
         cacheWrite: 0,
       },
       contextWindow: 262144,
@@ -5335,7 +5419,7 @@ export const MODELS = {
         input: 0.09999999999999999,
         output: 0.39999999999999997,
         cacheRead: 0.01,
-        cacheWrite: 0.
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
       maxTokens: 65535,
@@ -5589,7 +5673,7 @@ export const MODELS = {
       cost: {
         input: 1.25,
         output: 10,
-        cacheRead: 0.
+        cacheRead: 0.125,
         cacheWrite: 0.375,
       },
       contextWindow: 1048576,
@@ -5691,7 +5775,7 @@ export const MODELS = {
       cost: {
         input: 1.25,
         output: 10,
-        cacheRead: 0.
+        cacheRead: 0.125,
         cacheWrite: 0.375,
       },
       contextWindow: 1048576,
@@ -5776,7 +5860,7 @@ export const MODELS = {
       cost: {
         input: 0.049999999999999996,
         output: 0.25,
-        cacheRead: 0,
+        cacheRead: 0.049999999999999996,
         cacheWrite: 0,
       },
       contextWindow: 32000,
@@ -6202,7 +6286,7 @@ export const MODELS = {
         input: 0.09999999999999999,
         output: 0.39999999999999997,
         cacheRead: 0.024999999999999998,
-        cacheWrite: 0.
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
       maxTokens: 8192,
@@ -6377,23 +6461,6 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 100000,
     },
-    "google/gemini-2.0-flash-exp:free": {
-      id: "google/gemini-2.0-flash-exp:free",
-      name: "Google: Gemini 2.0 Flash Experimental (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text", "image"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 1048576,
-      maxTokens: 8192,
-    },
     "meta-llama/llama-3.3-70b-instruct:free": {
       id: "meta-llama/llama-3.3-70b-instruct:free",
       name: "Meta: Llama 3.3 70B Instruct (free)",
@@ -6598,34 +6665,34 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "mistralai/ministral-
-      id: "mistralai/ministral-
-      name: "Mistral: Ministral
+    "mistralai/ministral-8b": {
+      id: "mistralai/ministral-8b",
+      name: "Mistral: Ministral 8B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.09999999999999999,
+        output: 0.09999999999999999,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 131072,
       maxTokens: 4096,
     },
-    "mistralai/ministral-
-      id: "mistralai/ministral-
-      name: "Mistral: Ministral
+    "mistralai/ministral-3b": {
+      id: "mistralai/ministral-3b",
+      name: "Mistral: Ministral 3B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.04,
+        output: 0.04,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6785,23 +6852,6 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-70b-instruct": {
-      id: "meta-llama/llama-3.1-70b-instruct",
-      name: "Meta: Llama 3.1 70B Instruct",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.39999999999999997,
-        output: 0.39999999999999997,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 4096,
-    },
     "meta-llama/llama-3.1-8b-instruct": {
       id: "meta-llama/llama-3.1-8b-instruct",
       name: "Meta: Llama 3.1 8B Instruct",
@@ -6836,6 +6886,23 @@ export const MODELS = {
       contextWindow: 10000,
       maxTokens: 4096,
     },
+    "meta-llama/llama-3.1-70b-instruct": {
+      id: "meta-llama/llama-3.1-70b-instruct",
+      name: "Meta: Llama 3.1 70B Instruct",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.39999999999999997,
+        output: 0.39999999999999997,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 4096,
+    },
     "mistralai/mistral-nemo": {
       id: "mistralai/mistral-nemo",
       name: "Mistral: Mistral Nemo",
@@ -6851,11 +6918,11 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 16384,
     },
-    "openai/gpt-4o-mini": {
-      id: "openai/gpt-4o-mini",
-      name: "OpenAI: GPT-4o-mini",
+    "openai/gpt-4o-mini-2024-07-18": {
+      id: "openai/gpt-4o-mini-2024-07-18",
+      name: "OpenAI: GPT-4o-mini (2024-07-18)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6870,9 +6937,9 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "openai/gpt-4o-mini
-      id: "openai/gpt-4o-mini
-      name: "OpenAI: GPT-4o-mini
+    "openai/gpt-4o-mini": {
+      id: "openai/gpt-4o-mini",
+      name: "OpenAI: GPT-4o-mini",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -7040,38 +7107,38 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo-0613": {
+      id: "openai/gpt-3.5-turbo-0613",
+      name: "OpenAI: GPT-3.5 Turbo (older v0613)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 1,
+        output: 2,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 4095,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-4-turbo-preview": {
+      id: "openai/gpt-4-turbo-preview",
+      name: "OpenAI: GPT-4 Turbo Preview",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 10,
+        output: 30,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 128000,
       maxTokens: 4096,
     },
     "mistralai/mistral-tiny": {
@@ -7142,26 +7209,26 @@ export const MODELS = {
       contextWindow: 16385,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-4-0314": {
+      id: "openai/gpt-4-0314",
+      name: "OpenAI: GPT-4 (older v0314)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 30,
+        output: 60,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-4
-      id: "openai/gpt-4
-      name: "OpenAI: GPT-4
+    "openai/gpt-4": {
+      id: "openai/gpt-4",
+      name: "OpenAI: GPT-4",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -7176,21 +7243,21 @@ export const MODELS = {
       contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo": {
+      id: "openai/gpt-3.5-turbo",
+      name: "OpenAI: GPT-3.5 Turbo",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 0.5,
+        output: 1.5,
       cacheRead: 0,
       cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 16385,
       maxTokens: 4096,
     },
     "openrouter/auto": {