@yeshwanthyk/ai 0.1.0 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/agent-loop.d.ts.map +1 -1
- package/dist/agent/agent-loop.js +50 -16
- package/dist/agent/agent-loop.js.map +1 -1
- package/dist/agent/tools/calculate.d.ts.map +1 -1
- package/dist/agent/tools/calculate.js +7 -2
- package/dist/agent/tools/calculate.js.map +1 -1
- package/dist/agent/tools/get-current-time.d.ts.map +1 -1
- package/dist/agent/tools/get-current-time.js +7 -2
- package/dist/agent/tools/get-current-time.js.map +1 -1
- package/dist/agent/types.d.ts +4 -4
- package/dist/agent/types.d.ts.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/models.generated.d.ts +167 -105
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +283 -216
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +44 -14
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/google.d.ts.map +1 -1
- package/dist/providers/google.js +47 -13
- package/dist/providers/google.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +41 -12
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses.js +6 -2
- package/dist/providers/openai-responses.js.map +1 -1
- package/dist/providers/transform-messages.d.ts.map +1 -1
- package/dist/providers/transform-messages.js +2 -1
- package/dist/providers/transform-messages.js.map +1 -1
- package/dist/stream.d.ts +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +12 -7
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/utils/event-stream.d.ts.map +1 -1
- package/dist/utils/event-stream.js.map +1 -1
- package/dist/utils/json-parse.d.ts.map +1 -1
- package/dist/utils/json-parse.js.map +1 -1
- package/dist/utils/oauth/anthropic.d.ts +19 -1
- package/dist/utils/oauth/anthropic.d.ts.map +1 -1
- package/dist/utils/oauth/anthropic.js +23 -7
- package/dist/utils/oauth/anthropic.js.map +1 -1
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js.map +1 -1
- package/dist/utils/sanitize-unicode.d.ts.map +1 -1
- package/dist/utils/sanitize-unicode.js.map +1 -1
- package/dist/utils/validation.d.ts.map +1 -1
- package/dist/utils/validation.js +5 -2
- package/dist/utils/validation.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -1003,7 +1003,7 @@ export const MODELS = {
       cost: {
         input: 0.05,
         output: 0.4,
-        cacheRead: 0.
+        cacheRead: 0.005,
         cacheWrite: 0,
       },
       contextWindow: 400000,
@@ -1105,7 +1105,7 @@ export const MODELS = {
       cost: {
         input: 0.25,
         output: 2,
-        cacheRead: 0.
+        cacheRead: 0.025,
         cacheWrite: 0,
       },
       contextWindow: 400000,
@@ -1224,7 +1224,7 @@ export const MODELS = {
       cost: {
         input: 1.25,
         output: 10,
-        cacheRead: 0.
+        cacheRead: 0.125,
         cacheWrite: 0,
       },
       contextWindow: 400000,
@@ -2037,6 +2037,23 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 98304,
     },
+    "glm-4.7-flash": {
+      id: "glm-4.7-flash",
+      name: "GLM-4.7-Flash",
+      api: "anthropic-messages",
+      provider: "zai",
+      baseUrl: "https://api.z.ai/api/anthropic",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 200000,
+      maxTokens: 131072,
+    },
     "glm-4.5-air": {
       id: "glm-4.5-air",
       name: "GLM-4.5-Air",
@@ -2218,8 +2235,8 @@ export const MODELS = {
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0,
-        output:
+        input: 0.4,
+        output: 2,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -2534,6 +2551,23 @@ export const MODELS = {
     },
   },
   "opencode": {
+    "glm-4.7": {
+      id: "glm-4.7",
+      name: "GLM-4.7",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.6,
+        output: 2.2,
+        cacheRead: 0.1,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
     "qwen3-coder": {
       id: "qwen3-coder",
       name: "Qwen3 Coder",
@@ -2636,6 +2670,23 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 64000,
     },
+    "trinity-large-preview-free": {
+      id: "trinity-large-preview-free",
+      name: "Trinity Large Preview",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 131072,
+    },
     "claude-opus-4-5": {
       id: "claude-opus-4-5",
       name: "Claude Opus 4.5",
@@ -2653,39 +2704,39 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 64000,
     },
-    "
-      id: "
-      name: "
+    "kimi-k2.5": {
+      id: "kimi-k2.5",
+      name: "Kimi K2.5",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
       reasoning: true,
       input: ["text", "image"],
       cost: {
-        input:
-        output:
-        cacheRead: 0.
+        input: 0.6,
+        output: 3,
+        cacheRead: 0.08,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 262144,
+      maxTokens: 262144,
     },
-    "
-      id: "
-      name: "
+    "gemini-3-pro": {
+      id: "gemini-3-pro",
+      name: "Gemini 3 Pro",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
       reasoning: true,
-      input: ["text"],
+      input: ["text", "image"],
       cost: {
-        input:
-        output:
-        cacheRead: 0.
+        input: 2,
+        output: 12,
+        cacheRead: 0.2,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 1048576,
+      maxTokens: 65536,
     },
     "claude-sonnet-4-5": {
       id: "claude-sonnet-4-5",
@@ -2721,23 +2772,6 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
-    "alpha-gd4": {
-      id: "alpha-gd4",
-      name: "Alpha GD4",
-      api: "openai-completions",
-      provider: "opencode",
-      baseUrl: "https://opencode.ai/zen/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0.5,
-        output: 2,
-        cacheRead: 0.15,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 32768,
-    },
     "kimi-k2-thinking": {
       id: "kimi-k2-thinking",
       name: "Kimi K2 Thinking",
@@ -2859,7 +2893,7 @@ export const MODELS = {
     },
     "glm-4.7-free": {
       id: "glm-4.7-free",
-      name: "GLM-4.7",
+      name: "GLM-4.7 Free",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
@@ -2929,7 +2963,7 @@ export const MODELS = {
     },
     "minimax-m2.1-free": {
       id: "minimax-m2.1-free",
-      name: "MiniMax M2.1",
+      name: "MiniMax M2.1 Free",
       api: "openai-completions",
       provider: "opencode",
       baseUrl: "https://opencode.ai/zen/v1",
@@ -2944,6 +2978,23 @@ export const MODELS = {
       contextWindow: 204800,
       maxTokens: 131072,
     },
+    "kimi-k2.5-free": {
+      id: "kimi-k2.5-free",
+      name: "Kimi K2.5 Free",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
     "claude-sonnet-4": {
       id: "claude-sonnet-4",
       name: "Claude Sonnet 4",
@@ -2978,6 +3029,23 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
+    "minimax-m2.1": {
+      id: "minimax-m2.1",
+      name: "MiniMax M2.1",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.3,
+        output: 1.2,
+        cacheRead: 0.1,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
     "gpt-5.2": {
       id: "gpt-5.2",
       name: "GPT-5.2",
@@ -3145,24 +3213,6 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 128000,
     },
-    "gpt-5-codex": {
-      id: "gpt-5-codex",
-      name: "GPT-5-Codex",
-      api: "openai-responses",
-      provider: "github-copilot",
-      baseUrl: "https://api.individual.githubcopilot.com",
-      headers: { "User-Agent": "GitHubCopilotChat/0.35.0", "Editor-Version": "vscode/1.107.0", "Editor-Plugin-Version": "copilot-chat/0.35.0", "Copilot-Integration-Id": "vscode-chat" },
-      reasoning: true,
-      input: ["text", "image"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 128000,
-      maxTokens: 128000,
-    },
     "gpt-4o": {
       id: "gpt-4o",
       name: "GPT-4o",
@@ -3198,7 +3248,7 @@ export const MODELS = {
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 64000,
       maxTokens: 16384,
     },
     "gpt-5-mini": {
@@ -3351,6 +3401,74 @@ export const MODELS = {
     },
   },
   "openrouter": {
+    "arcee-ai/trinity-large-preview:free": {
+      id: "arcee-ai/trinity-large-preview:free",
+      name: "Arcee AI: Trinity Large Preview (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131000,
+      maxTokens: 4096,
+    },
+    "moonshotai/kimi-k2.5": {
+      id: "moonshotai/kimi-k2.5",
+      name: "MoonshotAI: Kimi K2.5",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.5,
+        output: 2.8,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 4096,
+    },
+    "upstage/solar-pro-3:free": {
+      id: "upstage/solar-pro-3:free",
+      name: "Upstage: Solar Pro 3 (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 128000,
+      maxTokens: 4096,
+    },
+    "z-ai/glm-4.7-flash": {
+      id: "z-ai/glm-4.7-flash",
+      name: "Z.AI: GLM 4.7 Flash",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.07,
+        output: 0.39999999999999997,
+        cacheRead: 0.01,
+        cacheWrite: 0,
+      },
+      contextWindow: 200000,
+      maxTokens: 131072,
+    },
     "openai/gpt-5.2-codex": {
       id: "openai/gpt-5.2-codex",
       name: "OpenAI: GPT-5.2-Codex",
@@ -3400,7 +3518,7 @@ export const MODELS = {
        cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 32768,
     },
     "bytedance-seed/seed-1.6": {
       id: "bytedance-seed/seed-1.6",
@@ -3429,12 +3547,12 @@ export const MODELS = {
       input: ["text"],
       cost: {
         input: 0.27,
-        output: 1.
+        output: 1.1,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 196608,
-      maxTokens:
+      maxTokens: 196608,
     },
     "z-ai/glm-4.7": {
       id: "z-ai/glm-4.7",
@@ -3465,7 +3583,7 @@ export const MODELS = {
         input: 0.5,
         output: 3,
         cacheRead: 0.049999999999999996,
-        cacheWrite: 0,
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
       maxTokens: 65535,
@@ -3487,23 +3605,6 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "xiaomi/mimo-v2-flash:free": {
-      id: "xiaomi/mimo-v2-flash:free",
-      name: "Xiaomi: MiMo-V2-Flash (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 65536,
-    },
     "xiaomi/mimo-v2-flash": {
       id: "xiaomi/mimo-v2-flash",
       name: "Xiaomi: MiMo-V2-Flash",
@@ -3547,13 +3648,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.049999999999999996,
+        output: 0.19999999999999998,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 4096,
     },
     "openai/gpt-5.2-chat": {
       id: "openai/gpt-5.2-chat",
@@ -3606,23 +3707,6 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
-    "mistralai/devstral-2512:free": {
-      id: "mistralai/devstral-2512:free",
-      name: "Mistral: Devstral 2 2512 (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 4096,
-    },
     "mistralai/devstral-2512": {
       id: "mistralai/devstral-2512",
       name: "Mistral: Devstral 2 2512",
@@ -4314,7 +4398,7 @@ export const MODELS = {
       cost: {
         input: 0.15,
         output: 0.6,
-        cacheRead: 0,
+        cacheRead: 0.075,
         cacheWrite: 0,
       },
       contextWindow: 262144,
@@ -4365,7 +4449,7 @@ export const MODELS = {
       cost: {
         input: 0.44,
         output: 1.76,
-        cacheRead: 0,
+        cacheRead: 0.11,
         cacheWrite: 0,
       },
       contextWindow: 204800,
@@ -4399,7 +4483,7 @@ export const MODELS = {
       cost: {
         input: 0.21,
         output: 0.32,
-        cacheRead: 0,
+        cacheRead: 0.21,
         cacheWrite: 0,
       },
       contextWindow: 163840,
@@ -4416,11 +4500,11 @@ export const MODELS = {
       cost: {
         input: 0.3,
         output: 2.5,
-        cacheRead: 0.
-        cacheWrite: 0.
+        cacheRead: 0.03,
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
-      maxTokens:
+      maxTokens: 65536,
     },
     "google/gemini-2.5-flash-lite-preview-09-2025": {
       id: "google/gemini-2.5-flash-lite-preview-09-2025",
@@ -4434,10 +4518,27 @@ export const MODELS = {
         input: 0.09999999999999999,
         output: 0.39999999999999997,
         cacheRead: 0.01,
-        cacheWrite: 0.
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
-      maxTokens:
+      maxTokens: 65535,
+    },
+    "qwen/qwen3-vl-235b-a22b-thinking": {
+      id: "qwen/qwen3-vl-235b-a22b-thinking",
+      name: "Qwen: Qwen3 VL 235B A22B Thinking",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.44999999999999996,
+        output: 3.5,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
     },
     "qwen/qwen3-vl-235b-a22b-instruct": {
       id: "qwen/qwen3-vl-235b-a22b-instruct",
@@ -5049,7 +5150,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 131072,
     },
     "openai/gpt-oss-120b": {
       id: "openai/gpt-oss-120b",
@@ -5100,7 +5201,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 131072,
     },
     "openai/gpt-oss-20b": {
       id: "openai/gpt-oss-20b",
@@ -5317,7 +5418,7 @@ export const MODELS = {
       cost: {
         input: 0.22,
         output: 1.7999999999999998,
-        cacheRead: 0,
+        cacheRead: 0.022,
         cacheWrite: 0,
       },
       contextWindow: 262144,
@@ -5335,7 +5436,7 @@ export const MODELS = {
         input: 0.09999999999999999,
         output: 0.39999999999999997,
         cacheRead: 0.01,
-        cacheWrite: 0.
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
       maxTokens: 65535,
@@ -5589,7 +5690,7 @@ export const MODELS = {
       cost: {
         input: 1.25,
         output: 10,
-        cacheRead: 0.
+        cacheRead: 0.125,
         cacheWrite: 0.375,
       },
       contextWindow: 1048576,
@@ -5691,7 +5792,7 @@ export const MODELS = {
       cost: {
         input: 1.25,
         output: 10,
-        cacheRead: 0.
+        cacheRead: 0.125,
         cacheWrite: 0.375,
       },
       contextWindow: 1048576,
@@ -5776,7 +5877,7 @@ export const MODELS = {
       cost: {
         input: 0.049999999999999996,
         output: 0.25,
-        cacheRead: 0,
+        cacheRead: 0.049999999999999996,
         cacheWrite: 0,
       },
       contextWindow: 32000,
@@ -6103,7 +6204,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 32768,
-      maxTokens:
+      maxTokens: 32768,
     },
     "google/gemini-2.0-flash-lite-001": {
       id: "google/gemini-2.0-flash-lite-001",
@@ -6202,7 +6303,7 @@ export const MODELS = {
         input: 0.09999999999999999,
         output: 0.39999999999999997,
         cacheRead: 0.024999999999999998,
-        cacheWrite: 0.
+        cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
       maxTokens: 8192,
@@ -6309,23 +6410,6 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 32768,
     },
-    "deepseek/deepseek-r1-distill-llama-70b": {
-      id: "deepseek/deepseek-r1-distill-llama-70b",
-      name: "DeepSeek: R1 Distill Llama 70B",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0.03,
-        output: 0.11,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 131072,
-    },
     "deepseek/deepseek-r1": {
       id: "deepseek/deepseek-r1",
       name: "DeepSeek: R1",
@@ -6377,23 +6461,6 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 100000,
     },
-    "google/gemini-2.0-flash-exp:free": {
-      id: "google/gemini-2.0-flash-exp:free",
-      name: "Google: Gemini 2.0 Flash Experimental (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text", "image"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 1048576,
-      maxTokens: 8192,
-    },
     "meta-llama/llama-3.3-70b-instruct:free": {
       id: "meta-llama/llama-3.3-70b-instruct:free",
       name: "Meta: Llama 3.3 70B Instruct (free)",
@@ -6562,7 +6629,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 32768,
-      maxTokens:
+      maxTokens: 32768,
     },
     "anthropic/claude-3.5-haiku": {
       id: "anthropic/claude-3.5-haiku",
@@ -6598,34 +6665,34 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "mistralai/ministral-
-      id: "mistralai/ministral-
-      name: "Mistral: Ministral
+    "mistralai/ministral-8b": {
+      id: "mistralai/ministral-8b",
+      name: "Mistral: Ministral 8B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.09999999999999999,
+        output: 0.09999999999999999,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 131072,
       maxTokens: 4096,
     },
-    "mistralai/ministral-
-      id: "mistralai/ministral-
-      name: "Mistral: Ministral
+    "mistralai/ministral-3b": {
+      id: "mistralai/ministral-3b",
+      name: "Mistral: Ministral 3B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.04,
+        output: 0.04,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6681,7 +6748,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 32768,
-      maxTokens:
+      maxTokens: 32768,
     },
     "qwen/qwen-2.5-72b-instruct": {
       id: "qwen/qwen-2.5-72b-instruct",
@@ -6766,7 +6833,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 32768,
-      maxTokens:
+      maxTokens: 32768,
     },
     "openai/gpt-4o-2024-08-06": {
       id: "openai/gpt-4o-2024-08-06",
@@ -6785,23 +6852,6 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-70b-instruct": {
-      id: "meta-llama/llama-3.1-70b-instruct",
-      name: "Meta: Llama 3.1 70B Instruct",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.39999999999999997,
-        output: 0.39999999999999997,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 4096,
-    },
     "meta-llama/llama-3.1-8b-instruct": {
       id: "meta-llama/llama-3.1-8b-instruct",
       name: "Meta: Llama 3.1 8B Instruct",
@@ -6836,6 +6886,23 @@ export const MODELS = {
       contextWindow: 10000,
       maxTokens: 4096,
     },
+    "meta-llama/llama-3.1-70b-instruct": {
+      id: "meta-llama/llama-3.1-70b-instruct",
+      name: "Meta: Llama 3.1 70B Instruct",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.39999999999999997,
+        output: 0.39999999999999997,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 4096,
+    },
     "mistralai/mistral-nemo": {
       id: "mistralai/mistral-nemo",
       name: "Mistral: Mistral Nemo",
@@ -6851,11 +6918,11 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 16384,
     },
-    "openai/gpt-4o-mini": {
-      id: "openai/gpt-4o-mini",
-      name: "OpenAI: GPT-4o-mini",
+    "openai/gpt-4o-mini-2024-07-18": {
+      id: "openai/gpt-4o-mini-2024-07-18",
+      name: "OpenAI: GPT-4o-mini (2024-07-18)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6870,9 +6937,9 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "openai/gpt-4o-mini
-      id: "openai/gpt-4o-mini
-      name: "OpenAI: GPT-4o-mini
+    "openai/gpt-4o-mini": {
+      id: "openai/gpt-4o-mini",
+      name: "OpenAI: GPT-4o-mini",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -7040,38 +7107,38 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo-0613": {
+      id: "openai/gpt-3.5-turbo-0613",
+      name: "OpenAI: GPT-3.5 Turbo (older v0613)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 1,
+        output: 2,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 4095,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-4-turbo-preview": {
+      id: "openai/gpt-4-turbo-preview",
+      name: "OpenAI: GPT-4 Turbo Preview",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 10,
+        output: 30,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 128000,
       maxTokens: 4096,
     },
     "mistralai/mistral-tiny": {
@@ -7142,26 +7209,26 @@ export const MODELS = {
       contextWindow: 16385,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-4-0314": {
+      id: "openai/gpt-4-0314",
+      name: "OpenAI: GPT-4 (older v0314)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 30,
+        output: 60,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-4
-      id: "openai/gpt-4
-      name: "OpenAI: GPT-4
+    "openai/gpt-4": {
+      id: "openai/gpt-4",
+      name: "OpenAI: GPT-4",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -7176,21 +7243,21 @@ export const MODELS = {
      contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo": {
+      id: "openai/gpt-3.5-turbo",
+      name: "OpenAI: GPT-3.5 Turbo",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 0.5,
+        output: 1.5,
       cacheRead: 0,
       cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 16385,
       maxTokens: 4096,
     },
     "openrouter/auto": {