@mariozechner/pi-ai 0.9.1 → 0.9.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/models.generated.d.ts +68 -34
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +188 -154
- package/dist/models.generated.js.map +1 -1
- package/package.json +1 -1
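All of the entries touched in the diff below share one record shape inside the generated `MODELS` table (provider → model id → metadata). For orientation, here is a minimal TypeScript sketch of that shape and of reading the newly added `claude-opus-4-5` entry; the `ModelInfo` name, the `Record` typing, and the per-million-token reading of the `cost` fields are assumptions inferred from the hunks below, not the package's documented API.

```ts
// Sketch only: field names and nesting are inferred from the diff below.
// "ModelInfo" is a hypothetical name; the package's own .d.ts may differ.
interface ModelInfo {
    id: string;
    name: string;
    api: string;                 // e.g. "anthropic-messages", "openai-completions"
    provider: string;            // e.g. "anthropic", "openrouter", "cerebras"
    baseUrl: string;
    reasoning: boolean;
    input: ("text" | "image")[];
    cost: {
        input: number;           // presumably USD per million input tokens
        output: number;          // presumably USD per million output tokens
        cacheRead: number;
        cacheWrite: number;
    };
    contextWindow: number;
    maxTokens: number;
}

// Hypothetical usage: look up the entry added in this release.
declare const MODELS: Record<string, Record<string, ModelInfo>>;
const opus = MODELS["anthropic"]["claude-opus-4-5"];
console.log(opus.cost.input, opus.cost.output); // 5, 25 per the first hunk
```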
package/dist/models.generated.js
CHANGED
@@ -104,6 +104,23 @@ export const MODELS = {
             contextWindow: 200000,
             maxTokens: 8192,
         },
+        "claude-opus-4-5": {
+            id: "claude-opus-4-5",
+            name: "Claude Opus 4.5",
+            api: "anthropic-messages",
+            provider: "anthropic",
+            baseUrl: "https://api.anthropic.com",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 5,
+                output: 25,
+                cacheRead: 1.5,
+                cacheWrite: 18.75,
+            },
+            contextWindow: 200000,
+            maxTokens: 64000,
+        },
         "claude-3-opus-20240229": {
             id: "claude-3-opus-20240229",
             name: "Claude Opus 3",
@@ -1455,23 +1472,6 @@ export const MODELS = {
             contextWindow: 131072,
             maxTokens: 40960,
         },
-        "qwen-3-coder-480b": {
-            id: "qwen-3-coder-480b",
-            name: "Qwen 3 Coder 480B",
-            api: "openai-completions",
-            provider: "cerebras",
-            baseUrl: "https://api.cerebras.ai/v1",
-            reasoning: false,
-            input: ["text"],
-            cost: {
-                input: 2,
-                output: 2,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 131000,
-            maxTokens: 32000,
-        },
         "gpt-oss-120b": {
             id: "gpt-oss-120b",
             name: "GPT OSS 120B",
@@ -1954,6 +1954,40 @@ export const MODELS = {
         },
     },
     openrouter: {
+        "anthropic/claude-opus-4.5": {
+            id: "anthropic/claude-opus-4.5",
+            name: "Anthropic: Claude Opus 4.5",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 5,
+                output: 25,
+                cacheRead: 0.5,
+                cacheWrite: 6.25,
+            },
+            contextWindow: 200000,
+            maxTokens: 32000,
+        },
+        "allenai/olmo-3-7b-instruct": {
+            id: "allenai/olmo-3-7b-instruct",
+            name: "AllenAI: Olmo 3 7B Instruct",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.09999999999999999,
+                output: 0.19999999999999998,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 65536,
+            maxTokens: 65536,
+        },
         "x-ai/grok-4.1-fast": {
             id: "x-ai/grok-4.1-fast",
             name: "xAI: Grok 4.1 Fast",
@@ -2184,8 +2218,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output:
+                input: 0.24,
+                output: 0.96,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -2269,8 +2303,8 @@ export const MODELS = {
             reasoning: false,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.064,
+                output: 0.39999999999999997,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -2354,8 +2388,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output:
+                input: 0.16,
+                output: 0.7999999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -2456,13 +2490,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.216,
+                output: 0.328,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 163840,
-            maxTokens:
+            maxTokens: 65536,
         },
         "google/gemini-2.5-flash-preview-09-2025": {
             id: "google/gemini-2.5-flash-preview-09-2025",
@@ -2583,34 +2617,34 @@ export const MODELS = {
             contextWindow: 400000,
             maxTokens: 128000,
         },
-        "deepseek/deepseek-v3.1-terminus": {
-            id: "deepseek/deepseek-v3.1-terminus",
-            name: "DeepSeek: DeepSeek V3.1 Terminus",
+        "deepseek/deepseek-v3.1-terminus:exacto": {
+            id: "deepseek/deepseek-v3.1-terminus:exacto",
+            name: "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.216,
+                output: 0.7999999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 131072,
+            maxTokens: 65536,
         },
-        "deepseek/deepseek-v3.1-terminus
-            id: "deepseek/deepseek-v3.1-terminus
-            name: "DeepSeek: DeepSeek V3.1 Terminus
+        "deepseek/deepseek-v3.1-terminus": {
+            id: "deepseek/deepseek-v3.1-terminus",
+            name: "DeepSeek: DeepSeek V3.1 Terminus",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output:
+                input: 0.216,
+                output: 0.7999999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -2694,13 +2728,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
+                input: 0.12,
                 output: 1.2,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 131072,
+            maxTokens: 32768,
         },
         "qwen/qwen3-next-80b-a3b-instruct": {
             id: "qwen/qwen3-next-80b-a3b-instruct",
@@ -3017,8 +3051,8 @@ export const MODELS = {
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.056,
+                output: 0.224,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -3034,8 +3068,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.112,
+                output: 0.448,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -3051,8 +3085,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output: 1.
+                input: 0.48,
+                output: 1.44,
                 cacheRead: 0.11,
                 cacheWrite: 0,
             },
@@ -3144,9 +3178,9 @@ export const MODELS = {
             contextWindow: 400000,
             maxTokens: 128000,
         },
-        "openai/gpt-oss-120b": {
-            id: "openai/gpt-oss-120b",
-            name: "OpenAI: gpt-oss-120b",
+        "openai/gpt-oss-120b:exacto": {
+            id: "openai/gpt-oss-120b:exacto",
+            name: "OpenAI: gpt-oss-120b (exacto)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -3154,29 +3188,29 @@ export const MODELS = {
             input: ["text"],
             cost: {
                 input: 0.04,
-                output: 0.
+                output: 0.19999999999999998,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 131072,
-            maxTokens:
+            maxTokens: 32768,
         },
-        "openai/gpt-oss-120b
-            id: "openai/gpt-oss-120b
-            name: "OpenAI: gpt-oss-120b
+        "openai/gpt-oss-120b": {
+            id: "openai/gpt-oss-120b",
+            name: "OpenAI: gpt-oss-120b",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.04,
+                output: 0.19999999999999998,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 131072,
-            maxTokens:
+            maxTokens: 32768,
         },
         "openai/gpt-oss-20b:free": {
             id: "openai/gpt-oss-20b:free",
@@ -3323,8 +3357,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.10400000000000001,
+                output: 0.6799999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -3408,13 +3442,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 1,
+                output: 4,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 262144,
-            maxTokens:
+            maxTokens: 32768,
         },
         "google/gemini-2.5-flash-lite": {
             id: "google/gemini-2.5-flash-lite",
@@ -3442,13 +3476,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.072,
+                output: 0.464,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 131072,
+            maxTokens: 16384,
         },
         "moonshotai/kimi-k2": {
             id: "moonshotai/kimi-k2",
@@ -3459,13 +3493,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output:
+                input: 0.456,
+                output: 1.8399999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 131072,
-            maxTokens:
+            maxTokens: 131072,
         },
         "mistralai/devstral-medium": {
             id: "mistralai/devstral-medium",
@@ -3969,8 +4003,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.028,
+                output: 0.1104,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -4003,8 +4037,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.08,
+                output: 0.24,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -4190,13 +4224,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.136,
+                output: 0.6799999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 1048576,
-            maxTokens:
+            maxTokens: 8192,
         },
         "meta-llama/llama-4-scout": {
             id: "meta-llama/llama-4-scout",
@@ -4241,9 +4275,9 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
-                cacheRead: 0,
+                input: 0.216,
+                output: 0.896,
+                cacheRead: 0.135,
                 cacheWrite: 0,
             },
             contextWindow: 163840,
@@ -4649,13 +4683,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.10400000000000001,
+                output: 0.312,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 131072,
-            maxTokens:
+            maxTokens: 120000,
         },
         "amazon/nova-lite-v1": {
             id: "amazon/nova-lite-v1",
@@ -4980,34 +5014,34 @@ export const MODELS = {
             contextWindow: 32768,
             maxTokens: 4096,
         },
-        "cohere/command-r-08-2024": {
-            id: "cohere/command-r-08-2024",
-            name: "Cohere: Command R (08-2024)",
+        "cohere/command-r-plus-08-2024": {
+            id: "cohere/command-r-plus-08-2024",
+            name: "Cohere: Command R+ (08-2024)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 2.5,
+                output: 10,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 128000,
             maxTokens: 4000,
         },
-        "cohere/command-r-
-            id: "cohere/command-r-
-            name: "Cohere: Command R
+        "cohere/command-r-08-2024": {
+            id: "cohere/command-r-08-2024",
+            name: "Cohere: Command R (08-2024)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 0.15,
+                output: 0.6,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -5082,22 +5116,22 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 16384,
         },
-        "meta-llama/llama-3.1-
-            id: "meta-llama/llama-3.1-
-            name: "Meta: Llama 3.1
+        "meta-llama/llama-3.1-405b-instruct": {
+            id: "meta-llama/llama-3.1-405b-instruct",
+            name: "Meta: Llama 3.1 405B Instruct",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 3.5,
+                output: 3.5,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 130815,
+            maxTokens: 4096,
         },
         "meta-llama/llama-3.1-70b-instruct": {
             id: "meta-llama/llama-3.1-70b-instruct",
@@ -5116,22 +5150,22 @@ export const MODELS = {
             contextWindow: 131072,
             maxTokens: 4096,
         },
-        "meta-llama/llama-3.1-
-            id: "meta-llama/llama-3.1-
-            name: "Meta: Llama 3.1
+        "meta-llama/llama-3.1-8b-instruct": {
+            id: "meta-llama/llama-3.1-8b-instruct",
+            name: "Meta: Llama 3.1 8B Instruct",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 0.02,
+                output: 0.03,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 131072,
+            maxTokens: 16384,
         },
         "mistralai/mistral-nemo": {
             id: "mistralai/mistral-nemo",
@@ -5150,9 +5184,9 @@ export const MODELS = {
             contextWindow: 131072,
             maxTokens: 16384,
         },
-        "openai/gpt-4o-mini": {
-            id: "openai/gpt-4o-mini",
-            name: "OpenAI: GPT-4o-mini",
+        "openai/gpt-4o-mini-2024-07-18": {
+            id: "openai/gpt-4o-mini-2024-07-18",
+            name: "OpenAI: GPT-4o-mini (2024-07-18)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -5167,9 +5201,9 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 16384,
         },
-        "openai/gpt-4o-mini
-            id: "openai/gpt-4o-mini
-            name: "OpenAI: GPT-4o-mini
+        "openai/gpt-4o-mini": {
+            id: "openai/gpt-4o-mini",
+            name: "OpenAI: GPT-4o-mini",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -5269,6 +5303,23 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 4096,
         },
+        "openai/gpt-4o-2024-05-13": {
+            id: "openai/gpt-4o-2024-05-13",
+            name: "OpenAI: GPT-4o (2024-05-13)",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 5,
+                output: 15,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 4096,
+        },
         "openai/gpt-4o": {
             id: "openai/gpt-4o",
             name: "OpenAI: GPT-4o",
@@ -5303,23 +5354,6 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 64000,
         },
-        "openai/gpt-4o-2024-05-13": {
-            id: "openai/gpt-4o-2024-05-13",
-            name: "OpenAI: GPT-4o (2024-05-13)",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text", "image"],
-            cost: {
-                input: 5,
-                output: 15,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 128000,
-            maxTokens: 4096,
-        },
         "meta-llama/llama-3-70b-instruct": {
             id: "meta-llama/llama-3-70b-instruct",
             name: "Meta: Llama 3 70B Instruct",
@@ -5439,38 +5473,38 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-3.5-turbo-0613": {
+            id: "openai/gpt-3.5-turbo-0613",
+            name: "OpenAI: GPT-3.5 Turbo (older v0613)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 1,
+                output: 2,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 4095,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-4-turbo-preview": {
+            id: "openai/gpt-4-turbo-preview",
+            name: "OpenAI: GPT-4 Turbo Preview",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 10,
+                output: 30,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 128000,
             maxTokens: 4096,
         },
         "mistralai/mistral-small": {
@@ -5558,38 +5592,38 @@ export const MODELS = {
             contextWindow: 16385,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-3.5-turbo": {
+            id: "openai/gpt-3.5-turbo",
+            name: "OpenAI: GPT-3.5 Turbo",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 0.5,
+                output: 1.5,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 16385,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-4-0314": {
+            id: "openai/gpt-4-0314",
+            name: "OpenAI: GPT-4 (older v0314)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 30,
+                output: 60,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 8191,
             maxTokens: 4096,
         },
         "openai/gpt-4": {
|