@mariozechner/pi-ai 0.50.0 → 0.50.2
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/README.md +13 -0
- package/dist/env-api-keys.d.ts.map +1 -1
- package/dist/env-api-keys.js +1 -0
- package/dist/env-api-keys.js.map +1 -1
- package/dist/models.generated.d.ts +413 -42
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +406 -66
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +38 -9
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +12 -0
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses-shared.d.ts.map +1 -1
- package/dist/providers/openai-responses-shared.js +5 -2
- package/dist/providers/openai-responses-shared.js.map +1 -1
- package/dist/providers/openai-responses.d.ts.map +1 -1
- package/dist/providers/openai-responses.js +14 -0
- package/dist/providers/openai-responses.js.map +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +2 -2
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +7 -7
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -3279,6 +3279,260 @@ export const MODELS = {
       maxTokens: 16384,
     },
   },
+  "huggingface": {
+    "MiniMaxAI/MiniMax-M2.1": {
+      id: "MiniMaxAI/MiniMax-M2.1",
+      name: "MiniMax-M2.1",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.3,
+        output: 1.2,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
+    "Qwen/Qwen3-235B-A22B-Thinking-2507": {
+      id: "Qwen/Qwen3-235B-A22B-Thinking-2507",
+      name: "Qwen3-235B-A22B-Thinking-2507",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.3,
+        output: 3,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 131072,
+    },
+    "Qwen/Qwen3-Coder-480B-A35B-Instruct": {
+      id: "Qwen/Qwen3-Coder-480B-A35B-Instruct",
+      name: "Qwen3-Coder-480B-A35B-Instruct",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 2,
+        output: 2,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 66536,
+    },
+    "Qwen/Qwen3-Next-80B-A3B-Instruct": {
+      id: "Qwen/Qwen3-Next-80B-A3B-Instruct",
+      name: "Qwen3-Next-80B-A3B-Instruct",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.25,
+        output: 1,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 66536,
+    },
+    "Qwen/Qwen3-Next-80B-A3B-Thinking": {
+      id: "Qwen/Qwen3-Next-80B-A3B-Thinking",
+      name: "Qwen3-Next-80B-A3B-Thinking",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.3,
+        output: 2,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 131072,
+    },
+    "XiaomiMiMo/MiMo-V2-Flash": {
+      id: "XiaomiMiMo/MiMo-V2-Flash",
+      name: "MiMo-V2-Flash",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.1,
+        output: 0.3,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 4096,
+    },
+    "deepseek-ai/DeepSeek-R1-0528": {
+      id: "deepseek-ai/DeepSeek-R1-0528",
+      name: "DeepSeek-R1-0528",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 3,
+        output: 5,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 163840,
+      maxTokens: 163840,
+    },
+    "deepseek-ai/DeepSeek-V3.2": {
+      id: "deepseek-ai/DeepSeek-V3.2",
+      name: "DeepSeek-V3.2",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.28,
+        output: 0.4,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 163840,
+      maxTokens: 65536,
+    },
+    "moonshotai/Kimi-K2-Instruct": {
+      id: "moonshotai/Kimi-K2-Instruct",
+      name: "Kimi-K2-Instruct",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 1,
+        output: 3,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 16384,
+    },
+    "moonshotai/Kimi-K2-Instruct-0905": {
+      id: "moonshotai/Kimi-K2-Instruct-0905",
+      name: "Kimi-K2-Instruct-0905",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 1,
+        output: 3,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 16384,
+    },
+    "moonshotai/Kimi-K2-Thinking": {
+      id: "moonshotai/Kimi-K2-Thinking",
+      name: "Kimi-K2-Thinking",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.6,
+        output: 2.5,
+        cacheRead: 0.15,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
+    "moonshotai/Kimi-K2.5": {
+      id: "moonshotai/Kimi-K2.5",
+      name: "Kimi-K2.5",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.6,
+        output: 3,
+        cacheRead: 0.1,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
+    "zai-org/GLM-4.7": {
+      id: "zai-org/GLM-4.7",
+      name: "GLM-4.7",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.6,
+        output: 2.2,
+        cacheRead: 0.11,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
+    "zai-org/GLM-4.7-Flash": {
+      id: "zai-org/GLM-4.7-Flash",
+      name: "GLM-4.7-Flash",
+      api: "openai-completions",
+      provider: "huggingface",
+      baseUrl: "https://router.huggingface.co/v1",
+      compat: { "supportsDeveloperRole": false },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 200000,
+      maxTokens: 128000,
+    },
+  },
   "minimax": {
     "MiniMax-M2": {
       id: "MiniMax-M2",
@@ -4633,23 +4887,6 @@ export const MODELS = {
       contextWindow: 204800,
       maxTokens: 131072,
     },
-    "glm-4.7-free": {
-      id: "glm-4.7-free",
-      name: "GLM-4.7",
-      api: "openai-completions",
-      provider: "opencode",
-      baseUrl: "https://opencode.ai/zen/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 204800,
-      maxTokens: 131072,
-    },
     "gpt-5": {
       id: "gpt-5",
       name: "GPT-5",
@@ -4803,23 +5040,6 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
-    "grok-code": {
-      id: "grok-code",
-      name: "Grok Code Fast 1",
-      api: "openai-completions",
-      provider: "opencode",
-      baseUrl: "https://opencode.ai/zen/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 256000,
-      maxTokens: 256000,
-    },
     "kimi-k2": {
       id: "kimi-k2",
       name: "Kimi K2",
@@ -4854,18 +5074,35 @@ export const MODELS = {
       contextWindow: 262144,
       maxTokens: 262144,
     },
-    "
-      id: "
+    "kimi-k2.5": {
+      id: "kimi-k2.5",
+      name: "Kimi K2.5",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.6,
+        output: 3,
+        cacheRead: 0.1,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
+    "minimax-m2.1": {
+      id: "minimax-m2.1",
       name: "MiniMax M2.1",
-      api: "
+      api: "openai-completions",
       provider: "opencode",
-      baseUrl: "https://opencode.ai/zen",
+      baseUrl: "https://opencode.ai/zen/v1",
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0,
-        output:
-        cacheRead: 0,
+        input: 0.3,
+        output: 1.2,
+        cacheRead: 0.1,
        cacheWrite: 0,
       },
       contextWindow: 204800,
@@ -5071,8 +5308,8 @@ export const MODELS = {
       cost: {
         input: 0.7999999999999999,
         output: 4,
-        cacheRead: 0,
-        cacheWrite:
+        cacheRead: 0.08,
+        cacheWrite: 1,
       },
       contextWindow: 200000,
       maxTokens: 8192,
@@ -5230,6 +5467,23 @@ export const MODELS = {
       contextWindow: 1000000,
       maxTokens: 64000,
     },
+    "arcee-ai/trinity-large-preview:free": {
+      id: "arcee-ai/trinity-large-preview:free",
+      name: "Arcee AI: Trinity Large Preview (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131000,
+      maxTokens: 4096,
+    },
     "arcee-ai/trinity-mini": {
       id: "arcee-ai/trinity-mini",
       name: "Arcee AI: Trinity Mini",
@@ -5598,7 +5852,7 @@ export const MODELS = {
       cost: {
         input: 0.21,
         output: 0.32,
-        cacheRead: 0,
+        cacheRead: 0.21,
         cacheWrite: 0,
       },
       contextWindow: 163840,
@@ -5721,7 +5975,7 @@ export const MODELS = {
         cacheWrite: 0.08333333333333334,
       },
       contextWindow: 1048576,
-      maxTokens:
+      maxTokens: 65536,
     },
     "google/gemini-2.5-pro": {
       id: "google/gemini-2.5-pro",
@@ -6114,23 +6368,6 @@ export const MODELS = {
       contextWindow: 262144,
       maxTokens: 65536,
     },
-    "mistralai/devstral-2512:free": {
-      id: "mistralai/devstral-2512:free",
-      name: "Mistral: Devstral 2 2512 (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 262144,
-      maxTokens: 4096,
-    },
     "mistralai/devstral-medium": {
       id: "mistralai/devstral-medium",
       name: "Mistral: Devstral Medium",
@@ -6641,6 +6878,23 @@ export const MODELS = {
       contextWindow: 262144,
       maxTokens: 65535,
     },
+    "moonshotai/kimi-k2.5": {
+      id: "moonshotai/kimi-k2.5",
+      name: "MoonshotAI: Kimi K2.5",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.5700000000000001,
+        output: 2.8499999999999996,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
     "nex-agi/deepseek-v3.1-nex-n1": {
       id: "nex-agi/deepseek-v3.1-nex-n1",
       name: "Nex AGI: DeepSeek V3.1 Nex N1",
@@ -6735,13 +6989,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.049999999999999996,
+        output: 0.19999999999999998,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 4096,
     },
     "nvidia/nemotron-3-nano-30b-a3b:free": {
       id: "nvidia/nemotron-3-nano-30b-a3b:free",
@@ -7995,7 +8249,7 @@ export const MODELS = {
       cost: {
         input: 0.049999999999999996,
         output: 0.25,
-        cacheRead: 0,
+        cacheRead: 0.049999999999999996,
         cacheWrite: 0,
       },
       contextWindow: 32000,
@@ -8443,6 +8697,23 @@ export const MODELS = {
       contextWindow: 163840,
       maxTokens: 65536,
     },
+    "upstage/solar-pro-3:free": {
+      id: "upstage/solar-pro-3:free",
+      name: "Upstage: Solar Pro 3 (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 128000,
+      maxTokens: 4096,
+    },
     "x-ai/grok-3": {
       id: "x-ai/grok-3",
       name: "xAI: Grok 3",
@@ -8938,6 +9209,23 @@ export const MODELS = {
       contextWindow: 262144,
       maxTokens: 32768,
     },
+    "alibaba/qwen3-max-thinking": {
+      id: "alibaba/qwen3-max-thinking",
+      name: "Qwen 3 Max Thinking",
+      api: "anthropic-messages",
+      provider: "vercel-ai-gateway",
+      baseUrl: "https://ai-gateway.vercel.sh",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 1.2,
+        output: 6,
+        cacheRead: 0.24,
+        cacheWrite: 0,
+      },
+      contextWindow: 256000,
+      maxTokens: 256000,
+    },
     "anthropic/claude-3-haiku": {
       id: "anthropic/claude-3-haiku",
       name: "Claude 3 Haiku",
@@ -9125,6 +9413,23 @@ export const MODELS = {
       contextWindow: 1000000,
       maxTokens: 64000,
     },
+    "arcee-ai/trinity-large-preview": {
+      id: "arcee-ai/trinity-large-preview",
+      name: "Trinity Large Preview",
+      api: "anthropic-messages",
+      provider: "vercel-ai-gateway",
+      baseUrl: "https://ai-gateway.vercel.sh",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.25,
+        output: 1,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131000,
+      maxTokens: 131000,
+    },
     "bytedance/seed-1.6": {
       id: "bytedance/seed-1.6",
       name: "Seed 1.6",
@@ -9822,6 +10127,23 @@ export const MODELS = {
       contextWindow: 256000,
       maxTokens: 16384,
     },
+    "moonshotai/kimi-k2.5": {
+      id: "moonshotai/kimi-k2.5",
+      name: "Kimi K2.5",
+      api: "anthropic-messages",
+      provider: "vercel-ai-gateway",
+      baseUrl: "https://ai-gateway.vercel.sh",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 1.2,
+        output: 1.2,
+        cacheRead: 0.6,
+        cacheWrite: 0,
+      },
+      contextWindow: 256000,
+      maxTokens: 256000,
+    },
     "nvidia/nemotron-nano-12b-v2-vl": {
       id: "nvidia/nemotron-nano-12b-v2-vl",
       name: "Nvidia Nemotron Nano 12B V2 VL",
@@ -11346,6 +11668,24 @@ export const MODELS = {
       contextWindow: 204800,
       maxTokens: 131072,
     },
+    "glm-4.7-flash": {
+      id: "glm-4.7-flash",
+      name: "GLM-4.7-Flash",
+      api: "openai-completions",
+      provider: "zai",
+      baseUrl: "https://api.z.ai/api/coding/paas/v4",
+      compat: { "supportsDeveloperRole": false, "thinkingFormat": "zai" },
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 200000,
+      maxTokens: 131072,
+    },
   },
 };
 //# sourceMappingURL=models.generated.js.map
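The bulk of this release is the regenerated model catalog shown above. For orientation only, the sketch below shows how one of the newly added "huggingface" entries could be read from the generated MODELS map and used for a rough cost estimate. It is a minimal, hypothetical example, not code shipped in the package: the root re-export of MODELS, the exact typings, and the assumption that cost values are USD per million tokens are unverified here; only the object shape and field values come from the diff.

```typescript
// Minimal sketch, assuming MODELS is importable from the package root.
// If it is not re-exported there, it lives in dist/models.generated.js
// as `export const MODELS = { ... }` (visible in the hunk headers above).
import { MODELS } from "@mariozechner/pi-ai";

// One of the entries added under the new "huggingface" provider in 0.50.2.
const model = MODELS["huggingface"]["moonshotai/Kimi-K2-Thinking"];

// Fields present in the diff: id, name, api, provider, baseUrl, compat,
// reasoning, input, cost, contextWindow, maxTokens.
console.log(model.id);            // "moonshotai/Kimi-K2-Thinking"
console.log(model.baseUrl);       // "https://router.huggingface.co/v1"
console.log(model.contextWindow); // 262144

// Rough request-cost estimate, assuming cost values are USD per million
// tokens (an assumption; the diff does not state the unit).
function estimateCost(inputTokens: number, outputTokens: number): number {
  return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}

console.log(estimateCost(10_000, 2_000)); // ~0.011 with this entry's pricing
```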