@mariozechner/pi-ai 0.50.1 → 0.50.2

This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -3279,6 +3279,260 @@ export const MODELS = {
  maxTokens: 16384,
  },
  },
+ "huggingface": {
+ "MiniMaxAI/MiniMax-M2.1": {
+ id: "MiniMaxAI/MiniMax-M2.1",
+ name: "MiniMax-M2.1",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 1.2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 204800,
+ maxTokens: 131072,
+ },
+ "Qwen/Qwen3-235B-A22B-Thinking-2507": {
+ id: "Qwen/Qwen3-235B-A22B-Thinking-2507",
+ name: "Qwen3-235B-A22B-Thinking-2507",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 131072,
+ },
+ "Qwen/Qwen3-Coder-480B-A35B-Instruct": {
+ id: "Qwen/Qwen3-Coder-480B-A35B-Instruct",
+ name: "Qwen3-Coder-480B-A35B-Instruct",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 2,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 66536,
+ },
+ "Qwen/Qwen3-Next-80B-A3B-Instruct": {
+ id: "Qwen/Qwen3-Next-80B-A3B-Instruct",
+ name: "Qwen3-Next-80B-A3B-Instruct",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.25,
+ output: 1,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 66536,
+ },
+ "Qwen/Qwen3-Next-80B-A3B-Thinking": {
+ id: "Qwen/Qwen3-Next-80B-A3B-Thinking",
+ name: "Qwen3-Next-80B-A3B-Thinking",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 131072,
+ },
+ "XiaomiMiMo/MiMo-V2-Flash": {
+ id: "XiaomiMiMo/MiMo-V2-Flash",
+ name: "MiMo-V2-Flash",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.1,
+ output: 0.3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 4096,
+ },
+ "deepseek-ai/DeepSeek-R1-0528": {
+ id: "deepseek-ai/DeepSeek-R1-0528",
+ name: "DeepSeek-R1-0528",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 3,
+ output: 5,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 163840,
+ maxTokens: 163840,
+ },
+ "deepseek-ai/DeepSeek-V3.2": {
+ id: "deepseek-ai/DeepSeek-V3.2",
+ name: "DeepSeek-V3.2",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.28,
+ output: 0.4,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 163840,
+ maxTokens: 65536,
+ },
+ "moonshotai/Kimi-K2-Instruct": {
+ id: "moonshotai/Kimi-K2-Instruct",
+ name: "Kimi-K2-Instruct",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 1,
+ output: 3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 16384,
+ },
+ "moonshotai/Kimi-K2-Instruct-0905": {
+ id: "moonshotai/Kimi-K2-Instruct-0905",
+ name: "Kimi-K2-Instruct-0905",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 1,
+ output: 3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 16384,
+ },
+ "moonshotai/Kimi-K2-Thinking": {
+ id: "moonshotai/Kimi-K2-Thinking",
+ name: "Kimi-K2-Thinking",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.6,
+ output: 2.5,
+ cacheRead: 0.15,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "moonshotai/Kimi-K2.5": {
+ id: "moonshotai/Kimi-K2.5",
+ name: "Kimi-K2.5",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 0.6,
+ output: 3,
+ cacheRead: 0.1,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "zai-org/GLM-4.7": {
+ id: "zai-org/GLM-4.7",
+ name: "GLM-4.7",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.6,
+ output: 2.2,
+ cacheRead: 0.11,
+ cacheWrite: 0,
+ },
+ contextWindow: 204800,
+ maxTokens: 131072,
+ },
+ "zai-org/GLM-4.7-Flash": {
+ id: "zai-org/GLM-4.7-Flash",
+ name: "GLM-4.7-Flash",
+ api: "openai-completions",
+ provider: "huggingface",
+ baseUrl: "https://router.huggingface.co/v1",
+ compat: { "supportsDeveloperRole": false },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 200000,
+ maxTokens: 128000,
+ },
+ },
  "minimax": {
  "MiniMax-M2": {
  id: "MiniMax-M2",
@@ -4820,6 +5074,40 @@ export const MODELS = {
  contextWindow: 262144,
  maxTokens: 262144,
  },
+ "kimi-k2.5": {
+ id: "kimi-k2.5",
+ name: "Kimi K2.5",
+ api: "openai-completions",
+ provider: "opencode",
+ baseUrl: "https://opencode.ai/zen/v1",
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 0.6,
+ output: 3,
+ cacheRead: 0.1,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "minimax-m2.1": {
+ id: "minimax-m2.1",
+ name: "MiniMax M2.1",
+ api: "openai-completions",
+ provider: "opencode",
+ baseUrl: "https://opencode.ai/zen/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 1.2,
+ cacheRead: 0.1,
+ cacheWrite: 0,
+ },
+ contextWindow: 204800,
+ maxTokens: 131072,
+ },
  "qwen3-coder": {
  id: "qwen3-coder",
  name: "Qwen3 Coder",
@@ -5179,6 +5467,23 @@ export const MODELS = {
  contextWindow: 1000000,
  maxTokens: 64000,
  },
+ "arcee-ai/trinity-large-preview:free": {
+ id: "arcee-ai/trinity-large-preview:free",
+ name: "Arcee AI: Trinity Large Preview (free)",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131000,
+ maxTokens: 4096,
+ },
  "arcee-ai/trinity-mini": {
  id: "arcee-ai/trinity-mini",
  name: "Arcee AI: Trinity Mini",
@@ -5547,7 +5852,7 @@ export const MODELS = {
  cost: {
  input: 0.21,
  output: 0.32,
- cacheRead: 0,
+ cacheRead: 0.21,
  cacheWrite: 0,
  },
  contextWindow: 163840,
@@ -5670,7 +5975,7 @@ export const MODELS = {
  cacheWrite: 0.08333333333333334,
  },
  contextWindow: 1048576,
- maxTokens: 65535,
+ maxTokens: 65536,
  },
  "google/gemini-2.5-pro": {
  id: "google/gemini-2.5-pro",
@@ -6063,23 +6368,6 @@ export const MODELS = {
  contextWindow: 262144,
  maxTokens: 65536,
  },
- "mistralai/devstral-2512:free": {
- id: "mistralai/devstral-2512:free",
- name: "Mistral: Devstral 2 2512 (free)",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
- cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 262144,
- maxTokens: 4096,
- },
  "mistralai/devstral-medium": {
  id: "mistralai/devstral-medium",
  name: "Mistral: Devstral Medium",
@@ -6590,6 +6878,23 @@ export const MODELS = {
  contextWindow: 262144,
  maxTokens: 65535,
  },
+ "moonshotai/kimi-k2.5": {
+ id: "moonshotai/kimi-k2.5",
+ name: "MoonshotAI: Kimi K2.5",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 0.5700000000000001,
+ output: 2.8499999999999996,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
  "nex-agi/deepseek-v3.1-nex-n1": {
  id: "nex-agi/deepseek-v3.1-nex-n1",
  name: "Nex AGI: DeepSeek V3.1 Nex N1",
@@ -6684,13 +6989,13 @@ export const MODELS = {
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.06,
- output: 0.24,
+ input: 0.049999999999999996,
+ output: 0.19999999999999998,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 262144,
- maxTokens: 262144,
+ maxTokens: 4096,
  },
  "nvidia/nemotron-3-nano-30b-a3b:free": {
  id: "nvidia/nemotron-3-nano-30b-a3b:free",
@@ -7944,7 +8249,7 @@ export const MODELS = {
  cost: {
  input: 0.049999999999999996,
  output: 0.25,
- cacheRead: 0,
+ cacheRead: 0.049999999999999996,
  cacheWrite: 0,
  },
  contextWindow: 32000,
@@ -8392,6 +8697,23 @@ export const MODELS = {
  contextWindow: 163840,
  maxTokens: 65536,
  },
+ "upstage/solar-pro-3:free": {
+ id: "upstage/solar-pro-3:free",
+ name: "Upstage: Solar Pro 3 (free)",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 4096,
+ },
  "x-ai/grok-3": {
  id: "x-ai/grok-3",
  name: "xAI: Grok 3",
@@ -8887,6 +9209,23 @@ export const MODELS = {
  contextWindow: 262144,
  maxTokens: 32768,
  },
+ "alibaba/qwen3-max-thinking": {
+ id: "alibaba/qwen3-max-thinking",
+ name: "Qwen 3 Max Thinking",
+ api: "anthropic-messages",
+ provider: "vercel-ai-gateway",
+ baseUrl: "https://ai-gateway.vercel.sh",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 1.2,
+ output: 6,
+ cacheRead: 0.24,
+ cacheWrite: 0,
+ },
+ contextWindow: 256000,
+ maxTokens: 256000,
+ },
  "anthropic/claude-3-haiku": {
  id: "anthropic/claude-3-haiku",
  name: "Claude 3 Haiku",
@@ -9074,6 +9413,23 @@ export const MODELS = {
  contextWindow: 1000000,
  maxTokens: 64000,
  },
+ "arcee-ai/trinity-large-preview": {
+ id: "arcee-ai/trinity-large-preview",
+ name: "Trinity Large Preview",
+ api: "anthropic-messages",
+ provider: "vercel-ai-gateway",
+ baseUrl: "https://ai-gateway.vercel.sh",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.25,
+ output: 1,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131000,
+ maxTokens: 131000,
+ },
  "bytedance/seed-1.6": {
  id: "bytedance/seed-1.6",
  name: "Seed 1.6",
@@ -9771,6 +10127,23 @@ export const MODELS = {
  contextWindow: 256000,
  maxTokens: 16384,
  },
+ "moonshotai/kimi-k2.5": {
+ id: "moonshotai/kimi-k2.5",
+ name: "Kimi K2.5",
+ api: "anthropic-messages",
+ provider: "vercel-ai-gateway",
+ baseUrl: "https://ai-gateway.vercel.sh",
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 1.2,
+ output: 1.2,
+ cacheRead: 0.6,
+ cacheWrite: 0,
+ },
+ contextWindow: 256000,
+ maxTokens: 256000,
+ },
  "nvidia/nemotron-nano-12b-v2-vl": {
  id: "nvidia/nemotron-nano-12b-v2-vl",
  name: "Nvidia Nemotron Nano 12B V2 VL",
@@ -11295,6 +11668,24 @@ export const MODELS = {
  contextWindow: 204800,
  maxTokens: 131072,
  },
+ "glm-4.7-flash": {
+ id: "glm-4.7-flash",
+ name: "GLM-4.7-Flash",
+ api: "openai-completions",
+ provider: "zai",
+ baseUrl: "https://api.z.ai/api/coding/paas/v4",
+ compat: { "supportsDeveloperRole": false, "thinkingFormat": "zai" },
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 200000,
+ maxTokens: 131072,
+ },
  },
  };
  //# sourceMappingURL=models.generated.js.map
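
For orientation, a minimal TypeScript sketch of how one of the entries added in this release might be read. It assumes only what the diff shows (a MODELS object keyed by provider and then model id, with cost, contextWindow and maxTokens fields); the import path, the estimateCost helper, the sample token counts, and the reading of cost values as USD per million tokens are assumptions, not documented package behavior.

  // Minimal sketch. Assumptions: MODELS is importable from the package root
  // (the diff only shows it exported from models.generated.js), and the cost
  // fields are prices in USD per million tokens; estimateCost and the sample
  // token counts below are hypothetical.
  import { MODELS } from "@mariozechner/pi-ai";

  // One of the Hugging Face router entries added in 0.50.2.
  const model = MODELS["huggingface"]["MiniMaxAI/MiniMax-M2.1"];

  // Rough per-request price, ignoring cacheRead/cacheWrite (both 0 for this entry).
  function estimateCost(inputTokens: number, outputTokens: number): number {
    return (
      (inputTokens / 1_000_000) * model.cost.input +
      (outputTokens / 1_000_000) * model.cost.output
    );
  }

  console.log(model.baseUrl);               // https://router.huggingface.co/v1
  console.log(estimateCost(10_000, 2_000)); // ~0.0054 under the per-million-token assumption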