@mariozechner/pi-ai 0.12.9 → 0.12.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models.generated.js
CHANGED
@@ -1077,6 +1077,23 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 16384,
     },
+    "gpt-5.1-codex-max": {
+        id: "gpt-5.1-codex-max",
+        name: "GPT-5.1 Codex Max",
+        api: "openai-responses",
+        provider: "openai",
+        baseUrl: "https://api.openai.com/v1",
+        reasoning: true,
+        input: ["text", "image"],
+        cost: {
+            input: 1.25,
+            output: 10,
+            cacheRead: 0.125,
+            cacheWrite: 0,
+        },
+        contextWindow: 400000,
+        maxTokens: 128000,
+    },
     "o3": {
         id: "o3",
         name: "o3",
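Since the new "gpt-5.1-codex-max" entry has the same shape as the existing MODELS entries, its cost fields can be read directly by consumers. A minimal sketch, assuming the generated module is imported from the path shown and that cost values are USD per million tokens (both assumptions, not stated in this diff):

// Sketch only: the import path and the per-million-token pricing unit are assumptions.
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js";

const model = MODELS["gpt-5.1-codex-max"];

// Estimate the dollar cost of one request from token counts (hypothetical numbers).
function estimateCostUsd(m, { inputTokens, cachedTokens, outputTokens }) {
    const perToken = (pricePerMillion) => pricePerMillion / 1_000_000;
    return (
        inputTokens * perToken(m.cost.input) +
        cachedTokens * perToken(m.cost.cacheRead) +
        outputTokens * perToken(m.cost.output)
    );
}

console.log(model.contextWindow); // 400000
console.log(estimateCostUsd(model, { inputTokens: 10_000, cachedTokens: 2_000, outputTokens: 1_000 }));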
@@ -2005,6 +2022,57 @@ export const MODELS = {
         contextWindow: 1000000,
         maxTokens: 65535,
     },
+    "mistralai/ministral-14b-2512": {
+        id: "mistralai/ministral-14b-2512",
+        name: "Mistral: Ministral 3 14B 2512",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text", "image"],
+        cost: {
+            input: 0.19999999999999998,
+            output: 0.19999999999999998,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 262144,
+        maxTokens: 4096,
+    },
+    "mistralai/ministral-8b-2512": {
+        id: "mistralai/ministral-8b-2512",
+        name: "Mistral: Ministral 3 8B 2512",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text", "image"],
+        cost: {
+            input: 0.15,
+            output: 0.15,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 262144,
+        maxTokens: 4096,
+    },
+    "mistralai/ministral-3b-2512": {
+        id: "mistralai/ministral-3b-2512",
+        name: "Mistral: Ministral 3 3B 2512",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text", "image"],
+        cost: {
+            input: 0.09999999999999999,
+            output: 0.09999999999999999,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 131072,
+        maxTokens: 4096,
+    },
     "mistralai/mistral-large-2512": {
         id: "mistralai/mistral-large-2512",
         name: "Mistral: Mistral Large 3 2512",
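The three Ministral 3 entries above are plain additions, so any code that scans MODELS picks them up without changes. A small sketch of such a scan, again with the import path assumed, listing OpenRouter-hosted models that accept image input:

// Sketch: the import path is an assumption; the field names match the entries in this diff.
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js";

const openrouterVisionModels = Object.values(MODELS)
    .filter((m) => m.provider === "openrouter" && m.input.includes("image"))
    .map((m) => m.id);

// After this release the list should include the new Ministral 3 ids:
// "mistralai/ministral-14b-2512", "mistralai/ministral-8b-2512", "mistralai/ministral-3b-2512".
console.log(openrouterVisionModels);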
@@ -3331,39 +3399,39 @@ export const MODELS = {
         contextWindow: 400000,
         maxTokens: 128000,
     },
-    "openai/gpt-oss-120b
-        id: "openai/gpt-oss-120b
-        name: "OpenAI: gpt-oss-120b
+    "openai/gpt-oss-120b": {
+        id: "openai/gpt-oss-120b",
+        name: "OpenAI: gpt-oss-120b",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: true,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.039,
+            output: 0.19,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 131072,
-        maxTokens:
+        maxTokens: 4096,
     },
-    "openai/gpt-oss-120b": {
-        id: "openai/gpt-oss-120b",
-        name: "OpenAI: gpt-oss-120b",
+    "openai/gpt-oss-120b:exacto": {
+        id: "openai/gpt-oss-120b:exacto",
+        name: "OpenAI: gpt-oss-120b (exacto)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: true,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.039,
+            output: 0.19,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 131072,
-        maxTokens:
+        maxTokens: 4096,
     },
     "openai/gpt-oss-20b:free": {
         id: "openai/gpt-oss-20b:free",
@@ -3858,23 +3926,6 @@ export const MODELS = {
         contextWindow: 131072,
         maxTokens: 4096,
     },
-    "mistralai/magistral-small-2506": {
-        id: "mistralai/magistral-small-2506",
-        name: "Mistral: Magistral Small 2506",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: true,
-        input: ["text"],
-        cost: {
-            input: 0.5,
-            output: 1.5,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 40000,
-        maxTokens: 40000,
-    },
     "mistralai/magistral-medium-2506:thinking": {
         id: "mistralai/magistral-medium-2506:thinking",
         name: "Mistral: Magistral Medium 2506 (thinking)",
@@ -3892,23 +3943,6 @@ export const MODELS = {
         contextWindow: 40960,
         maxTokens: 40000,
     },
-    "mistralai/magistral-medium-2506": {
-        id: "mistralai/magistral-medium-2506",
-        name: "Mistral: Magistral Medium 2506",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: true,
-        input: ["text"],
-        cost: {
-            input: 2,
-            output: 5,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 40960,
-        maxTokens: 40000,
-    },
     "google/gemini-2.5-pro-preview": {
         id: "google/gemini-2.5-pro-preview",
         name: "Google: Gemini 2.5 Pro Preview 06-05",
@@ -3977,23 +4011,6 @@ export const MODELS = {
         contextWindow: 1000000,
         maxTokens: 64000,
     },
-    "mistralai/devstral-small-2505": {
-        id: "mistralai/devstral-small-2505",
-        name: "Mistral: Devstral Small 2505",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.06,
-            output: 0.12,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 128000,
-        maxTokens: 4096,
-    },
     "openai/codex-mini": {
         id: "openai/codex-mini",
         name: "OpenAI: Codex Mini",
@@ -4708,23 +4725,6 @@ export const MODELS = {
         contextWindow: 163840,
         maxTokens: 4096,
     },
-    "mistralai/codestral-2501": {
-        id: "mistralai/codestral-2501",
-        name: "Mistral: Codestral 2501",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.3,
-            output: 0.8999999999999999,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 256000,
-        maxTokens: 4096,
-    },
     "deepseek/deepseek-chat": {
         id: "deepseek/deepseek-chat",
         name: "DeepSeek: DeepSeek V3",
@@ -5592,23 +5592,6 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "mistralai/mistral-small": {
-        id: "mistralai/mistral-small",
-        name: "Mistral Small",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.19999999999999998,
-            output: 0.6,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 32768,
-        maxTokens: 4096,
-    },
     "mistralai/mistral-tiny": {
         id: "mistralai/mistral-tiny",
         name: "Mistral Tiny",
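Because this release also removes several OpenRouter entries (magistral-small-2506, magistral-medium-2506, devstral-small-2505, codestral-2501, mistral-small), callers that persist model ids should not assume every stored id still resolves. A defensive sketch under the same import assumption; resolveModel and its fallback id are hypothetical:

// Sketch: guard lookups for ids that were dropped in this release.
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js";

function resolveModel(id, fallbackId = "mistralai/mistral-tiny") {
    const model = MODELS[id] ?? MODELS[fallbackId];
    if (!model) throw new Error(`Unknown model id: ${id}`);
    return model;
}

// "mistralai/mistral-small" is gone in 0.12.11, so this resolves to the fallback entry.
console.log(resolveModel("mistralai/mistral-small").id); // "mistralai/mistral-tiny"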