@mariozechner/pi-ai 0.12.9 → 0.12.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models.generated.js
CHANGED
@@ -1196,6 +1196,23 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 16384,
         },
+        "gpt-5.1-codex-max": {
+            id: "gpt-5.1-codex-max",
+            name: "GPT-5.1 Codex Max",
+            api: "openai-responses",
+            provider: "openai",
+            baseUrl: "https://api.openai.com/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 1.25,
+                output: 10,
+                cacheRead: 0.125,
+                cacheWrite: 0,
+            },
+            contextWindow: 400000,
+            maxTokens: 128000,
+        },
     },
     groq: {
         "llama-3.1-8b-instant": {
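The new entry's cost block has the same shape as the existing ones; by the usual convention for these fields the rates are USD per million tokens, though the diff itself does not state the unit. A minimal sketch of how a consumer might turn those rates into a per-request estimate, assuming MODELS is reachable from the package's entry point (the diff only shows dist/models.generated.js) and using a hypothetical estimateCostUsd helper that is not part of the package:

// Sketch only: assumes the cost rates in MODELS are USD per million tokens
// and that the registry is keyed by provider, then model id, as in this diff.
// estimateCostUsd is a hypothetical helper, not an export of the package.
import { MODELS } from "@mariozechner/pi-ai";

function estimateCostUsd(model, usage) {
    const { cost } = model;
    return (
        (usage.inputTokens * cost.input +
            usage.outputTokens * cost.output +
            usage.cacheReadTokens * cost.cacheRead) /
        1_000_000
    );
}

const codexMax = MODELS.openai["gpt-5.1-codex-max"];
// e.g. 12k fresh input tokens, 50k cache-read tokens, 2k output tokens
console.log(estimateCostUsd(codexMax, {
    inputTokens: 12_000,
    outputTokens: 2_000,
    cacheReadTokens: 50_000,
}));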
@@ -3858,23 +3875,6 @@ export const MODELS = {
             contextWindow: 131072,
             maxTokens: 4096,
         },
-        "mistralai/magistral-small-2506": {
-            id: "mistralai/magistral-small-2506",
-            name: "Mistral: Magistral Small 2506",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: true,
-            input: ["text"],
-            cost: {
-                input: 0.5,
-                output: 1.5,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 40000,
-            maxTokens: 40000,
-        },
         "mistralai/magistral-medium-2506:thinking": {
             id: "mistralai/magistral-medium-2506:thinking",
             name: "Mistral: Magistral Medium 2506 (thinking)",
@@ -3892,23 +3892,6 @@ export const MODELS = {
             contextWindow: 40960,
             maxTokens: 40000,
         },
-        "mistralai/magistral-medium-2506": {
-            id: "mistralai/magistral-medium-2506",
-            name: "Mistral: Magistral Medium 2506",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: true,
-            input: ["text"],
-            cost: {
-                input: 2,
-                output: 5,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 40960,
-            maxTokens: 40000,
-        },
         "google/gemini-2.5-pro-preview": {
             id: "google/gemini-2.5-pro-preview",
             name: "Google: Gemini 2.5 Pro Preview 06-05",
@@ -3977,23 +3960,6 @@ export const MODELS = {
             contextWindow: 1000000,
             maxTokens: 64000,
         },
-        "mistralai/devstral-small-2505": {
-            id: "mistralai/devstral-small-2505",
-            name: "Mistral: Devstral Small 2505",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text"],
-            cost: {
-                input: 0.06,
-                output: 0.12,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 128000,
-            maxTokens: 4096,
-        },
         "openai/codex-mini": {
             id: "openai/codex-mini",
             name: "OpenAI: Codex Mini",
@@ -4708,23 +4674,6 @@ export const MODELS = {
             contextWindow: 163840,
             maxTokens: 4096,
         },
-        "mistralai/codestral-2501": {
-            id: "mistralai/codestral-2501",
-            name: "Mistral: Codestral 2501",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text"],
-            cost: {
-                input: 0.3,
-                output: 0.8999999999999999,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 256000,
-            maxTokens: 4096,
-        },
         "deepseek/deepseek-chat": {
             id: "deepseek/deepseek-chat",
             name: "DeepSeek: DeepSeek V3",
@@ -5592,23 +5541,6 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 4096,
         },
-        "mistralai/mistral-small": {
-            id: "mistralai/mistral-small",
-            name: "Mistral Small",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text"],
-            cost: {
-                input: 0.19999999999999998,
-                output: 0.6,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 32768,
-            maxTokens: 4096,
-        },
         "mistralai/mistral-tiny": {
             id: "mistralai/mistral-tiny",
             name: "Mistral Tiny",
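The five OpenRouter entries above are deleted outright rather than renamed, so callers that still pass one of those ids will get an undefined lookup after upgrading to 0.12.10. A small defensive sketch, assuming the MODELS shape shown in these hunks; the resolveModel helper and the fallback id are illustrative only, not part of the package's API:

// Sketch only: assumes MODELS is keyed by provider, then model id, as above.
// resolveModel and the fallback id are illustrative, not package exports.
import { MODELS } from "@mariozechner/pi-ai";

function resolveModel(provider, id, fallbackId) {
    const byProvider = MODELS[provider] ?? {};
    if (byProvider[id]) return byProvider[id];
    console.warn(`${provider}/${id} is no longer listed; using ${fallbackId}`);
    return byProvider[fallbackId];
}

// "mistralai/magistral-small-2506" was dropped in 0.12.10, so this call
// now resolves to the fallback entry instead of returning undefined.
const model = resolveModel(
    "openrouter",
    "mistralai/magistral-small-2506",
    "mistralai/mistral-tiny",
);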