@mariozechner/pi-ai 0.50.2 → 0.50.4
This diff shows the changes between publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
- package/README.md +2 -0
- package/dist/env-api-keys.d.ts.map +1 -1
- package/dist/env-api-keys.js +1 -0
- package/dist/env-api-keys.js.map +1 -1
- package/dist/models.generated.d.ts +87 -51
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +103 -67
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +2 -2
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/google-gemini-cli.d.ts.map +1 -1
- package/dist/providers/google-gemini-cli.js +1 -1
- package/dist/providers/google-gemini-cli.js.map +1 -1
- package/dist/providers/google-shared.d.ts.map +1 -1
- package/dist/providers/google-shared.js +2 -2
- package/dist/providers/google-shared.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +14 -0
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/types.d.ts +14 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +1 -0
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +3 -0
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -3533,6 +3533,42 @@ export const MODELS = {
       maxTokens: 128000,
     },
   },
+  "kimi-coding": {
+    "k2p5": {
+      id: "k2p5",
+      name: "Kimi K2.5",
+      api: "anthropic-messages",
+      provider: "kimi-coding",
+      baseUrl: "https://api.kimi.com/coding",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 32768,
+    },
+    "kimi-k2-thinking": {
+      id: "kimi-k2-thinking",
+      name: "Kimi K2 Thinking",
+      api: "anthropic-messages",
+      provider: "kimi-coding",
+      baseUrl: "https://api.kimi.com/coding",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 32768,
+    },
+  },
   "minimax": {
     "MiniMax-M2": {
       id: "MiniMax-M2",
@@ -3632,8 +3668,8 @@ export const MODELS = {
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0,
-        output:
+        input: 0.4,
+        output: 2,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -4887,6 +4923,23 @@ export const MODELS = {
      contextWindow: 204800,
      maxTokens: 131072,
    },
+    "glm-4.7-free": {
+      id: "glm-4.7-free",
+      name: "GLM-4.7 Free",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
    "gpt-5": {
      id: "gpt-5",
      name: "GPT-5",
@@ -5085,7 +5138,24 @@ export const MODELS = {
      cost: {
        input: 0.6,
        output: 3,
-        cacheRead: 0.
+        cacheRead: 0.08,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 262144,
+    },
+    "kimi-k2.5-free": {
+      id: "kimi-k2.5-free",
+      name: "Kimi K2.5 Free",
+      api: "openai-completions",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
        cacheWrite: 0,
      },
      contextWindow: 262144,
@@ -5108,6 +5178,23 @@ export const MODELS = {
      contextWindow: 204800,
      maxTokens: 131072,
    },
+    "minimax-m2.1-free": {
+      id: "minimax-m2.1-free",
+      name: "MiniMax M2.1 Free",
+      api: "anthropic-messages",
+      provider: "opencode",
+      baseUrl: "https://opencode.ai/zen",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
    "qwen3-coder": {
      id: "qwen3-coder",
      name: "Qwen3 Coder",
@@ -5875,23 +5962,6 @@ export const MODELS = {
      contextWindow: 1048576,
      maxTokens: 8192,
    },
-    "google/gemini-2.0-flash-exp:free": {
-      id: "google/gemini-2.0-flash-exp:free",
-      name: "Google: Gemini 2.0 Flash Experimental (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text", "image"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 1048576,
-      maxTokens: 8192,
-    },
    "google/gemini-2.0-flash-lite-001": {
      id: "google/gemini-2.0-flash-lite-001",
      name: "Google: Gemini 2.0 Flash Lite",
@@ -6887,13 +6957,13 @@ export const MODELS = {
      reasoning: true,
      input: ["text", "image"],
      cost: {
-        input: 0.
-        output: 2.
+        input: 0.5,
+        output: 2.8,
        cacheRead: 0,
        cacheWrite: 0,
      },
      contextWindow: 262144,
-      maxTokens:
+      maxTokens: 4096,
    },
    "nex-agi/deepseek-v3.1-nex-n1": {
      id: "nex-agi/deepseek-v3.1-nex-n1",
@@ -9224,7 +9294,7 @@ export const MODELS = {
        cacheWrite: 0,
      },
      contextWindow: 256000,
-      maxTokens:
+      maxTokens: 65536,
    },
    "anthropic/claude-3-haiku": {
      id: "anthropic/claude-3-haiku",
@@ -10136,9 +10206,9 @@ export const MODELS = {
      reasoning: true,
      input: ["text", "image"],
      cost: {
-        input:
-        output:
-        cacheRead: 0.
+        input: 0.6,
+        output: 3,
+        cacheRead: 0.09999999999999999,
        cacheWrite: 0,
      },
      contextWindow: 256000,
@@ -10257,7 +10327,7 @@ export const MODELS = {
      cost: {
        input: 0.09999999999999999,
        output: 0.39999999999999997,
-        cacheRead: 0.
+        cacheRead: 0.03,
        cacheWrite: 0,
      },
      contextWindow: 1047576,
@@ -10461,7 +10531,7 @@ export const MODELS = {
      cost: {
        input: 1.25,
        output: 10,
-        cacheRead: 0.
+        cacheRead: 0.13,
        cacheWrite: 0,
      },
      contextWindow: 128000,
@@ -10478,7 +10548,7 @@ export const MODELS = {
      cost: {
        input: 1.25,
        output: 10,
-        cacheRead: 0.
+        cacheRead: 0.13,
        cacheWrite: 0,
      },
      contextWindow: 400000,
@@ -10486,7 +10556,7 @@ export const MODELS = {
    },
    "openai/gpt-5.2": {
      id: "openai/gpt-5.2",
-      name: "GPT
+      name: "GPT 5.2",
      api: "anthropic-messages",
      provider: "vercel-ai-gateway",
      baseUrl: "https://ai-gateway.vercel.sh",
@@ -10495,7 +10565,7 @@ export const MODELS = {
      cost: {
        input: 1.75,
        output: 14,
-        cacheRead: 0.
+        cacheRead: 0.18,
        cacheWrite: 0,
      },
      contextWindow: 400000,
@@ -10756,40 +10826,6 @@ export const MODELS = {
      contextWindow: 131072,
      maxTokens: 131072,
    },
-    "stealth/sonoma-dusk-alpha": {
-      id: "stealth/sonoma-dusk-alpha",
-      name: "Sonoma Dusk Alpha",
-      api: "anthropic-messages",
-      provider: "vercel-ai-gateway",
-      baseUrl: "https://ai-gateway.vercel.sh",
-      reasoning: false,
-      input: ["text", "image"],
-      cost: {
-        input: 0.19999999999999998,
-        output: 0.5,
-        cacheRead: 0.049999999999999996,
-        cacheWrite: 0,
-      },
-      contextWindow: 2000000,
-      maxTokens: 131072,
-    },
-    "stealth/sonoma-sky-alpha": {
-      id: "stealth/sonoma-sky-alpha",
-      name: "Sonoma Sky Alpha",
-      api: "anthropic-messages",
-      provider: "vercel-ai-gateway",
-      baseUrl: "https://ai-gateway.vercel.sh",
-      reasoning: false,
-      input: ["text", "image"],
-      cost: {
-        input: 0.19999999999999998,
-        output: 0.5,
-        cacheRead: 0.049999999999999996,
-        cacheWrite: 0,
-      },
-      contextWindow: 2000000,
-      maxTokens: 131072,
-    },
    "vercel/v0-1.0-md": {
      id: "vercel/v0-1.0-md",
      name: "v0-1.0-md",
@@ -11077,7 +11113,7 @@ export const MODELS = {
        cacheWrite: 0,
      },
      contextWindow: 65536,
-      maxTokens:
+      maxTokens: 16384,
    },
    "zai/glm-4.6": {
      id: "zai/glm-4.6",