@mariozechner/pi-ai 0.18.2 → 0.18.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/models.generated.d.ts +121 -2
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +134 -15
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +8 -0
- package/dist/providers/openai-completions.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -839,6 +839,23 @@ export const MODELS = {
             contextWindow: 200000,
             maxTokens: 100000,
         },
+        "gpt-5.2-pro": {
+            id: "gpt-5.2-pro",
+            name: "GPT-5.2 Pro",
+            api: "openai-responses",
+            provider: "openai",
+            baseUrl: "https://api.openai.com/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 21,
+                output: 168,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 400000,
+            maxTokens: 128000,
+        },
         "gpt-4-turbo": {
             id: "gpt-4-turbo",
             name: "GPT-4 Turbo",
@@ -890,6 +907,23 @@ export const MODELS = {
             contextWindow: 200000,
             maxTokens: 100000,
         },
+        "gpt-5.2-chat-latest": {
+            id: "gpt-5.2-chat-latest",
+            name: "GPT-5.2 Chat",
+            api: "openai-responses",
+            provider: "openai",
+            baseUrl: "https://api.openai.com/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 1.75,
+                output: 14,
+                cacheRead: 0.175,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 16384,
+        },
         "gpt-5.1": {
             id: "gpt-5.1",
             name: "GPT-5.1",
@@ -1179,6 +1213,23 @@ export const MODELS = {
             contextWindow: 400000,
             maxTokens: 272000,
         },
+        "gpt-5.2": {
+            id: "gpt-5.2",
+            name: "GPT-5.2",
+            api: "openai-responses",
+            provider: "openai",
+            baseUrl: "https://api.openai.com/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 1.75,
+                output: 14,
+                cacheRead: 0.175,
+                cacheWrite: 0,
+            },
+            contextWindow: 400000,
+            maxTokens: 128000,
+        },
         "gpt-5.1-chat-latest": {
             id: "gpt-5.1-chat-latest",
             name: "GPT-5.1 Chat",
@@ -2090,6 +2141,23 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 128000,
         },
+        "mistral-small-2506": {
+            id: "mistral-small-2506",
+            name: "Mistral Small 3.2",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.1,
+                output: 0.3,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 16384,
+        },
         "ministral-3b-latest": {
             id: "ministral-3b-latest",
             name: "Ministral 3B",
@@ -2398,6 +2466,57 @@ export const MODELS = {
         },
     },
     openrouter: {
+        "openai/gpt-5.2-chat": {
+            id: "openai/gpt-5.2-chat",
+            name: "OpenAI: GPT-5.2 Chat",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 1.75,
+                output: 14,
+                cacheRead: 0.175,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 16384,
+        },
+        "openai/gpt-5.2-pro": {
+            id: "openai/gpt-5.2-pro",
+            name: "OpenAI: GPT-5.2 Pro",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 21,
+                output: 168,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 400000,
+            maxTokens: 128000,
+        },
+        "openai/gpt-5.2": {
+            id: "openai/gpt-5.2",
+            name: "OpenAI: GPT-5.2",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 1.75,
+                output: 14,
+                cacheRead: 0.175,
+                cacheWrite: 0,
+            },
+            contextWindow: 400000,
+            maxTokens: 128000,
+        },
         "mistralai/devstral-2512:free": {
             id: "mistralai/devstral-2512:free",
             name: "Mistral: Devstral 2 2512 (free)",
@@ -2472,7 +2591,7 @@ export const MODELS = {
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
-            reasoning:
+            reasoning: false,
             input: ["text"],
             cost: {
                 input: 0,
@@ -2645,9 +2764,9 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
-                cacheRead: 0.
+                input: 0.25,
+                output: 0.38,
+                cacheRead: 0.19,
                 cacheWrite: 0,
             },
             contextWindow: 163840,
@@ -3189,13 +3308,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 1.
-                cacheRead: 0
+                input: 0.44,
+                output: 1.76,
+                cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 204800,
+            maxTokens: 131072,
         },
         "anthropic/claude-sonnet-4.5": {
            id: "anthropic/claude-sonnet-4.5",
@@ -3478,13 +3597,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output:
+                input: 0.09,
+                output: 1.1,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 262144,
-            maxTokens:
+            maxTokens: 4096,
         },
         "meituan/longcat-flash-chat:free": {
             id: "meituan/longcat-flash-chat:free",
@@ -4223,7 +4342,7 @@ export const MODELS = {
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
-            reasoning:
+            reasoning: false,
             input: ["text"],
             cost: {
                 input: 0.071,
@@ -5314,13 +5433,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
+                input: 0.09999999999999999,
                 output: 0.32,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 131072,
-            maxTokens:
+            maxTokens: 16384,
         },
         "amazon/nova-lite-v1": {
             id: "amazon/nova-lite-v1",
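For orientation, the entries added in this release follow the same shape as the rest of models.generated.js: a per-provider map of model ids to metadata (api, baseUrl, reasoning, input modalities, cost, contextWindow, maxTokens). The sketch below shows one way the new gpt-5.2 entry could be consumed. It is a minimal sketch, not part of the package: it assumes MODELS is re-exported from the package root, that the OpenAI entries sit under an openai key (the diff only shows the openrouter key explicitly), and that cost values are USD per million tokens, none of which this diff confirms.

    // Sketch only. Assumptions not confirmed by this diff: MODELS is re-exported
    // from the package root, OpenAI entries live under MODELS.openai, and cost
    // values are USD per million tokens.
    import { MODELS } from "@mariozechner/pi-ai";

    const gpt52 = MODELS.openai["gpt-5.2"];

    // Rough per-call cost estimate under the per-million-token assumption.
    function estimateCostUsd(inputTokens: number, outputTokens: number): number {
        return (
            (inputTokens / 1_000_000) * gpt52.cost.input +
            (outputTokens / 1_000_000) * gpt52.cost.output
        );
    }

    console.log(gpt52.contextWindow); // 400000
    console.log(estimateCostUsd(10_000, 2_000)); // ~0.0455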