@mariozechner/pi-ai 0.27.1 → 0.27.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models.generated.js
CHANGED
@@ -2871,6 +2871,57 @@ export const MODELS = {
     },
   },
   "openrouter": {
+    "bytedance-seed/seed-1.6-flash": {
+      id: "bytedance-seed/seed-1.6-flash",
+      name: "ByteDance Seed: Seed 1.6 Flash",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.075,
+        output: 0.3,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 16384,
+    },
+    "bytedance-seed/seed-1.6": {
+      id: "bytedance-seed/seed-1.6",
+      name: "ByteDance Seed: Seed 1.6",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.25,
+        output: 2,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 262144,
+      maxTokens: 32768,
+    },
+    "minimax/minimax-m2.1": {
+      id: "minimax/minimax-m2.1",
+      name: "MiniMax: MiniMax M2.1",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.3,
+        output: 1.2,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
     "z-ai/glm-4.7": {
       id: "z-ai/glm-4.7",
       name: "Z.AI: GLM 4.7",
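
The three new OpenRouter entries above share the record shape used throughout MODELS: per-token pricing under cost plus contextWindow and maxTokens limits. The sketch below reads one of the new records to estimate a request's cost; it assumes the cost figures are USD per million tokens, that MODELS nests model records under a provider key as shown in the first hunk, and an import path based on the file location in this package; estimateCost itself is illustrative, not a pi-ai API.

// Illustrative only: cost estimate from a MODELS record (assumes USD per million tokens).
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js"; // path assumed

function estimateCost(model, inputTokens, outputTokens) {
  return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}

const seedFlash = MODELS["openrouter"]["bytedance-seed/seed-1.6-flash"];
// 100,000 input + 2,000 output tokens -> (100000 * 0.075 + 2000 * 0.3) / 1e6 ≈ $0.0081
console.log(estimateCost(seedFlash, 100_000, 2_000));
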
@@ -2880,13 +2931,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output:
-        cacheRead: 0
+        input: 0.39999999999999997,
+        output: 1.5,
+        cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 202752,
+      maxTokens: 65535,
     },
     "google/gemini-3-flash-preview": {
       id: "google/gemini-3-flash-preview",
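
The updated prices here (0.39999999999999997) and in the Ministral hunk below (0.09999999999999999) are the shortest round-trip printings of binary doubles sitting next to the intended decimal prices (0.4 and 0.1), most likely floating-point artifacts of whatever arithmetic the generator applies to the upstream per-token prices rather than deliberately odd pricing. A consumer that displays these values would normally round them; a small sketch (the six-decimal formatting is an arbitrary choice, not something the package prescribes):

// Classic binary floating-point artifact: the sum is the double nearest to 0.3,
// printed with its shortest round-trip representation.
console.log(0.1 + 0.2);                    // 0.30000000000000004

// The registry values are the same kind of artifact; rounding for display
// recovers the intended price.
const display = (n) => Number(n.toFixed(6));
console.log(display(0.39999999999999997)); // 0.4
console.log(display(0.09999999999999999)); // 0.1
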
@@ -2971,7 +3022,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 262144,
     },
     "openai/gpt-5.2-chat": {
       id: "openai/gpt-5.2-chat",
@@ -3254,13 +3305,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
-        cacheRead: 0
+        input: 0.224,
+        output: 0.32,
+        cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 163840,
-      maxTokens:
+      maxTokens: 4096,
     },
     "prime-intellect/intellect-3": {
       id: "prime-intellect/intellect-3",
@@ -4330,7 +4381,7 @@ export const MODELS = {
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 32768,
       maxTokens: 7168,
     },
     "openai/gpt-4o-audio-preview": {
@@ -6050,9 +6101,9 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "anthropic/claude-3.5-haiku": {
-      id: "anthropic/claude-3.5-haiku",
-      name: "Anthropic: Claude 3.5 Haiku",
+    "anthropic/claude-3.5-haiku-20241022": {
+      id: "anthropic/claude-3.5-haiku-20241022",
+      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6067,9 +6118,9 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "anthropic/claude-3.5-haiku
-      id: "anthropic/claude-3.5-haiku
-      name: "Anthropic: Claude 3.5 Haiku
+    "anthropic/claude-3.5-haiku": {
+      id: "anthropic/claude-3.5-haiku",
+      name: "Anthropic: Claude 3.5 Haiku",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
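
After this hunk and the previous one, the dated key "anthropic/claude-3.5-haiku-20241022" and the plain key "anthropic/claude-3.5-haiku" both exist as separate records; the GPT-4o-mini entries further down get the same dated/undated split. Code that pins one form of the id may want a lookup that falls back to the other. A hedged sketch, assuming the provider-keyed MODELS layout shown in the first hunk (resolveOpenRouterModel is hypothetical, not a pi-ai API):

// Hypothetical lookup: try the exact key, then retry with any trailing date suffix stripped.
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js"; // path assumed

function resolveOpenRouterModel(id) {
  const table = MODELS["openrouter"] ?? {};
  if (table[id]) return table[id];
  const base = id.replace(/-\d{4}(?:-?\d{2}){2}$/, ""); // strips "-20241022" or "-2024-07-18"
  return table[base];
}

resolveOpenRouterModel("anthropic/claude-3.5-haiku-20241022"); // exact key
resolveOpenRouterModel("openai/gpt-4o-mini-2024-07-18");       // exact key; would fall back to "openai/gpt-4o-mini" if the dated key were missing
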
@@ -6101,34 +6152,34 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "mistralai/ministral-
-      id: "mistralai/ministral-
-      name: "Mistral: Ministral
+    "mistralai/ministral-8b": {
+      id: "mistralai/ministral-8b",
+      name: "Mistral: Ministral 8B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.09999999999999999,
+        output: 0.09999999999999999,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 131072,
       maxTokens: 4096,
     },
-    "mistralai/ministral-
-      id: "mistralai/ministral-
-      name: "Mistral: Ministral
+    "mistralai/ministral-3b": {
+      id: "mistralai/ministral-3b",
+      name: "Mistral: Ministral 3B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.04,
+        output: 0.04,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6195,13 +6246,13 @@ export const MODELS = {
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.12,
+        output: 0.39,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 32768,
-      maxTokens:
+      maxTokens: 16384,
     },
     "mistralai/pixtral-12b": {
       id: "mistralai/pixtral-12b",
@@ -6322,39 +6373,39 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 4096,
     },
-    "meta-llama/llama-3.1-
-      id: "meta-llama/llama-3.1-
-      name: "Meta: Llama 3.1
+    "meta-llama/llama-3.1-8b-instruct": {
+      id: "meta-llama/llama-3.1-8b-instruct",
+      name: "Meta: Llama 3.1 8B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 0.02,
+        output: 0.03,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 131072,
+      maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-
-      id: "meta-llama/llama-3.1-
-      name: "Meta: Llama 3.1
+    "meta-llama/llama-3.1-405b-instruct": {
+      id: "meta-llama/llama-3.1-405b-instruct",
+      name: "Meta: Llama 3.1 405B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 3.5,
+        output: 3.5,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
-      maxTokens:
+      contextWindow: 10000,
+      maxTokens: 4096,
     },
     "mistralai/mistral-nemo": {
       id: "mistralai/mistral-nemo",
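
The context limits corrected in the hunks above (e.g. contextWindow: 10000 with maxTokens: 4096 for Llama 3.1 405B) matter when sizing requests, assuming the usual semantics of these fields: contextWindow bounds prompt plus completion, while maxTokens bounds the completion alone. A hedged sketch of clamping a completion budget against both limits (clampMaxTokens is illustrative, not a pi-ai API):

// Illustrative only: keep prompt + completion inside the model's context window,
// assuming contextWindow = total token budget and maxTokens = completion cap.
function clampMaxTokens(model, promptTokens, requested) {
  const roomInContext = model.contextWindow - promptTokens;
  return Math.max(0, Math.min(requested, model.maxTokens, roomInContext));
}

// With the updated Llama 3.1 405B limits and an 8,000-token prompt:
clampMaxTokens({ contextWindow: 10000, maxTokens: 4096 }, 8000, 4096); // -> 2000
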
@@ -6373,9 +6424,9 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 16384,
     },
-    "openai/gpt-4o-mini": {
-      id: "openai/gpt-4o-mini",
-      name: "OpenAI: GPT-4o-mini",
+    "openai/gpt-4o-mini-2024-07-18": {
+      id: "openai/gpt-4o-mini-2024-07-18",
+      name: "OpenAI: GPT-4o-mini (2024-07-18)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6390,9 +6441,9 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "openai/gpt-4o-mini
-      id: "openai/gpt-4o-mini
-      name: "OpenAI: GPT-4o-mini
+    "openai/gpt-4o-mini": {
+      id: "openai/gpt-4o-mini",
+      name: "OpenAI: GPT-4o-mini",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6543,34 +6594,34 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 4096,
     },
-    "meta-llama/llama-3-
-      id: "meta-llama/llama-3-
-      name: "Meta: Llama 3
+    "meta-llama/llama-3-70b-instruct": {
+      id: "meta-llama/llama-3-70b-instruct",
+      name: "Meta: Llama 3 70B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.3,
+        output: 0.39999999999999997,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 8192,
       maxTokens: 16384,
     },
-    "meta-llama/llama-3-
-      id: "meta-llama/llama-3-
-      name: "Meta: Llama 3
+    "meta-llama/llama-3-8b-instruct": {
+      id: "meta-llama/llama-3-8b-instruct",
+      name: "Meta: Llama 3 8B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.03,
+        output: 0.06,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6781,38 +6832,38 @@ export const MODELS = {
       contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-4": {
+      id: "openai/gpt-4",
+      name: "OpenAI: GPT-4",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 30,
+        output: 60,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-
-      id: "openai/gpt-
-      name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo": {
+      id: "openai/gpt-3.5-turbo",
+      name: "OpenAI: GPT-3.5 Turbo",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input:
-        output:
+        input: 0.5,
+        output: 1.5,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 16385,
       maxTokens: 4096,
     },
     "openrouter/auto": {