@mariozechner/pi-ai 0.15.0 → 0.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models.generated.js
CHANGED
@@ -4462,13 +4462,13 @@ export const MODELS = {
         reasoning: true,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
-            cacheRead: 0,
+            input: 0.19999999999999998,
+            output: 0.88,
+            cacheRead: 0.106,
             cacheWrite: 0,
         },
-        contextWindow:
-        maxTokens:
+        contextWindow: 163840,
+        maxTokens: 4096,
     },
     "mistralai/mistral-small-3.1-24b-instruct:free": {
         id: "mistralai/mistral-small-3.1-24b-instruct:free",
@@ -5048,34 +5048,34 @@ export const MODELS = {
         contextWindow: 200000,
         maxTokens: 8192,
     },
-    "mistralai/ministral-
-        id: "mistralai/ministral-
-        name: "Mistral: Ministral
+    "mistralai/ministral-3b": {
+        id: "mistralai/ministral-3b",
+        name: "Mistral: Ministral 3B",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.04,
+            output: 0.04,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 131072,
         maxTokens: 4096,
     },
-    "mistralai/ministral-
-        id: "mistralai/ministral-
-        name: "Mistral: Ministral
+    "mistralai/ministral-8b": {
+        id: "mistralai/ministral-8b",
+        name: "Mistral: Ministral 8B",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.09999999999999999,
+            output: 0.09999999999999999,
             cacheRead: 0,
             cacheWrite: 0,
         },
@@ -5269,38 +5269,38 @@ export const MODELS = {
         contextWindow: 131072,
         maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-
-        id: "meta-llama/llama-3.1-
-        name: "Meta: Llama 3.1
+    "meta-llama/llama-3.1-70b-instruct": {
+        id: "meta-llama/llama-3.1-70b-instruct",
+        name: "Meta: Llama 3.1 70B Instruct",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 0.39999999999999997,
+            output: 0.39999999999999997,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 131072,
         maxTokens: 4096,
     },
-    "meta-llama/llama-3.1-
-        id: "meta-llama/llama-3.1-
-        name: "Meta: Llama 3.1
+    "meta-llama/llama-3.1-405b-instruct": {
+        id: "meta-llama/llama-3.1-405b-instruct",
+        name: "Meta: Llama 3.1 405B Instruct",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 3.5,
+            output: 3.5,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 130815,
         maxTokens: 4096,
     },
     "mistralai/mistral-nemo": {
@@ -5320,9 +5320,9 @@ export const MODELS = {
         contextWindow: 131072,
         maxTokens: 16384,
     },
-    "openai/gpt-4o-mini
-        id: "openai/gpt-4o-mini
-        name: "OpenAI: GPT-4o-mini
+    "openai/gpt-4o-mini": {
+        id: "openai/gpt-4o-mini",
+        name: "OpenAI: GPT-4o-mini",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
@@ -5337,9 +5337,9 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 16384,
     },
-    "openai/gpt-4o-mini": {
-        id: "openai/gpt-4o-mini",
-        name: "OpenAI: GPT-4o-mini",
+    "openai/gpt-4o-mini-2024-07-18": {
+        id: "openai/gpt-4o-mini-2024-07-18",
+        name: "OpenAI: GPT-4o-mini (2024-07-18)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
@@ -5439,23 +5439,6 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-4o-2024-05-13": {
-        id: "openai/gpt-4o-2024-05-13",
-        name: "OpenAI: GPT-4o (2024-05-13)",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text", "image"],
-        cost: {
-            input: 5,
-            output: 15,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 128000,
-        maxTokens: 4096,
-    },
     "openai/gpt-4o": {
         id: "openai/gpt-4o",
         name: "OpenAI: GPT-4o",
@@ -5490,6 +5473,23 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 64000,
     },
+    "openai/gpt-4o-2024-05-13": {
+        id: "openai/gpt-4o-2024-05-13",
+        name: "OpenAI: GPT-4o (2024-05-13)",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text", "image"],
+        cost: {
+            input: 5,
+            output: 15,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 128000,
+        maxTokens: 4096,
+    },
     "meta-llama/llama-3-70b-instruct": {
         id: "meta-llama/llama-3-70b-instruct",
         name: "Meta: Llama 3 70B Instruct",
@@ -5609,38 +5609,38 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-4-turbo-preview": {
+        id: "openai/gpt-4-turbo-preview",
+        name: "OpenAI: GPT-4 Turbo Preview",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 10,
+            output: 30,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo-0613": {
+        id: "openai/gpt-3.5-turbo-0613",
+        name: "OpenAI: GPT-3.5 Turbo (older v0613)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 1,
+            output: 2,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 4095,
         maxTokens: 4096,
     },
     "mistralai/mistral-tiny": {