@mariozechner/pi-ai 0.17.0 → 0.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/agent/index.js.map +1 -1
- package/dist/models.generated.d.ts +15 -15
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +79 -79
- package/dist/models.generated.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -5065,34 +5065,34 @@ export const MODELS = {
         contextWindow: 200000,
         maxTokens: 8192,
     },
-    "mistralai/ministral-
-        id: "mistralai/ministral-
-        name: "Mistral: Ministral
+    "mistralai/ministral-3b": {
+        id: "mistralai/ministral-3b",
+        name: "Mistral: Ministral 3B",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.04,
+            output: 0.04,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 131072,
         maxTokens: 4096,
     },
-    "mistralai/ministral-
-        id: "mistralai/ministral-
-        name: "Mistral: Ministral
+    "mistralai/ministral-8b": {
+        id: "mistralai/ministral-8b",
+        name: "Mistral: Ministral 8B",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.09999999999999999,
+            output: 0.09999999999999999,
             cacheRead: 0,
             cacheWrite: 0,
         },
@@ -5269,23 +5269,6 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-8b-instruct": {
-        id: "meta-llama/llama-3.1-8b-instruct",
-        name: "Meta: Llama 3.1 8B Instruct",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.02,
-            output: 0.03,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 131072,
-        maxTokens: 16384,
-    },
     "meta-llama/llama-3.1-405b-instruct": {
         id: "meta-llama/llama-3.1-405b-instruct",
         name: "Meta: Llama 3.1 405B Instruct",
@@ -5320,6 +5303,23 @@ export const MODELS = {
         contextWindow: 131072,
         maxTokens: 4096,
     },
+    "meta-llama/llama-3.1-8b-instruct": {
+        id: "meta-llama/llama-3.1-8b-instruct",
+        name: "Meta: Llama 3.1 8B Instruct",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text"],
+        cost: {
+            input: 0.02,
+            output: 0.03,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 131072,
+        maxTokens: 16384,
+    },
     "mistralai/mistral-nemo": {
         id: "mistralai/mistral-nemo",
         name: "Mistral: Mistral Nemo",
@@ -5456,23 +5456,6 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-4o-2024-05-13": {
-        id: "openai/gpt-4o-2024-05-13",
-        name: "OpenAI: GPT-4o (2024-05-13)",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text", "image"],
-        cost: {
-            input: 5,
-            output: 15,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 128000,
-        maxTokens: 4096,
-    },
     "openai/gpt-4o": {
         id: "openai/gpt-4o",
         name: "OpenAI: GPT-4o",
@@ -5507,22 +5490,22 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 64000,
     },
-    "
-        id: "
-        name: "
+    "openai/gpt-4o-2024-05-13": {
+        id: "openai/gpt-4o-2024-05-13",
+        name: "OpenAI: GPT-4o (2024-05-13)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
-        input: ["text"],
+        input: ["text", "image"],
         cost: {
-            input:
-            output:
+            input: 5,
+            output: 15,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
-        maxTokens:
+        contextWindow: 128000,
+        maxTokens: 4096,
     },
     "meta-llama/llama-3-8b-instruct": {
         id: "meta-llama/llama-3-8b-instruct",
@@ -5541,6 +5524,23 @@ export const MODELS = {
         contextWindow: 8192,
         maxTokens: 16384,
     },
+    "meta-llama/llama-3-70b-instruct": {
+        id: "meta-llama/llama-3-70b-instruct",
+        name: "Meta: Llama 3 70B Instruct",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text"],
+        cost: {
+            input: 0.3,
+            output: 0.39999999999999997,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 8192,
+        maxTokens: 16384,
+    },
     "mistralai/mixtral-8x22b-instruct": {
         id: "mistralai/mixtral-8x22b-instruct",
         name: "Mistral: Mixtral 8x22B Instruct",
@@ -5626,38 +5626,38 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-4-turbo-preview": {
+        id: "openai/gpt-4-turbo-preview",
+        name: "OpenAI: GPT-4 Turbo Preview",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 10,
+            output: 30,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo-0613": {
+        id: "openai/gpt-3.5-turbo-0613",
+        name: "OpenAI: GPT-3.5 Turbo (older v0613)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 1,
+            output: 2,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 4095,
         maxTokens: 4096,
     },
     "mistralai/mistral-tiny": {
@@ -5728,9 +5728,9 @@ export const MODELS = {
         contextWindow: 16385,
         maxTokens: 4096,
     },
-    "openai/gpt-4
-        id: "openai/gpt-4
-        name: "OpenAI: GPT-4
+    "openai/gpt-4": {
+        id: "openai/gpt-4",
+        name: "OpenAI: GPT-4",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
@@ -5745,38 +5745,38 @@ export const MODELS = {
         contextWindow: 8191,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo": {
+        id: "openai/gpt-3.5-turbo",
+        name: "OpenAI: GPT-3.5 Turbo",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 0.5,
+            output: 1.5,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 16385,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-4-0314": {
+        id: "openai/gpt-4-0314",
+        name: "OpenAI: GPT-4 (older v0314)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 30,
+            output: 60,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 8191,
         maxTokens: 4096,
     },
     "openrouter/auto": {
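For orientation, each entry in the generated MODELS map pairs an OpenRouter model id with pricing and limit metadata. The sketch below is a minimal, hypothetical illustration of how such an entry could be consumed; the ModelEntry type and estimateCostUsd helper are not part of the package's actual API, and it assumes the cost fields are USD per million tokens.

// Hypothetical sketch; field names mirror the entries visible in this diff.
interface ModelEntry {
    id: string;
    name: string;
    api: string;
    provider: string;
    baseUrl: string;
    reasoning: boolean;
    input: string[];
    cost: { input: number; output: number; cacheRead: number; cacheWrite: number };
    contextWindow: number;
    maxTokens: number;
}

// Assumption: cost values are USD per million tokens.
function estimateCostUsd(model: ModelEntry, inputTokens: number, outputTokens: number): number {
    return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}

// Example using the "openai/gpt-4o-2024-05-13" values from this diff (input: 5, output: 15).
const gpt4o20240513: ModelEntry = {
    id: "openai/gpt-4o-2024-05-13",
    name: "OpenAI: GPT-4o (2024-05-13)",
    api: "openai-completions",
    provider: "openrouter",
    baseUrl: "https://openrouter.ai/api/v1",
    reasoning: false,
    input: ["text", "image"],
    cost: { input: 5, output: 15, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 128000,
    maxTokens: 4096,
};

// 10,000 input + 2,000 output tokens -> (10000*5 + 2000*15) / 1e6 = 0.08 USD
console.log(estimateCostUsd(gpt4o20240513, 10_000, 2_000));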