@mariozechner/pi-ai 0.10.1 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models.generated.js
CHANGED
@@ -138,6 +138,23 @@ export const MODELS = {
         contextWindow: 200000,
         maxTokens: 4096,
     },
+    "claude-opus-4-5-20251101": {
+        id: "claude-opus-4-5-20251101",
+        name: "Claude Opus 4.5",
+        api: "anthropic-messages",
+        provider: "anthropic",
+        baseUrl: "https://api.anthropic.com",
+        reasoning: true,
+        input: ["text", "image"],
+        cost: {
+            input: 5,
+            output: 25,
+            cacheRead: 0.5,
+            cacheWrite: 6.25,
+        },
+        contextWindow: 200000,
+        maxTokens: 64000,
+    },
     "claude-sonnet-4-5": {
         id: "claude-sonnet-4-5",
         name: "Claude Sonnet 4.5 (latest)",
@@ -2020,7 +2037,24 @@ export const MODELS = {
             cacheWrite: 6.25,
         },
         contextWindow: 200000,
-        maxTokens:
+        maxTokens: 64000,
+    },
+    "openrouter/bert-nebulon-alpha": {
+        id: "openrouter/bert-nebulon-alpha",
+        name: "Bert-Nebulon Alpha",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text", "image"],
+        cost: {
+            input: 0,
+            output: 0,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 256000,
+        maxTokens: 4096,
     },
     "allenai/olmo-3-7b-instruct": {
         id: "allenai/olmo-3-7b-instruct",
@@ -2490,13 +2524,13 @@ export const MODELS = {
         reasoning: true,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 1.
+            input: 0.44,
+            output: 1.76,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
-        maxTokens:
+        contextWindow: 204800,
+        maxTokens: 131072,
     },
     "anthropic/claude-sonnet-4.5": {
         id: "anthropic/claude-sonnet-4.5",
@@ -3358,7 +3392,7 @@ export const MODELS = {
         input: ["text"],
         cost: {
             input: 0.35,
-            output: 1.
+            output: 1.55,
             cacheRead: 0,
             cacheWrite: 0,
         },
@@ -4997,34 +5031,34 @@ export const MODELS = {
         contextWindow: 32768,
         maxTokens: 4096,
     },
-    "cohere/command-r-
-        id: "cohere/command-r-
-        name: "Cohere: Command R
+    "cohere/command-r-08-2024": {
+        id: "cohere/command-r-08-2024",
+        name: "Cohere: Command R (08-2024)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 0.15,
+            output: 0.6,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 128000,
         maxTokens: 4000,
     },
-    "cohere/command-r-08-2024": {
-        id: "cohere/command-r-08-2024",
-        name: "Cohere: Command R (08-2024)",
+    "cohere/command-r-plus-08-2024": {
+        id: "cohere/command-r-plus-08-2024",
+        name: "Cohere: Command R+ (08-2024)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 2.5,
+            output: 10,
             cacheRead: 0,
             cacheWrite: 0,
         },
@@ -5065,23 +5099,6 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "nousresearch/hermes-3-llama-3.1-70b": {
-        id: "nousresearch/hermes-3-llama-3.1-70b",
-        name: "Nous: Hermes 3 70B Instruct",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.3,
-            output: 0.3,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 65536,
-        maxTokens: 4096,
-    },
     "openai/gpt-4o-2024-08-06": {
         id: "openai/gpt-4o-2024-08-06",
         name: "OpenAI: GPT-4o (2024-08-06)",
@@ -5167,9 +5184,9 @@ export const MODELS = {
         contextWindow: 131072,
         maxTokens: 16384,
     },
-    "openai/gpt-4o-mini": {
-        id: "openai/gpt-4o-mini",
-        name: "OpenAI: GPT-4o-mini",
+    "openai/gpt-4o-mini-2024-07-18": {
+        id: "openai/gpt-4o-mini-2024-07-18",
+        name: "OpenAI: GPT-4o-mini (2024-07-18)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
@@ -5184,9 +5201,9 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 16384,
     },
-    "openai/gpt-4o-mini
-        id: "openai/gpt-4o-mini
-        name: "OpenAI: GPT-4o-mini
+    "openai/gpt-4o-mini": {
+        id: "openai/gpt-4o-mini",
+        name: "OpenAI: GPT-4o-mini",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
@@ -5286,6 +5303,23 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
+    "openai/gpt-4o-2024-05-13": {
+        id: "openai/gpt-4o-2024-05-13",
+        name: "OpenAI: GPT-4o (2024-05-13)",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text", "image"],
+        cost: {
+            input: 5,
+            output: 15,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 128000,
+        maxTokens: 4096,
+    },
     "openai/gpt-4o": {
         id: "openai/gpt-4o",
         name: "OpenAI: GPT-4o",
@@ -5320,22 +5354,22 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 64000,
     },
-    "
-        id: "
-        name: "
+    "meta-llama/llama-3-70b-instruct": {
+        id: "meta-llama/llama-3-70b-instruct",
+        name: "Meta: Llama 3 70B Instruct",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
-        input: ["text"
+        input: ["text"],
         cost: {
-            input:
-            output:
+            input: 0.3,
+            output: 0.39999999999999997,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
-        maxTokens:
+        contextWindow: 8192,
+        maxTokens: 16384,
     },
     "meta-llama/llama-3-8b-instruct": {
         id: "meta-llama/llama-3-8b-instruct",
@@ -5354,23 +5388,6 @@ export const MODELS = {
         contextWindow: 8192,
         maxTokens: 16384,
     },
-    "meta-llama/llama-3-70b-instruct": {
-        id: "meta-llama/llama-3-70b-instruct",
-        name: "Meta: Llama 3 70B Instruct",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.3,
-            output: 0.39999999999999997,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 8192,
-        maxTokens: 16384,
-    },
     "mistralai/mixtral-8x22b-instruct": {
         id: "mistralai/mixtral-8x22b-instruct",
         name: "Mistral: Mixtral 8x22B Instruct",
@@ -5456,38 +5473,38 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo-0613": {
+        id: "openai/gpt-3.5-turbo-0613",
+        name: "OpenAI: GPT-3.5 Turbo (older v0613)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 1,
+            output: 2,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 4095,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-4-turbo-preview": {
+        id: "openai/gpt-4-turbo-preview",
+        name: "OpenAI: GPT-4 Turbo Preview",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 10,
+            output: 30,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 128000,
         maxTokens: 4096,
     },
     "mistralai/mistral-small": {
@@ -5592,38 +5609,38 @@ export const MODELS = {
         contextWindow: 8191,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-4": {
+        id: "openai/gpt-4",
+        name: "OpenAI: GPT-4",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 30,
+            output: 60,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 8191,
         maxTokens: 4096,
     },
-    "openai/gpt-
-        id: "openai/gpt-
-        name: "OpenAI: GPT-
+    "openai/gpt-3.5-turbo": {
+        id: "openai/gpt-3.5-turbo",
+        name: "OpenAI: GPT-3.5 Turbo",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 0.5,
+            output: 1.5,
             cacheRead: 0,
             cacheWrite: 0,
         },
-        contextWindow:
+        contextWindow: 16385,
         maxTokens: 4096,
     },
     "openrouter/auto": {
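
For orientation, a minimal usage sketch of the updated registry. It assumes the package makes the MODELS map shown above importable from dist/models.generated.js and that the cost fields are USD per million tokens; neither assumption is stated in this diff, and the token counts below are made up.

// Hypothetical consumer of the generated MODELS map (assumed import path).
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js";

// Look up the newly added Claude Opus 4.5 entry.
const model = MODELS["claude-opus-4-5-20251101"];

// Rough per-call price estimate from the cost fields (assumed USD per million tokens).
const inputTokens = 12_000;
const outputTokens = 2_000;
const estimatedUsd =
    (inputTokens / 1_000_000) * model.cost.input +
    (outputTokens / 1_000_000) * model.cost.output;

console.log(`${model.name}: ~$${estimatedUsd.toFixed(4)} per call`);
console.log(`context window: ${model.contextWindow}, max output tokens: ${model.maxTokens}`);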