@mariozechner/pi-ai 0.22.4 → 0.22.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/models.generated.js
CHANGED
@@ -5964,9 +5964,9 @@ export const MODELS = {
         contextWindow: 32768,
         maxTokens: 4096,
     },
-    "anthropic/claude-3.5-haiku": {
-        id: "anthropic/claude-3.5-haiku",
-        name: "Anthropic: Claude 3.5 Haiku",
+    "anthropic/claude-3.5-haiku-20241022": {
+        id: "anthropic/claude-3.5-haiku-20241022",
+        name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
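The dated snapshot now has its own registry key, while the bare "anthropic/claude-3.5-haiku" alias survives one entry later (next hunk). For consumers that resolve entries by ID out of the exported MODELS map, a lookup might look like the sketch below; the deep dist import path and the describe helper are illustrative assumptions, not documented API of @mariozechner/pi-ai:

// Illustrative only: resolves a model entry from the generated registry.
// The dist import path is assumed from the file shown in this diff.
import { MODELS } from "@mariozechner/pi-ai/dist/models.generated.js";

function describe(modelId) {
    const model = MODELS[modelId];
    if (!model) throw new Error(`unknown model id: ${modelId}`);
    // name, provider, baseUrl, contextWindow and maxTokens all appear verbatim in the entries above.
    return `${model.name} via ${model.provider} (${model.baseUrl}), ` +
        `context ${model.contextWindow}, max output ${model.maxTokens}`;
}

console.log(describe("anthropic/claude-3.5-haiku-20241022"));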
@@ -5981,9 +5981,9 @@ export const MODELS = {
         contextWindow: 200000,
         maxTokens: 8192,
     },
-    "anthropic/claude-3.5-haiku
-        id: "anthropic/claude-3.5-haiku
-        name: "Anthropic: Claude 3.5 Haiku
+    "anthropic/claude-3.5-haiku": {
+        id: "anthropic/claude-3.5-haiku",
+        name: "Anthropic: Claude 3.5 Haiku",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
@@ -6015,34 +6015,34 @@ export const MODELS = {
         contextWindow: 200000,
         maxTokens: 8192,
     },
-    "mistralai/ministral-
-        id: "mistralai/ministral-
-        name: "Mistral: Ministral
+    "mistralai/ministral-8b": {
+        id: "mistralai/ministral-8b",
+        name: "Mistral: Ministral 8B",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.09999999999999999,
+            output: 0.09999999999999999,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 131072,
         maxTokens: 4096,
     },
-    "mistralai/ministral-
-        id: "mistralai/ministral-
-        name: "Mistral: Ministral
+    "mistralai/ministral-3b": {
+        id: "mistralai/ministral-3b",
+        name: "Mistral: Ministral 3B",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.04,
+            output: 0.04,
             cacheRead: 0,
             cacheWrite: 0,
         },
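The cost fields in these entries line up with OpenRouter's USD-per-million-token prices (0.04 for Ministral 3B, roughly 0.1 for Ministral 8B), so a per-request estimate is just a linear combination of token counts. A minimal sketch under that per-million assumption; estimateCostUsd is a hypothetical helper, not something the package exports:

// Hypothetical helper: assumes cost.input / cost.output are USD per 1,000,000 tokens.
function estimateCostUsd(model, inputTokens, outputTokens) {
    const { input, output } = model.cost;
    return (inputTokens / 1e6) * input + (outputTokens / 1e6) * output;
}

// Using the updated Ministral 8B figures from this hunk:
// 120,000 prompt tokens + 2,000 completion tokens comes out to roughly $0.0122.
const ministral8b = { cost: { input: 0.09999999999999999, output: 0.09999999999999999 } };
console.log(estimateCostUsd(ministral8b, 120_000, 2_000).toFixed(4)); // "0.0122"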
@@ -6134,34 +6134,34 @@ export const MODELS = {
         contextWindow: 32768,
         maxTokens: 4096,
     },
-    "cohere/command-r-
-        id: "cohere/command-r-
-        name: "Cohere: Command R
+    "cohere/command-r-08-2024": {
+        id: "cohere/command-r-08-2024",
+        name: "Cohere: Command R (08-2024)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 0.15,
+            output: 0.6,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 128000,
         maxTokens: 4000,
     },
-    "cohere/command-r-08-2024": {
-        id: "cohere/command-r-08-2024",
-        name: "Cohere: Command R (08-2024)",
+    "cohere/command-r-plus-08-2024": {
+        id: "cohere/command-r-plus-08-2024",
+        name: "Cohere: Command R+ (08-2024)",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input:
-            output:
+            input: 2.5,
+            output: 10,
             cacheRead: 0,
             cacheWrite: 0,
         },
@@ -6219,6 +6219,23 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 16384,
     },
+    "meta-llama/llama-3.1-8b-instruct": {
+        id: "meta-llama/llama-3.1-8b-instruct",
+        name: "Meta: Llama 3.1 8B Instruct",
+        api: "openai-completions",
+        provider: "openrouter",
+        baseUrl: "https://openrouter.ai/api/v1",
+        reasoning: false,
+        input: ["text"],
+        cost: {
+            input: 0.02,
+            output: 0.03,
+            cacheRead: 0,
+            cacheWrite: 0,
+        },
+        contextWindow: 131072,
+        maxTokens: 16384,
+    },
     "meta-llama/llama-3.1-405b-instruct": {
         id: "meta-llama/llama-3.1-405b-instruct",
         name: "Meta: Llama 3.1 405B Instruct",
@@ -6253,23 +6270,6 @@ export const MODELS = {
         contextWindow: 131072,
         maxTokens: 4096,
     },
-    "meta-llama/llama-3.1-8b-instruct": {
-        id: "meta-llama/llama-3.1-8b-instruct",
-        name: "Meta: Llama 3.1 8B Instruct",
-        api: "openai-completions",
-        provider: "openrouter",
-        baseUrl: "https://openrouter.ai/api/v1",
-        reasoning: false,
-        input: ["text"],
-        cost: {
-            input: 0.02,
-            output: 0.03,
-            cacheRead: 0,
-            cacheWrite: 0,
-        },
-        contextWindow: 131072,
-        maxTokens: 16384,
-    },
     "mistralai/mistral-nemo": {
         id: "mistralai/mistral-nemo",
         name: "Mistral: Mistral Nemo",
@@ -6457,34 +6457,34 @@ export const MODELS = {
         contextWindow: 128000,
         maxTokens: 64000,
     },
-    "meta-llama/llama-3-
-        id: "meta-llama/llama-3-
-        name: "Meta: Llama 3
+    "meta-llama/llama-3-70b-instruct": {
+        id: "meta-llama/llama-3-70b-instruct",
+        name: "Meta: Llama 3 70B Instruct",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.3,
+            output: 0.39999999999999997,
             cacheRead: 0,
             cacheWrite: 0,
         },
         contextWindow: 8192,
         maxTokens: 16384,
     },
-    "meta-llama/llama-3-
-        id: "meta-llama/llama-3-
-        name: "Meta: Llama 3
+    "meta-llama/llama-3-8b-instruct": {
+        id: "meta-llama/llama-3-8b-instruct",
+        name: "Meta: Llama 3 8B Instruct",
         api: "openai-completions",
         provider: "openrouter",
         baseUrl: "https://openrouter.ai/api/v1",
         reasoning: false,
         input: ["text"],
         cost: {
-            input: 0.
-            output: 0.
+            input: 0.03,
+            output: 0.06,
             cacheRead: 0,
             cacheWrite: 0,
         },
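Prices like 0.09999999999999999 and 0.39999999999999997 above are ordinary binary floating-point artifacts (values a hair below 0.1 and 0.4) produced by whatever arithmetic the generator runs; nothing is lost, but rounding at display time keeps them readable. A small, package-independent sketch:

// Display-time rounding for the generated per-million prices; 4 decimals is an arbitrary choice.
const formatPrice = (value) => `$${Number(value.toFixed(4))}/M tokens`;

console.log(formatPrice(0.09999999999999999)); // "$0.1/M tokens"
console.log(formatPrice(0.39999999999999997)); // "$0.4/M tokens"
console.log(formatPrice(0.06));                // "$0.06/M tokens"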