@mariozechner/pi-ai 0.23.0 → 0.23.2

This diff covers publicly released versions of the package as they appear in the supported public registries and is provided for informational purposes only.
@@ -361,6 +361,23 @@ export const MODELS = {
     },
   },
   "google": {
+    "gemini-3-flash-preview": {
+      id: "gemini-3-flash-preview",
+      name: "Gemini 3 Flash Preview",
+      api: "google-generative-ai",
+      provider: "google",
+      baseUrl: "https://generativelanguage.googleapis.com/v1beta",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.15,
+        output: 0.6,
+        cacheRead: 0.0375,
+        cacheWrite: 0,
+      },
+      contextWindow: 1048576,
+      maxTokens: 65536,
+    },
     "gemini-2.5-flash-preview-05-20": {
       id: "gemini-2.5-flash-preview-05-20",
       name: "Gemini 2.5 Flash Preview 05-20",
@@ -2802,6 +2819,23 @@ export const MODELS = {
     },
   },
   "openrouter": {
+    "google/gemini-3-flash-preview": {
+      id: "google/gemini-3-flash-preview",
+      name: "Google: Gemini 3 Flash Preview",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0.5,
+        output: 3,
+        cacheRead: 0.049999999999999996,
+        cacheWrite: 0,
+      },
+      contextWindow: 1048576,
+      maxTokens: 65535,
+    },
     "mistralai/mistral-small-creative": {
       id: "mistralai/mistral-small-creative",
       name: "Mistral: Mistral Small Creative",
@@ -5964,9 +5998,9 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "anthropic/claude-3.5-haiku": {
-      id: "anthropic/claude-3.5-haiku",
-      name: "Anthropic: Claude 3.5 Haiku",
+    "anthropic/claude-3.5-haiku-20241022": {
+      id: "anthropic/claude-3.5-haiku-20241022",
+      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -5981,9 +6015,9 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "anthropic/claude-3.5-haiku-20241022": {
-      id: "anthropic/claude-3.5-haiku-20241022",
-      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+    "anthropic/claude-3.5-haiku": {
+      id: "anthropic/claude-3.5-haiku",
+      name: "Anthropic: Claude 3.5 Haiku",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6015,34 +6049,34 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "mistralai/ministral-3b": {
-      id: "mistralai/ministral-3b",
-      name: "Mistral: Ministral 3B",
+    "mistralai/ministral-8b": {
+      id: "mistralai/ministral-8b",
+      name: "Mistral: Ministral 8B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.04,
-        output: 0.04,
+        input: 0.09999999999999999,
+        output: 0.09999999999999999,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 131072,
       maxTokens: 4096,
     },
-    "mistralai/ministral-8b": {
-      id: "mistralai/ministral-8b",
-      name: "Mistral: Ministral 8B",
+    "mistralai/ministral-3b": {
+      id: "mistralai/ministral-3b",
+      name: "Mistral: Ministral 3B",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.09999999999999999,
-        output: 0.09999999999999999,
+        input: 0.04,
+        output: 0.04,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6134,34 +6168,34 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "cohere/command-r-plus-08-2024": {
-      id: "cohere/command-r-plus-08-2024",
-      name: "Cohere: Command R+ (08-2024)",
+    "cohere/command-r-08-2024": {
+      id: "cohere/command-r-08-2024",
+      name: "Cohere: Command R (08-2024)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 2.5,
-        output: 10,
+        input: 0.15,
+        output: 0.6,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 128000,
       maxTokens: 4000,
     },
-    "cohere/command-r-08-2024": {
-      id: "cohere/command-r-08-2024",
-      name: "Cohere: Command R (08-2024)",
+    "cohere/command-r-plus-08-2024": {
+      id: "cohere/command-r-plus-08-2024",
+      name: "Cohere: Command R+ (08-2024)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.15,
-        output: 0.6,
+        input: 2.5,
+        output: 10,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6219,6 +6253,23 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
+    "meta-llama/llama-3.1-8b-instruct": {
+      id: "meta-llama/llama-3.1-8b-instruct",
+      name: "Meta: Llama 3.1 8B Instruct",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.02,
+        output: 0.03,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 16384,
+    },
     "meta-llama/llama-3.1-405b-instruct": {
       id: "meta-llama/llama-3.1-405b-instruct",
       name: "Meta: Llama 3.1 405B Instruct",
@@ -6253,23 +6304,6 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 4096,
     },
-    "meta-llama/llama-3.1-8b-instruct": {
-      id: "meta-llama/llama-3.1-8b-instruct",
-      name: "Meta: Llama 3.1 8B Instruct",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.02,
-        output: 0.03,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 16384,
-    },
     "mistralai/mistral-nemo": {
       id: "mistralai/mistral-nemo",
       name: "Mistral: Mistral Nemo",
@@ -6457,34 +6491,34 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 64000,
     },
-    "meta-llama/llama-3-8b-instruct": {
-      id: "meta-llama/llama-3-8b-instruct",
-      name: "Meta: Llama 3 8B Instruct",
+    "meta-llama/llama-3-70b-instruct": {
+      id: "meta-llama/llama-3-70b-instruct",
+      name: "Meta: Llama 3 70B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.03,
-        output: 0.06,
+        input: 0.3,
+        output: 0.39999999999999997,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 8192,
       maxTokens: 16384,
     },
-    "meta-llama/llama-3-70b-instruct": {
-      id: "meta-llama/llama-3-70b-instruct",
-      name: "Meta: Llama 3 70B Instruct",
+    "meta-llama/llama-3-8b-instruct": {
+      id: "meta-llama/llama-3-8b-instruct",
+      name: "Meta: Llama 3 8B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.3,
-        output: 0.39999999999999997,
+        input: 0.03,
+        output: 0.06,
         cacheRead: 0,
         cacheWrite: 0,
       },
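
Every entry touched in this diff shares the same shape. Below is a minimal TypeScript sketch of that shape and of a lookup against the nested provider-to-model map, inferred only from the fields visible above; the ModelEntry and getModel names are illustrative and not part of the package's public API.

// Shape inferred from the entries in this diff; field names match the diff,
// but the type and helper names are illustrative only.
type ModelEntry = {
  id: string;
  name: string;
  api: "google-generative-ai" | "openai-completions" | string;
  provider: string;
  baseUrl: string;
  reasoning: boolean;
  input: ("text" | "image")[];
  cost: {
    // Presumably USD per million tokens, judging by the magnitudes (assumption).
    input: number;
    output: number;
    cacheRead: number;
    cacheWrite: number;
  };
  contextWindow: number;
  maxTokens: number;
};

// Hypothetical lookup over the provider -> model map shown in the diff.
declare const MODELS: Record<string, Record<string, ModelEntry>>;

function getModel(provider: string, id: string): ModelEntry | undefined {
  return MODELS[provider]?.[id];
}

// e.g. the entry added in 0.23.2:
const gemini = getModel("google", "gemini-3-flash-preview");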