@mariozechner/pi-ai 0.27.1 → 0.27.2

@@ -3004,6 +3004,23 @@ export declare const MODELS: {
         };
     };
     readonly openrouter: {
+        readonly "minimax/minimax-m2.1": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "z-ai/glm-4.7": {
             id: string;
             name: string;
@@ -6234,7 +6251,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-3b": {
+        readonly "mistralai/ministral-8b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6251,7 +6268,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-8b": {
+        readonly "mistralai/ministral-3b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6438,7 +6455,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-70b-instruct": {
+        readonly "meta-llama/llama-3.1-405b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6455,7 +6472,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-405b-instruct": {
+        readonly "meta-llama/llama-3.1-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6472,7 +6489,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-8b-instruct": {
+        readonly "meta-llama/llama-3.1-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6506,7 +6523,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o-mini": {
+        readonly "openai/gpt-4o-mini-2024-07-18": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6523,7 +6540,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o-mini-2024-07-18": {
+        readonly "openai/gpt-4o-mini": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6625,7 +6642,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o": {
+        readonly "openai/gpt-4o-2024-05-13": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6642,7 +6659,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o:extended": {
+        readonly "openai/gpt-4o": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6659,7 +6676,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o-2024-05-13": {
+        readonly "openai/gpt-4o:extended": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6676,7 +6693,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-8b-instruct": {
+        readonly "meta-llama/llama-3-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6693,7 +6710,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-70b-instruct": {
+        readonly "meta-llama/llama-3-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6795,7 +6812,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4-turbo-preview": {
+        readonly "openai/gpt-3.5-turbo-0613": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6812,7 +6829,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-3.5-turbo-0613": {
+        readonly "openai/gpt-4-turbo-preview": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6897,7 +6914,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4-0314": {
+        readonly "openai/gpt-4": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6914,7 +6931,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-3.5-turbo": {
+        readonly "openai/gpt-4-0314": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6931,7 +6948,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4": {
+        readonly "openai/gpt-3.5-turbo": {
             id: string;
             name: string;
             api: "openai-completions";