@mariozechner/pi-ai 0.26.1 → 0.27.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1967,6 +1967,23 @@ export declare const MODELS: {
  };
  };
  readonly zai: {
+ readonly "glm-4.7": {
+ id: string;
+ name: string;
+ api: "anthropic-messages";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: "text"[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "glm-4.5-flash": {
  id: string;
  name: string;
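The new Z.AI entry above is a type declaration only; concrete values live in the package implementation. A minimal sketch of reading it back, assuming `MODELS` is importable from the package root as `export declare const MODELS` implies:

    import { MODELS } from "@mariozechner/pi-ai";

    // Newly added Z.AI model: `api` is pinned to the literal "anthropic-messages"
    // and `reasoning` to true; cost fields and limits are typed as plain numbers.
    const glm47 = MODELS.zai["glm-4.7"];
    console.log(glm47.api);                              // "anthropic-messages" per the declaration
    console.log(glm47.contextWindow, glm47.maxTokens);   // numeric limits from the model table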
@@ -2987,6 +3004,23 @@ export declare const MODELS: {
  };
  };
  readonly openrouter: {
+ readonly "z-ai/glm-4.7": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: "text"[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "google/gemini-3-flash-preview": {
  id: string;
  name: string;
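The same model is also added under the openrouter provider, but with `api: "openai-completions"` rather than `"anthropic-messages"`. A hedged sketch of branching on that literal field; the `wireFormat` helper is hypothetical and not part of the package:

    import { MODELS } from "@mariozechner/pi-ai";

    // Hypothetical helper: map the typed `api` literal to a readable label.
    function wireFormat(model: { api: "anthropic-messages" | "openai-completions" }): string {
        return model.api === "anthropic-messages"
            ? "Anthropic Messages API"
            : "OpenAI Chat Completions API";
    }

    console.log(wireFormat(MODELS.zai["glm-4.7"]));              // Anthropic Messages API
    console.log(wireFormat(MODELS.openrouter["z-ai/glm-4.7"]));  // OpenAI Chat Completions API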
@@ -6149,7 +6183,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-3.5-haiku-20241022": {
+ readonly "anthropic/claude-3.5-haiku": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6166,7 +6200,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-3.5-haiku": {
+ readonly "anthropic/claude-3.5-haiku-20241022": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6200,7 +6234,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-8b": {
+ readonly "mistralai/ministral-3b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6217,7 +6251,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-3b": {
+ readonly "mistralai/ministral-8b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6404,7 +6438,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6438,7 +6472,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6472,7 +6506,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini-2024-07-18": {
+ readonly "openai/gpt-4o-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6489,7 +6523,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini": {
+ readonly "openai/gpt-4o-mini-2024-07-18": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6591,7 +6625,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-2024-05-13": {
+ readonly "openai/gpt-4o": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6608,7 +6642,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o": {
+ readonly "openai/gpt-4o:extended": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6625,7 +6659,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o:extended": {
+ readonly "openai/gpt-4o-2024-05-13": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6642,7 +6676,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6659,7 +6693,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6761,7 +6795,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo-0613": {
+ readonly "openai/gpt-4-turbo-preview": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6778,7 +6812,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-turbo-preview": {
+ readonly "openai/gpt-3.5-turbo-0613": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6880,7 +6914,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6897,7 +6931,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4": {
  id: string;
  name: string;
  api: "openai-completions";