@mariozechner/pi-ai 0.14.2 → 0.16.0

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
@@ -2003,6 +2003,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "z-ai/glm-4.6v": {
+        id: string;
+        name: string;
+        api: "openai-completions";
+        provider: string;
+        baseUrl: string;
+        reasoning: true;
+        input: ("image" | "text")[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "openai/gpt-5.1-codex-max": {
         id: string;
         name: string;
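The substantive change in this release is the new z-ai/glm-4.6v entry; the later hunks appear only to reorder existing entries. A minimal sketch of reading the new entry, assuming MODELS is importable from the package root as the declaration above suggests:

import { MODELS } from "@mariozechner/pi-ai";

// Illustrative lookup only; the field names mirror the declaration above.
const glm = MODELS["z-ai/glm-4.6v"];
console.log(glm.name, glm.contextWindow, glm.maxTokens);
console.log("accepts images:", glm.input.includes("image")); // input: ("image" | "text")[]
console.log("prompt cost per token:", glm.cost.input);
console.log("reasoning model:", glm.reasoning);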
@@ -5029,7 +5046,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "mistralai/ministral-8b": {
+    readonly "mistralai/ministral-3b": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5046,7 +5063,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "mistralai/ministral-3b": {
+    readonly "mistralai/ministral-8b": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5250,7 +5267,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-405b-instruct": {
+    readonly "meta-llama/llama-3.1-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5267,7 +5284,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-70b-instruct": {
+    readonly "meta-llama/llama-3.1-405b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5301,7 +5318,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini-2024-07-18": {
+    readonly "openai/gpt-4o-mini": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5318,7 +5335,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini": {
+    readonly "openai/gpt-4o-mini-2024-07-18": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5420,7 +5437,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-2024-05-13": {
+    readonly "openai/gpt-4o": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5437,7 +5454,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o": {
+    readonly "openai/gpt-4o:extended": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5454,7 +5471,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o:extended": {
+    readonly "openai/gpt-4o-2024-05-13": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5590,7 +5607,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-3.5-turbo-0613": {
+    readonly "openai/gpt-4-turbo-preview": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5607,7 +5624,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4-turbo-preview": {
+    readonly "openai/gpt-3.5-turbo-0613": {
         id: string;
         name: string;
         api: "openai-completions";