@mariozechner/pi-ai 0.10.0 → 0.10.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -136,6 +136,23 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
+ readonly "claude-opus-4-5-20251101": {
+ id: string;
+ name: string;
+ api: "anthropic-messages";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: ("image" | "text")[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "claude-sonnet-4-5": {
  id: string;
  name: string;
@@ -1952,6 +1969,23 @@ export declare const MODELS: {
  };
  };
  readonly openrouter: {
+ readonly "prime-intellect/intellect-3": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: "text"[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "tngtech/tng-r1t-chimera:free": {
  id: string;
  name: string;
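
Both new entries slot into the exported MODELS map with the same record shape as the existing models (id, name, api, provider, baseUrl, reasoning, input, cost, contextWindow, maxTokens). A minimal consumer-side sketch for the OpenRouter addition, assuming MODELS is importable from the package root (the import path is not part of this diff; only the openrouter nesting is visible in the hunk above):

// Hypothetical usage sketch; the field names come from the added declaration.
import { MODELS } from "@mariozechner/pi-ai";

const intellect3 = MODELS.openrouter["prime-intellect/intellect-3"];
console.log(intellect3.cost.input, intellect3.cost.output);   // per-token pricing
console.log(intellect3.contextWindow, intellect3.maxTokens);  // size limits

The new "claude-opus-4-5-20251101" entry from the first hunk reads the same way, presumably under an Anthropic provider key that falls outside that hunk's visible context.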
@@ -4978,7 +5012,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "cohere/command-r-08-2024": {
+ readonly "cohere/command-r-plus-08-2024": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4995,7 +5029,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "cohere/command-r-plus-08-2024": {
+ readonly "cohere/command-r-08-2024": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5148,7 +5182,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini-2024-07-18": {
+ readonly "openai/gpt-4o-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5165,7 +5199,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini": {
+ readonly "openai/gpt-4o-mini-2024-07-18": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5267,7 +5301,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-2024-05-13": {
+ readonly "openai/gpt-4o": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5284,7 +5318,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o": {
+ readonly "openai/gpt-4o:extended": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5301,7 +5335,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o:extended": {
+ readonly "openai/gpt-4o-2024-05-13": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5318,7 +5352,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5335,7 +5369,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5437,7 +5471,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo-0613": {
+ readonly "openai/gpt-4-turbo-preview": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5454,7 +5488,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-turbo-preview": {
+ readonly "openai/gpt-3.5-turbo-0613": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5573,7 +5607,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5590,7 +5624,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4": {
  id: string;
  name: string;
  api: "openai-completions";