@mariozechner/pi-ai 0.11.4 → 0.11.5

@@ -1969,6 +1969,23 @@ export declare const MODELS: {
  };
  };
  readonly openrouter: {
+ readonly "deepseek/deepseek-v3.2": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: "text"[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "prime-intellect/intellect-3": {
  id: string;
  name: string;
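The block added above gives deepseek/deepseek-v3.2 the same shape as the other OpenRouter entries: string metadata, a literal reasoning: true flag, "text"-only input, numeric cost fields (input, output, cacheRead, cacheWrite), and numeric context/output limits. A minimal sketch of reading the new entry, assuming MODELS is re-exported from the package root (only the generated .d.ts appears in this diff, so the import path is an assumption):

import { MODELS } from "@mariozechner/pi-ai";

// Typed per the declaration added above; every field used here appears in the new block.
const model = MODELS.openrouter["deepseek/deepseek-v3.2"];

console.log(model.id, model.name, model.provider);
console.log("context window:", model.contextWindow, "max tokens:", model.maxTokens);
console.log("cost (input / output):", model.cost.input, model.cost.output);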
@@ -5114,7 +5131,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5131,7 +5148,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5148,7 +5165,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5301,7 +5318,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o": {
+ readonly "openai/gpt-4o-2024-05-13": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5318,7 +5335,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o:extended": {
+ readonly "openai/gpt-4o": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5335,7 +5352,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-2024-05-13": {
+ readonly "openai/gpt-4o:extended": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5352,7 +5369,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5369,7 +5386,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5471,7 +5488,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-turbo-preview": {
+ readonly "openai/gpt-3.5-turbo-0613": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5488,7 +5505,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo-0613": {
+ readonly "openai/gpt-4-turbo-preview": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5590,7 +5607,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-0314": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5607,7 +5624,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4-0314": {
  id: string;
  name: string;
  api: "openai-completions";