@mariozechner/pi-ai 0.6.2 → 0.7.1

@@ -4723,7 +4723,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-3b": {
+ readonly "mistralai/ministral-8b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4740,7 +4740,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-8b": {
+ readonly "mistralai/ministral-3b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4961,7 +4961,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4978,7 +4978,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4995,7 +4995,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5029,7 +5029,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini": {
+ readonly "openai/gpt-4o-mini-2024-07-18": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5046,7 +5046,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini-2024-07-18": {
+ readonly "openai/gpt-4o-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5471,7 +5471,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4": {
+ readonly "openai/gpt-4-0314": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5488,7 +5488,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5505,7 +5505,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-0314": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";