@mariozechner/pi-ai 0.18.8 → 0.19.1

This diff shows the changes between publicly released versions of the package as they appear in their public registries and is provided for informational purposes only.
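In the hunks shown here, only existing model keys of the generated `MODELS` declaration are reordered (for example, `anthropic/claude-3.5-haiku` and `anthropic/claude-3.5-haiku-20241022` trade places); the set of keys and the declared shape of each entry are the same in both versions. Based on the fields visible in these hunks, each entry roughly follows the shape sketched below. This is an inference from the declaration fragment in this diff, not the package's exact generated type, and real entries may carry fields that these hunks do not show.

```typescript
// Sketch of one MODELS entry, inferred from the .d.ts fragment in this diff.
// Field names and types are taken from the visible declaration; anything
// beyond these five fields is outside what the hunks show.
type ModelEntry = {
    id: string;
    name: string;
    api: "openai-completions"; // other entries may declare a different api literal
    contextWindow: number;
    maxTokens: number;
};

// Hypothetical lookup for illustration: since these hunks only reorder keys
// within the declaration, indexing MODELS by model id behaves the same in
// 0.18.8 and 0.19.1.
// const haiku: ModelEntry = MODELS["anthropic/claude-3.5-haiku"];
```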
@@ -5575,7 +5575,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-3.5-haiku": {
+ readonly "anthropic/claude-3.5-haiku-20241022": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5592,7 +5592,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-3.5-haiku-20241022": {
+ readonly "anthropic/claude-3.5-haiku": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5626,7 +5626,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-3b": {
+ readonly "mistralai/ministral-8b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5643,7 +5643,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-8b": {
+ readonly "mistralai/ministral-3b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5830,7 +5830,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5847,7 +5847,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5898,7 +5898,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini": {
+ readonly "openai/gpt-4o-mini-2024-07-18": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5915,7 +5915,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini-2024-07-18": {
+ readonly "openai/gpt-4o-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6068,7 +6068,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6085,7 +6085,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6289,7 +6289,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4-0314": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6306,7 +6306,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-0314": {
+ readonly "openai/gpt-4": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6323,7 +6323,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";