@mariozechner/pi-ai 0.12.2 → 0.12.4
@@ -5080,7 +5080,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-
+    readonly "cohere/command-r-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5097,7 +5097,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-08-2024": {
+    readonly "cohere/command-r-plus-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5165,7 +5165,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5182,7 +5182,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-405b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5199,7 +5199,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5233,7 +5233,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini": {
+    readonly "openai/gpt-4o-mini-2024-07-18": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5250,7 +5250,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini
+    readonly "openai/gpt-4o-mini": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5641,7 +5641,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-4-0314": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5658,7 +5658,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4
+    readonly "openai/gpt-4": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5675,7 +5675,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-3.5-turbo": {
         id: string;
         name: string;
         api: "openai-completions";
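
For context, every entry touched above shares the record shape declared in the surrounding lines (id, name, api, contextWindow, maxTokens). A minimal TypeScript sketch of how a consumer might read these entries in 0.12.4, assuming the package's root export exposes MODELS (the diff only shows the generated .d.ts, so the exact import path is an assumption):

// Sketch only: the import specifier is assumed; MODELS itself is declared
// as `export declare const MODELS` in the .d.ts shown in the diff.
import { MODELS } from "@mariozechner/pi-ai";

// Both keys below appear on added (+) lines in 0.12.4, with the shape
// { id, name, api, contextWindow, maxTokens } shown above.
const commandR = MODELS["cohere/command-r-08-2024"];
console.log(commandR.id, commandR.contextWindow, commandR.maxTokens);

const mini = MODELS["openai/gpt-4o-mini-2024-07-18"];
console.log(mini.name, mini.api); // api is the literal type "openai-completions"

// Because MODELS is declared with readonly string-literal keys, model
// identifiers can be kept type-checked against the installed version:
type ModelKey = keyof typeof MODELS;
const key: ModelKey = "openai/gpt-4"; // also present on an added (+) line

Note that "openai/gpt-4o-mini" and the dated "openai/gpt-4o-mini-2024-07-18" both appear as keys in the 0.12.4 declarations; code that indexes MODELS with string literals will surface any key changes at compile time.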