@mariozechner/pi-ai 0.19.2 → 0.20.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5728,7 +5728,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-
+    readonly "cohere/command-r-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5745,7 +5745,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-08-2024": {
+    readonly "cohere/command-r-plus-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5830,7 +5830,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-405b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5847,7 +5847,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5881,7 +5881,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini": {
+    readonly "openai/gpt-4o-mini-2024-07-18": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5898,7 +5898,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini
+    readonly "openai/gpt-4o-mini": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6051,7 +6051,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-
+    readonly "meta-llama/llama-3-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6068,7 +6068,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-
+    readonly "meta-llama/llama-3-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6170,7 +6170,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-3.5-turbo-0613": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6187,7 +6187,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-4-turbo-preview": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6272,7 +6272,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4": {
+    readonly "openai/gpt-4-0314": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6289,7 +6289,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4
+    readonly "openai/gpt-4": {
         id: string;
         name: string;
         api: "openai-completions";
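
Every entry touched above exposes the same shape visible in the hunks (id, name, api, contextWindow, maxTokens). As a minimal sketch of how a consumer might read that metadata after updating, assuming the quoted "provider/model" keys index directly into MODELS (the nesting around these hunks is not visible in this diff):

import { MODELS } from "@mariozechner/pi-ai";

// Assumption: the keys shown in the hunks are top-level keys of MODELS.
// If the actual declaration groups them under a provider object, adjust the lookup path.
const model = MODELS["openai/gpt-4o-mini-2024-07-18"];

// Fields confirmed by the declaration diff above.
console.log(model.id, model.name, model.api);
console.log(`context window: ${model.contextWindow}, max output tokens: ${model.maxTokens}`);

Because the keys are string literals in the declaration file, referencing an entry that no longer exists in 0.20.1 fails at compile time rather than at runtime, which makes this kind of key churn straightforward to catch when upgrading.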