@mariozechner/pi-ai 0.22.4 → 0.22.5
This diff compares the publicly released contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
@@ -6092,7 +6092,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "anthropic/claude-3.5-haiku": {
+    readonly "anthropic/claude-3.5-haiku-20241022": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6109,7 +6109,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "anthropic/claude-3.5-haiku
+    readonly "anthropic/claude-3.5-haiku": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6143,7 +6143,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "mistralai/ministral-
+    readonly "mistralai/ministral-8b": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6160,7 +6160,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "mistralai/ministral-
+    readonly "mistralai/ministral-3b": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6262,7 +6262,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-
+    readonly "cohere/command-r-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6279,7 +6279,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-08-2024": {
+    readonly "cohere/command-r-plus-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6347,7 +6347,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6364,7 +6364,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-405b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6381,7 +6381,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-
+    readonly "meta-llama/llama-3.1-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6585,7 +6585,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-
+    readonly "meta-llama/llama-3-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6602,7 +6602,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-
+    readonly "meta-llama/llama-3-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
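
The hunks shown here only touch the key strings inside the exported MODELS map; each entry keeps the same declared shape (id, name, api, contextWindow, maxTokens). A minimal consumer-side sketch follows, assuming MODELS is exported from the package root, which this excerpt does not confirm:

// A minimal sketch; the import path is an assumption, since the excerpt only
// shows the .d.ts declaration, not the package entry point.
import { MODELS } from "@mariozechner/pi-ai";

// "anthropic/claude-3.5-haiku-20241022" is one of the keys present in 0.22.5.
const haiku = MODELS["anthropic/claude-3.5-haiku-20241022"];

// Fields declared for each entry in the excerpt above.
console.log(haiku.id, haiku.name, haiku.api, haiku.contextWindow, haiku.maxTokens);

Consumers that index MODELS with string literals get these keys checked at compile time, so a key that was renamed between 0.22.4 and 0.22.5 surfaces as a type error rather than an undefined lookup at runtime.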