@mariozechner/pi-ai 0.10.1 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -136,6 +136,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "claude-opus-4-5-20251101": {
+        id: string;
+        name: string;
+        api: "anthropic-messages";
+        provider: string;
+        baseUrl: string;
+        reasoning: true;
+        input: ("image" | "text")[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "claude-sonnet-4-5": {
         id: string;
         name: string;
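// --- Usage sketch (not part of the diff) ----------------------------------
// A minimal sketch of reading the new 0.11.0 entry; every field accessed below
// is declared in the hunk above. Assumption: MODELS is importable from the
// package root, which the declaration diff alone does not confirm.
import { MODELS } from "@mariozechner/pi-ai";

const opus = MODELS["claude-opus-4-5-20251101"];
console.log(opus.id, opus.name, opus.provider);     // identity fields
console.log(opus.api, opus.reasoning, opus.input);  // "anthropic-messages", true, modalities
console.log(opus.contextWindow, opus.maxTokens);    // size limits
console.log(opus.cost.input, opus.cost.output,      // cost fields, including
    opus.cost.cacheRead, opus.cost.cacheWrite);     // cache read/write entries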
@@ -2020,6 +2037,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "openrouter/bert-nebulon-alpha": {
+        id: string;
+        name: string;
+        api: "openai-completions";
+        provider: string;
+        baseUrl: string;
+        reasoning: false;
+        input: ("image" | "text")[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "allenai/olmo-3-7b-instruct": {
         id: string;
         name: string;
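// --- Usage sketch (not part of the diff) ----------------------------------
// The hunks above declare two literal "api" values, "anthropic-messages" and
// "openai-completions", so the field can serve as a discriminant when routing
// requests. The describeApi helper is illustrative only, and the import path
// is an assumption.
import { MODELS } from "@mariozechner/pi-ai";

function describeApi(key: keyof typeof MODELS): string {
    const m = MODELS[key];
    return m.api === "anthropic-messages"
        ? `${m.name}: Anthropic Messages API at ${m.baseUrl}`
        : `${m.name}: ${m.api} at ${m.baseUrl}`;
}

console.log(describeApi("openrouter/bert-nebulon-alpha"));
console.log(describeApi("claude-opus-4-5-20251101"));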
@@ -4995,7 +5029,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-
+    readonly "cohere/command-r-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5012,7 +5046,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "cohere/command-r-08-2024": {
+    readonly "cohere/command-r-plus-08-2024": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5063,23 +5097,6 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "nousresearch/hermes-3-llama-3.1-70b": {
-        id: string;
-        name: string;
-        api: "openai-completions";
-        provider: string;
-        baseUrl: string;
-        reasoning: false;
-        input: "text"[];
-        cost: {
-            input: number;
-            output: number;
-            cacheRead: number;
-            cacheWrite: number;
-        };
-        contextWindow: number;
-        maxTokens: number;
-    };
     readonly "openai/gpt-4o-2024-08-06": {
         id: string;
         name: string;
@@ -5165,7 +5182,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini": {
+    readonly "openai/gpt-4o-mini-2024-07-18": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5182,7 +5199,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-mini
+    readonly "openai/gpt-4o-mini": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5284,7 +5301,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o": {
+    readonly "openai/gpt-4o-2024-05-13": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5301,7 +5318,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o
+    readonly "openai/gpt-4o": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5318,7 +5335,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o
+    readonly "openai/gpt-4o:extended": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5335,7 +5352,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-
+    readonly "meta-llama/llama-3-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5352,7 +5369,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-
+    readonly "meta-llama/llama-3-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5454,7 +5471,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-3.5-turbo-0613": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5471,7 +5488,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-4-turbo-preview": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5590,7 +5607,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-4": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5607,7 +5624,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-
+    readonly "openai/gpt-3.5-turbo": {
         id: string;
         name: string;
         api: "openai-completions";
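// --- Migration sketch (not part of the diff) -------------------------------
// 0.11.0 drops the "nousresearch/hermes-3-llama-3.1-70b" entry in one of the
// hunks above and inserts dated variants next to several plain ids (e.g.
// "openai/gpt-4o-mini-2024-07-18" now precedes "openai/gpt-4o-mini"), so model
// ids coming from user config are best checked before indexing MODELS.
// The getModel helper is illustrative only, and the import path is an assumption.
import { MODELS } from "@mariozechner/pi-ai";

function getModel(id: string) {
    return id in MODELS ? MODELS[id as keyof typeof MODELS] : undefined;
}

console.log(getModel("openai/gpt-4o-mini-2024-07-18")?.contextWindow);
console.log(getModel("nousresearch/hermes-3-llama-3.1-70b") ?? "not present in 0.11.0");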