@mariozechner/pi-ai 0.9.4 → 0.10.0
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries; it is provided for informational purposes only.
@@ -1952,6 +1952,40 @@ export declare const MODELS: {
         };
     };
     readonly openrouter: {
+        readonly "tngtech/tng-r1t-chimera:free": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "tngtech/tng-r1t-chimera": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "anthropic/claude-opus-4.5": {
             id: string;
             name: string;
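
The hunk above adds two new OpenRouter entries for TNG's R1T Chimera (a ":free" variant and the standard key). A minimal consumption sketch, assuming MODELS is importable from the package root (the exact import path is not shown in this diff):

import { MODELS } from "@mariozechner/pi-ai";

// Shape follows the declaration added above: string metadata fields,
// the literal `reasoning: true`, text-only input, and numeric
// cost/contextWindow/maxTokens values.
const chimera = MODELS.openrouter["tngtech/tng-r1t-chimera:free"];
console.log(chimera.id, chimera.contextWindow, chimera.cost.input);
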
@@ -3567,23 +3601,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/mistral-small-3.2-24b-instruct:free": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: ("image" | "text")[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "mistralai/mistral-small-3.2-24b-instruct": {
             id: string;
             name: string;
@@ -4230,23 +4247,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "deepseek/deepseek-chat-v3-0324:free": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "deepseek/deepseek-chat-v3-0324": {
             id: string;
             name: string;
@@ -4808,7 +4808,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku": {
+        readonly "anthropic/claude-3.5-haiku-20241022": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -4825,7 +4825,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku
+        readonly "anthropic/claude-3.5-haiku": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5148,7 +5148,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o-mini": {
+        readonly "openai/gpt-4o-mini-2024-07-18": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5165,7 +5165,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o-mini
+        readonly "openai/gpt-4o-mini": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5267,7 +5267,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o": {
+        readonly "openai/gpt-4o-2024-05-13": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5284,7 +5284,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o
+        readonly "openai/gpt-4o": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5301,7 +5301,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o
+        readonly "openai/gpt-4o:extended": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5318,7 +5318,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5335,7 +5335,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5437,7 +5437,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-3.5-turbo-0613": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5454,7 +5454,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-4-turbo-preview": {
             id: string;
             name: string;
             api: "openai-completions";
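
The key changes in the later hunks leave 0.10.0 with dated OpenRouter snapshots (for example "openai/gpt-4o-mini-2024-07-18" and "openai/gpt-4o-2024-05-13") alongside the bare aliases. A hedged, type-level sketch of pinning to a dated key, again assuming MODELS is exported from the package root:

import { MODELS } from "@mariozechner/pi-ai";

type OpenRouterModelId = keyof typeof MODELS.openrouter;

// Both the dated snapshot and the bare alias are keys in 0.10.0,
// so either string literal type-checks against OpenRouterModelId.
const pinned: OpenRouterModelId = "openai/gpt-4o-mini-2024-07-18";
const alias: OpenRouterModelId = "openai/gpt-4o-mini";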