@mariozechner/pi-ai 0.9.3 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1952,14 +1952,14 @@ export declare const MODELS: {
         };
     };
     readonly openrouter: {
-        readonly "
+        readonly "tngtech/tng-r1t-chimera:free": {
             id: string;
             name: string;
             api: "openai-completions";
             provider: string;
             baseUrl: string;
             reasoning: true;
-            input:
+            input: "text"[];
             cost: {
                 input: number;
                 output: number;
@@ -1969,13 +1969,13 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "
+        readonly "tngtech/tng-r1t-chimera": {
             id: string;
             name: string;
             api: "openai-completions";
             provider: string;
             baseUrl: string;
-            reasoning:
+            reasoning: true;
             input: "text"[];
             cost: {
                 input: number;
@@ -1986,7 +1986,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "
+        readonly "anthropic/claude-opus-4.5": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -2003,6 +2003,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "allenai/olmo-3-7b-instruct": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "x-ai/grok-4.1-fast:free": {
             id: string;
             name: string;
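The entries added in this hunk share the shape of the existing ones. A minimal sketch of how one might be consumed, assuming the package exports `MODELS` from its main entry point as declared above (the printed values come from the package's model data, not from this diff):

import { MODELS } from "@mariozechner/pi-ai";

// New in 0.10.0: OLMo 3 7B Instruct under the openrouter provider.
const olmo = MODELS.openrouter["allenai/olmo-3-7b-instruct"];

// The declaration narrows these fields: `api` is the literal
// "openai-completions", `reasoning` is `false`, `input` is `"text"[]`.
console.log(olmo.name, olmo.contextWindow, olmo.maxTokens);
console.log(olmo.cost.input, olmo.cost.output, olmo.cost.cacheRead, olmo.cost.cacheWrite);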
@@ -3584,23 +3601,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/mistral-small-3.2-24b-instruct:free": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: ("image" | "text")[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "mistralai/mistral-small-3.2-24b-instruct": {
             id: string;
             name: string;
@@ -4247,23 +4247,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "deepseek/deepseek-chat-v3-0324:free": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "deepseek/deepseek-chat-v3-0324": {
             id: string;
             name: string;
@@ -4910,23 +4893,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "qwen/qwen-2.5-7b-instruct": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "nvidia/llama-3.1-nemotron-70b-instruct": {
             id: string;
             name: string;
@@ -5012,7 +4978,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "cohere/command-r-
+        readonly "cohere/command-r-08-2024": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5029,7 +4995,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "cohere/command-r-08-2024": {
+        readonly "cohere/command-r-plus-08-2024": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5114,7 +5080,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5131,7 +5097,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-405b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5148,7 +5114,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5590,7 +5556,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-4-0314": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5607,7 +5573,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4
+        readonly "openai/gpt-4": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5624,7 +5590,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-3.5-turbo": {
             id: string;
             name: string;
             api: "openai-completions";
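Besides the additions and renamed keys, this release drops several entries outright ("mistralai/mistral-small-3.2-24b-instruct:free", "deepseek/deepseek-chat-v3-0324:free", and "qwen/qwen-2.5-7b-instruct"), so lookups that hard-code those ids stop type-checking against 0.10.0. A minimal sketch of a guard for code that carries such an id as a plain string; the `isOpenrouterModel` helper is illustrative and not part of the package:

import { MODELS } from "@mariozechner/pi-ai";

type OpenrouterId = keyof typeof MODELS.openrouter;

// Illustrative type guard: narrows an arbitrary string to a key that still
// exists in the 0.10.0 model table.
function isOpenrouterModel(id: string): id is OpenrouterId {
    return id in MODELS.openrouter;
}

// Ids removed in 0.10.0 (e.g. the ":free" deepseek variant) fall through to a
// key that is still declared instead of failing the lookup.
const requested: string = "deepseek/deepseek-chat-v3-0324:free";
const model = isOpenrouterModel(requested)
    ? MODELS.openrouter[requested]
    : MODELS.openrouter["deepseek/deepseek-chat-v3-0324"];

console.log(model.name, model.contextWindow);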