@mariozechner/pi-ai 0.12.3 → 0.12.5

@@ -1969,14 +1969,14 @@ export declare const MODELS: {
  };
  };
  readonly openrouter: {
- readonly "arcee-ai/trinity-mini:free": {
+ readonly "amazon/nova-2-lite-v1:free": {
  id: string;
  name: string;
  api: "openai-completions";
  provider: string;
  baseUrl: string;
  reasoning: true;
- input: "text"[];
+ input: ("image" | "text")[];
  cost: {
  input: number;
  output: number;
@@ -1986,7 +1986,41 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "arcee-ai/trinity-mini": {
+ readonly "amazon/nova-2-lite-v1": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: ("image" | "text")[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
+ readonly "mistralai/mistral-large-2512": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: false;
+ input: ("image" | "text")[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
+ readonly "arcee-ai/trinity-mini:free": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2003,7 +2037,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "deepseek/deepseek-v3.2": {
+ readonly "arcee-ai/trinity-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2020,7 +2054,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "prime-intellect/intellect-3": {
+ readonly "deepseek/deepseek-v3.2": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2037,7 +2071,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "tngtech/tng-r1t-chimera:free": {
+ readonly "prime-intellect/intellect-3": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2054,7 +2088,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "tngtech/tng-r1t-chimera": {
+ readonly "tngtech/tng-r1t-chimera:free": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2071,14 +2105,14 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "anthropic/claude-opus-4.5": {
+ readonly "tngtech/tng-r1t-chimera": {
  id: string;
  name: string;
  api: "openai-completions";
  provider: string;
  baseUrl: string;
  reasoning: true;
- input: ("image" | "text")[];
+ input: "text"[];
  cost: {
  input: number;
  output: number;
@@ -2088,13 +2122,13 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openrouter/bert-nebulon-alpha": {
+ readonly "anthropic/claude-opus-4.5": {
  id: string;
  name: string;
  api: "openai-completions";
  provider: string;
  baseUrl: string;
- reasoning: false;
+ reasoning: true;
  input: ("image" | "text")[];
  cost: {
  input: number;
@@ -2734,7 +2768,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "deepseek/deepseek-v3.1-terminus": {
+ readonly "deepseek/deepseek-v3.1-terminus:exacto": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2751,7 +2785,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "deepseek/deepseek-v3.1-terminus:exacto": {
+ readonly "deepseek/deepseek-v3.1-terminus": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5080,7 +5114,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "cohere/command-r-plus-08-2024": {
+ readonly "cohere/command-r-08-2024": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5097,7 +5131,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "cohere/command-r-08-2024": {
+ readonly "cohere/command-r-plus-08-2024": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5165,7 +5199,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5182,7 +5216,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5199,7 +5233,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5233,7 +5267,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini": {
+ readonly "openai/gpt-4o-mini-2024-07-18": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5250,7 +5284,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4o-mini-2024-07-18": {
+ readonly "openai/gpt-4o-mini": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5641,7 +5675,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4-0314": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5658,7 +5692,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-0314": {
+ readonly "openai/gpt-4": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5675,7 +5709,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";
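
For context, a minimal TypeScript sketch of how a consumer might read one of the entries added in 0.12.5. Only the key names and field types come from the declarations in this diff; the import path assumes the package's main entry exports the MODELS constant, which is an assumption.

    import { MODELS } from "@mariozechner/pi-ai";

    // Entry added in 0.12.5 under the openrouter provider (import path assumed).
    const nova = MODELS.openrouter["amazon/nova-2-lite-v1"];

    // The declaration types `input` as ("image" | "text")[], so this entry
    // advertises image as well as text input.
    console.log(nova.name, nova.contextWindow, nova.maxTokens);
    console.log(nova.input.includes("image"));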