@mariozechner/pi-ai 0.12.4 → 0.12.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/models.generated.d.ts +47 -13
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +80 -46
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +4 -2
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses.d.ts.map +1 -1
- package/dist/providers/openai-responses.js +4 -2
- package/dist/providers/openai-responses.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.js
CHANGED
@@ -1971,6 +1971,57 @@ export const MODELS = {
         },
     },
     openrouter: {
+        "amazon/nova-2-lite-v1:free": {
+            id: "amazon/nova-2-lite-v1:free",
+            name: "Amazon: Nova 2 Lite (free)",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 0,
+                output: 0,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 1000000,
+            maxTokens: 4096,
+        },
+        "amazon/nova-2-lite-v1": {
+            id: "amazon/nova-2-lite-v1",
+            name: "Amazon: Nova 2 Lite",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 0.3,
+                output: 2.5,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 1000000,
+            maxTokens: 4096,
+        },
+        "mistralai/mistral-large-2512": {
+            id: "mistralai/mistral-large-2512",
+            name: "Mistral: Mistral Large 3 2512",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.5,
+                output: 1.5,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 4096,
+        },
         "arcee-ai/trinity-mini:free": {
             id: "arcee-ai/trinity-mini:free",
             name: "Arcee AI: Trinity Mini (free)",
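
The three added OpenRouter entries follow the same record shape the file already uses: model metadata plus a cost block. As a rough illustration of how such an entry could be consumed, the TypeScript sketch below looks up the new amazon/nova-2-lite-v1 record and prices a call, assuming MODELS is re-exported from the package root and that the cost figures are USD per million tokens (neither assumption is stated in this diff); estimateCostUsd is a hypothetical helper, not part of the package.

import { MODELS } from "@mariozechner/pi-ai";

// Hypothetical helper: price a single request against a model's cost table,
// treating cost.input / cost.output as USD per 1,000,000 tokens (assumption).
function estimateCostUsd(
    model: { cost: { input: number; output: number } },
    inputTokens: number,
    outputTokens: number,
): number {
    return (
        (inputTokens / 1_000_000) * model.cost.input +
        (outputTokens / 1_000_000) * model.cost.output
    );
}

const novaLite = MODELS.openrouter["amazon/nova-2-lite-v1"];
// With the prices added here (0.3 in, 2.5 out), 10,000 input tokens and
// 1,000 output tokens come to roughly $0.0055.
console.log(estimateCostUsd(novaLite, 10_000, 1_000));
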
@@ -2014,13 +2065,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
+                input: 0.27,
                 output: 0.39999999999999997,
-                cacheRead: 0,
+                cacheRead: 0.216,
                 cacheWrite: 0,
             },
             contextWindow: 163840,
-            maxTokens:
+            maxTokens: 4096,
         },
         "prime-intellect/intellect-3": {
             id: "prime-intellect/intellect-3",
@@ -2090,23 +2141,6 @@ export const MODELS = {
             contextWindow: 200000,
             maxTokens: 64000,
         },
-        "openrouter/bert-nebulon-alpha": {
-            id: "openrouter/bert-nebulon-alpha",
-            name: "Bert-Nebulon Alpha",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text", "image"],
-            cost: {
-                input: 0,
-                output: 0,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 256000,
-            maxTokens: 4096,
-        },
         "allenai/olmo-3-7b-instruct": {
             id: "allenai/olmo-3-7b-instruct",
             name: "AllenAI: Olmo 3 7B Instruct",
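
Because the openrouter/bert-nebulon-alpha entry is removed in this hunk, any caller that had pinned that id now gets an undefined lookup. A small defensive check, sketched under the same assumptions as above (the error handling is illustrative, not something the package prescribes):

// Widen the registry to a plain string-keyed record so a removed id
// resolves to undefined instead of a compile-time key error.
const registry: Record<string, { id: string }> = MODELS.openrouter;
const model = registry["openrouter/bert-nebulon-alpha"];
if (!model) {
    // The entry was removed in 0.12.6; fail loudly rather than sending
    // requests against stale metadata.
    throw new Error("openrouter/bert-nebulon-alpha is no longer in the registry");
}
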
@@ -2575,13 +2609,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 1.
-                cacheRead: 0,
+                input: 0.43,
+                output: 1.75,
+                cacheRead: 0.0799999993,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 202752,
+            maxTokens: 4096,
         },
         "anthropic/claude-sonnet-4.5": {
             id: "anthropic/claude-sonnet-4.5",
@@ -2677,13 +2711,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output: 1.
+                input: 0.19999999999999998,
+                output: 1.2,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 262144,
+            maxTokens: 4096,
         },
         "qwen/qwen3-max": {
             id: "qwen/qwen3-max",
@@ -2736,9 +2770,9 @@ export const MODELS = {
             contextWindow: 400000,
             maxTokens: 128000,
         },
-        "deepseek/deepseek-v3.1-terminus": {
-            id: "deepseek/deepseek-v3.1-terminus",
-            name: "DeepSeek: DeepSeek V3.1 Terminus",
+        "deepseek/deepseek-v3.1-terminus:exacto": {
+            id: "deepseek/deepseek-v3.1-terminus:exacto",
+            name: "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -2753,22 +2787,22 @@ export const MODELS = {
             contextWindow: 163840,
             maxTokens: 4096,
         },
-        "deepseek/deepseek-v3.1-terminus
-            id: "deepseek/deepseek-v3.1-terminus
-            name: "DeepSeek: DeepSeek V3.1 Terminus
+        "deepseek/deepseek-v3.1-terminus": {
+            id: "deepseek/deepseek-v3.1-terminus",
+            name: "DeepSeek: DeepSeek V3.1 Terminus",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
-                cacheRead: 0,
+                input: 0.21,
+                output: 0.7899999999999999,
+                cacheRead: 0.16799999999999998,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 163840,
+            maxTokens: 4096,
         },
         "x-ai/grok-4-fast": {
             id: "x-ai/grok-4-fast",
@@ -3595,13 +3629,13 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.071,
+                output: 0.463,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 262144,
+            maxTokens: 4096,
         },
         "moonshotai/kimi-k2": {
             id: "moonshotai/kimi-k2",
@@ -4768,8 +4802,8 @@ export const MODELS = {
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.108,
+                output: 0.32,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
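
Across these hunks the fields being corrected (reasoning, input, contextWindow, maxTokens and the per-token cost block) are the metadata a caller has to go on when choosing a model. A speculative filter over the openrouter block, again assuming the fields are typed as they appear in models.generated.js:

// List OpenRouter models that advertise reasoning plus image input,
// cheapest input price first (fields as shown in this diff).
const visionReasoners = Object.values(MODELS.openrouter)
    .filter((m) => m.reasoning && (m.input as string[]).includes("image"))
    .sort((a, b) => a.cost.input - b.cost.input)
    .map((m) => `${m.id} ($${m.cost.input}/M input)`);

console.log(visionReasoners);
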