@mariozechner/pi-ai 0.11.5 → 0.11.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
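Every model entry touched below follows the same shape. As rough orientation, the sketch here shows that shape and one way the cost fields might be consumed; the ModelInfo interface and the estimateCost helper are illustrative assumptions rather than pi-ai's actual exports, and the cost figures are presumed to be USD per million tokens.

// Minimal sketch (assumption, not the package's actual types or API) of the shape
// shared by every MODELS entry in this diff, plus a hypothetical cost helper.
interface ModelInfo {
  id: string;
  name: string;
  api: string;
  provider: string;
  baseUrl: string;
  reasoning: boolean;
  input: ("text" | "image")[];
  // Presumed to be USD per million tokens (assumption based on typical pricing tables).
  cost: { input: number; output: number; cacheRead: number; cacheWrite: number };
  contextWindow: number;
  maxTokens: number;
}

// Hypothetical helper: estimate the USD cost of a single request against a model entry.
function estimateCost(model: ModelInfo, inputTokens: number, outputTokens: number): number {
  return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}

// Example with the newly added paid "arcee-ai/trinity-mini" entry (0.045 in / 0.15 out):
// estimateCost(trinityMini, 10_000, 2_000) === 0.00075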
@@ -1971,6 +1971,40 @@ export const MODELS = {
     },
   },
   openrouter: {
+    "arcee-ai/trinity-mini:free": {
+      id: "arcee-ai/trinity-mini:free",
+      name: "Arcee AI: Trinity Mini (free)",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 4096,
+    },
+    "arcee-ai/trinity-mini": {
+      id: "arcee-ai/trinity-mini",
+      name: "Arcee AI: Trinity Mini",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.045,
+        output: 0.15,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 4096,
+    },
     "deepseek/deepseek-v3.2": {
       id: "deepseek/deepseek-v3.2",
       name: "DeepSeek: DeepSeek V3.2",
@@ -1981,12 +2015,12 @@ export const MODELS = {
       input: ["text"],
       cost: {
         input: 0.28,
-        output: 0.42,
-        cacheRead: 0.028,
+        output: 0.39999999999999997,
+        cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow: 131072,
-      maxTokens: 64000,
+      contextWindow: 163840,
+      maxTokens: 65536,
     },
     "prime-intellect/intellect-3": {
       id: "prime-intellect/intellect-3",
@@ -2575,13 +2609,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.216,
-        output: 0.328,
-        cacheRead: 0,
+        input: 0.21,
+        output: 0.32,
+        cacheRead: 0.16799999999999998,
         cacheWrite: 0,
       },
       contextWindow: 163840,
-      maxTokens: 65536,
+      maxTokens: 4096,
     },
     "google/gemini-2.5-flash-preview-09-2025": {
       id: "google/gemini-2.5-flash-preview-09-2025",
@@ -2702,26 +2736,26 @@ export const MODELS = {
       contextWindow: 400000,
       maxTokens: 128000,
     },
-    "deepseek/deepseek-v3.1-terminus:exacto": {
-      id: "deepseek/deepseek-v3.1-terminus:exacto",
-      name: "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
+    "deepseek/deepseek-v3.1-terminus": {
+      id: "deepseek/deepseek-v3.1-terminus",
+      name: "DeepSeek: DeepSeek V3.1 Terminus",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.216,
-        output: 0.7999999999999999,
-        cacheRead: 0,
+        input: 0.21,
+        output: 0.7899999999999999,
+        cacheRead: 0.16799999999999998,
         cacheWrite: 0,
       },
-      contextWindow: 131072,
-      maxTokens: 65536,
+      contextWindow: 163840,
+      maxTokens: 4096,
     },
-    "deepseek/deepseek-v3.1-terminus": {
-      id: "deepseek/deepseek-v3.1-terminus",
-      name: "DeepSeek: DeepSeek V3.1 Terminus",
+    "deepseek/deepseek-v3.1-terminus:exacto": {
+      id: "deepseek/deepseek-v3.1-terminus:exacto",
+      name: "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -4328,7 +4362,7 @@ export const MODELS = {
       cost: {
         input: 0.19999999999999998,
         output: 0.88,
-        cacheRead: 0.135,
+        cacheRead: 0.106,
         cacheWrite: 0,
       },
       contextWindow: 163840,
@@ -4921,10 +4955,10 @@ export const MODELS = {
       reasoning: false,
       input: ["text", "image"],
       cost: {
-        input: 3,
-        output: 15,
-        cacheRead: 0.3,
-        cacheWrite: 3.75,
+        input: 6,
+        output: 30,
+        cacheRead: 0,
+        cacheWrite: 0,
       },
       contextWindow: 200000,
       maxTokens: 8192,
@@ -5048,34 +5082,34 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "cohere/command-r-plus-08-2024": {
-      id: "cohere/command-r-plus-08-2024",
-      name: "Cohere: Command R+ (08-2024)",
+    "cohere/command-r-08-2024": {
+      id: "cohere/command-r-08-2024",
+      name: "Cohere: Command R (08-2024)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 2.5,
-        output: 10,
+        input: 0.15,
+        output: 0.6,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 128000,
       maxTokens: 4000,
     },
-    "cohere/command-r-08-2024": {
-      id: "cohere/command-r-08-2024",
-      name: "Cohere: Command R (08-2024)",
+    "cohere/command-r-plus-08-2024": {
+      id: "cohere/command-r-plus-08-2024",
+      name: "Cohere: Command R+ (08-2024)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.15,
-        output: 0.6,
+        input: 2.5,
+        output: 10,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -5133,23 +5167,6 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-70b-instruct": {
-      id: "meta-llama/llama-3.1-70b-instruct",
-      name: "Meta: Llama 3.1 70B Instruct",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.39999999999999997,
-        output: 0.39999999999999997,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 4096,
-    },
     "meta-llama/llama-3.1-8b-instruct": {
       id: "meta-llama/llama-3.1-8b-instruct",
       name: "Meta: Llama 3.1 8B Instruct",
@@ -5184,6 +5201,23 @@ export const MODELS = {
       contextWindow: 130815,
       maxTokens: 4096,
     },
+    "meta-llama/llama-3.1-70b-instruct": {
+      id: "meta-llama/llama-3.1-70b-instruct",
+      name: "Meta: Llama 3.1 70B Instruct",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.39999999999999997,
+        output: 0.39999999999999997,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 131072,
+      maxTokens: 4096,
+    },
     "mistralai/mistral-nemo": {
       id: "mistralai/mistral-nemo",
       name: "Mistral: Mistral Nemo",
@@ -5201,9 +5235,9 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 16384,
     },
-    "openai/gpt-4o-mini": {
-      id: "openai/gpt-4o-mini",
-      name: "OpenAI: GPT-4o-mini",
+    "openai/gpt-4o-mini-2024-07-18": {
+      id: "openai/gpt-4o-mini-2024-07-18",
+      name: "OpenAI: GPT-4o-mini (2024-07-18)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -5218,9 +5252,9 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "openai/gpt-4o-mini-2024-07-18": {
-      id: "openai/gpt-4o-mini-2024-07-18",
-      name: "OpenAI: GPT-4o-mini (2024-07-18)",
+    "openai/gpt-4o-mini": {
+      id: "openai/gpt-4o-mini",
+      name: "OpenAI: GPT-4o-mini",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -5609,26 +5643,26 @@ export const MODELS = {
       contextWindow: 16385,
       maxTokens: 4096,
     },
-    "openai/gpt-3.5-turbo": {
-      id: "openai/gpt-3.5-turbo",
-      name: "OpenAI: GPT-3.5 Turbo",
+    "openai/gpt-4-0314": {
+      id: "openai/gpt-4-0314",
+      name: "OpenAI: GPT-4 (older v0314)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.5,
-        output: 1.5,
+        input: 30,
+        output: 60,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow: 16385,
+      contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-4-0314": {
-      id: "openai/gpt-4-0314",
-      name: "OpenAI: GPT-4 (older v0314)",
+    "openai/gpt-4": {
+      id: "openai/gpt-4",
+      name: "OpenAI: GPT-4",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -5643,21 +5677,21 @@ export const MODELS = {
       contextWindow: 8191,
       maxTokens: 4096,
     },
-    "openai/gpt-4": {
-      id: "openai/gpt-4",
-      name: "OpenAI: GPT-4",
+    "openai/gpt-3.5-turbo": {
+      id: "openai/gpt-3.5-turbo",
+      name: "OpenAI: GPT-3.5 Turbo",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 30,
-        output: 60,
+        input: 0.5,
+        output: 1.5,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow: 8191,
+      contextWindow: 16385,
       maxTokens: 4096,
     },
     "openrouter/auto": {