@mariozechner/pi-ai 0.9.4 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -115,8 +115,8 @@ export const MODELS = {
  cost: {
  input: 5,
  output: 25,
- cacheRead: 1.5,
- cacheWrite: 18.75,
+ cacheRead: 0.5,
+ cacheWrite: 6.25,
  },
  contextWindow: 200000,
  maxTokens: 64000,
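This first hunk cuts the cache pricing for the affected entry: cacheRead drops from 1.5 to 0.5 and cacheWrite from 18.75 to 6.25, while input (5) and output (25) are unchanged. Assuming these cost fields are USD per million tokens (a conventional unit, not stated in the diff itself), a minimal sketch of what the change means for a cache-heavy request; the Cost type and estimateUsd helper below are illustrative, not part of pi-ai's API:

// Hypothetical helper, not a pi-ai export; prices assumed to be USD per 1,000,000 tokens.
interface Cost {
  input: number;
  output: number;
  cacheRead: number;
  cacheWrite: number;
}

type Usage = { input: number; output: number; cacheRead: number; cacheWrite: number };

function estimateUsd(cost: Cost, tokens: Usage): number {
  const perToken = (price: number, n: number) => (price / 1_000_000) * n;
  return (
    perToken(cost.input, tokens.input) +
    perToken(cost.output, tokens.output) +
    perToken(cost.cacheRead, tokens.cacheRead) +
    perToken(cost.cacheWrite, tokens.cacheWrite)
  );
}

// Example: 50k cached-read tokens, 1k fresh input tokens, 2k output tokens.
const usage: Usage = { input: 1_000, output: 2_000, cacheRead: 50_000, cacheWrite: 0 };
const before = estimateUsd({ input: 5, output: 25, cacheRead: 1.5, cacheWrite: 18.75 }, usage); // ≈ $0.13
const after = estimateUsd({ input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 }, usage);   // ≈ $0.08
console.log({ before, after });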
@@ -1954,6 +1954,57 @@ export const MODELS = {
  },
  },
  openrouter: {
+ "prime-intellect/intellect-3": {
+ id: "prime-intellect/intellect-3",
+ name: "Prime Intellect: INTELLECT-3",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.19999999999999998,
+ output: 1.1,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 131072,
+ },
+ "tngtech/tng-r1t-chimera:free": {
+ id: "tngtech/tng-r1t-chimera:free",
+ name: "TNG: R1T Chimera (free)",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 163840,
+ maxTokens: 163840,
+ },
+ "tngtech/tng-r1t-chimera": {
+ id: "tngtech/tng-r1t-chimera",
+ name: "TNG: R1T Chimera",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 1.2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 163840,
+ maxTokens: 163840,
+ },
  "anthropic/claude-opus-4.5": {
  id: "anthropic/claude-opus-4.5",
  name: "Anthropic: Claude Opus 4.5",
@@ -2201,8 +2252,8 @@ export const MODELS = {
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.24,
- output: 0.96,
+ input: 0.255,
+ output: 1.02,
  cacheRead: 0,
  cacheWrite: 0,
  },
@@ -3569,23 +3620,6 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 16384,
  },
- "mistralai/mistral-small-3.2-24b-instruct:free": {
- id: "mistralai/mistral-small-3.2-24b-instruct:free",
- name: "Mistral: Mistral Small 3.2 24B (free)",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 131072,
- maxTokens: 4096,
- },
  "mistralai/mistral-small-3.2-24b-instruct": {
  id: "mistralai/mistral-small-3.2-24b-instruct",
  name: "Mistral: Mistral Small 3.2 24B",
@@ -4042,7 +4076,7 @@ export const MODELS = {
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
+ contextWindow: 131072,
  maxTokens: 4096,
  },
  "qwen/qwen3-235b-a22b": {
@@ -4232,23 +4266,6 @@ export const MODELS = {
  contextWindow: 327680,
  maxTokens: 16384,
  },
- "deepseek/deepseek-chat-v3-0324:free": {
- id: "deepseek/deepseek-chat-v3-0324:free",
- name: "DeepSeek: DeepSeek V3 0324 (free)",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
- cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 163840,
- maxTokens: 4096,
- },
  "deepseek/deepseek-chat-v3-0324": {
  id: "deepseek/deepseek-chat-v3-0324",
  name: "DeepSeek: DeepSeek V3 0324",
@@ -4258,13 +4275,13 @@ export const MODELS = {
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.216,
- output: 0.896,
- cacheRead: 0.135,
+ input: 0.19999999999999998,
+ output: 0.88,
+ cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 163840,
- maxTokens: 163840,
+ maxTokens: 4096,
  },
  "mistralai/mistral-small-3.1-24b-instruct:free": {
  id: "mistralai/mistral-small-3.1-24b-instruct:free",
@@ -4280,8 +4297,8 @@ export const MODELS = {
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 96000,
- maxTokens: 96000,
+ contextWindow: 128000,
+ maxTokens: 4096,
  },
  "mistralai/mistral-small-3.1-24b-instruct": {
  id: "mistralai/mistral-small-3.1-24b-instruct",
@@ -4810,9 +4827,9 @@ export const MODELS = {
  contextWindow: 32768,
  maxTokens: 4096,
  },
- "anthropic/claude-3.5-haiku": {
- id: "anthropic/claude-3.5-haiku",
- name: "Anthropic: Claude 3.5 Haiku",
+ "anthropic/claude-3.5-haiku-20241022": {
+ id: "anthropic/claude-3.5-haiku-20241022",
+ name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -4827,9 +4844,9 @@ export const MODELS = {
  contextWindow: 200000,
  maxTokens: 8192,
  },
- "anthropic/claude-3.5-haiku-20241022": {
- id: "anthropic/claude-3.5-haiku-20241022",
- name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+ "anthropic/claude-3.5-haiku": {
+ id: "anthropic/claude-3.5-haiku",
+ name: "Anthropic: Claude 3.5 Haiku",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -4980,34 +4997,34 @@ export const MODELS = {
  contextWindow: 32768,
  maxTokens: 4096,
  },
- "cohere/command-r-08-2024": {
- id: "cohere/command-r-08-2024",
- name: "Cohere: Command R (08-2024)",
+ "cohere/command-r-plus-08-2024": {
+ id: "cohere/command-r-plus-08-2024",
+ name: "Cohere: Command R+ (08-2024)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.15,
- output: 0.6,
+ input: 2.5,
+ output: 10,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 128000,
  maxTokens: 4000,
  },
- "cohere/command-r-plus-08-2024": {
- id: "cohere/command-r-plus-08-2024",
- name: "Cohere: Command R+ (08-2024)",
+ "cohere/command-r-08-2024": {
+ id: "cohere/command-r-08-2024",
+ name: "Cohere: Command R (08-2024)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 2.5,
- output: 10,
+ input: 0.15,
+ output: 0.6,
  cacheRead: 0,
  cacheWrite: 0,
  },
@@ -5575,38 +5592,38 @@ export const MODELS = {
  contextWindow: 8191,
  maxTokens: 4096,
  },
- "openai/gpt-4": {
- id: "openai/gpt-4",
- name: "OpenAI: GPT-4",
+ "openai/gpt-3.5-turbo": {
+ id: "openai/gpt-3.5-turbo",
+ name: "OpenAI: GPT-3.5 Turbo",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 30,
- output: 60,
+ input: 0.5,
+ output: 1.5,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8191,
+ contextWindow: 16385,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo": {
- id: "openai/gpt-3.5-turbo",
- name: "OpenAI: GPT-3.5 Turbo",
+ "openai/gpt-4": {
+ id: "openai/gpt-4",
+ name: "OpenAI: GPT-4",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.5,
- output: 1.5,
+ input: 30,
+ output: 60,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 16385,
+ contextWindow: 8191,
  maxTokens: 4096,
  },
  "openrouter/auto": {