@mariozechner/pi-ai 0.26.1 → 0.27.0

This diff compares two publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
@@ -1969,6 +1969,23 @@ export const MODELS = {
     },
   },
   "zai": {
+    "glm-4.7": {
+      id: "glm-4.7",
+      name: "GLM-4.7",
+      api: "anthropic-messages",
+      provider: "zai",
+      baseUrl: "https://api.z.ai/api/anthropic",
+      reasoning: true,
+      input: ["text"],
+      cost: {
+        input: 0.6,
+        output: 2.2,
+        cacheRead: 0.11,
+        cacheWrite: 0,
+      },
+      contextWindow: 204800,
+      maxTokens: 131072,
+    },
     "glm-4.5-flash": {
       id: "glm-4.5-flash",
       name: "GLM-4.5-Flash",
@@ -6016,9 +6033,9 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 4096,
     },
-    "anthropic/claude-3.5-haiku-20241022": {
-      id: "anthropic/claude-3.5-haiku-20241022",
-      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+    "anthropic/claude-3.5-haiku": {
+      id: "anthropic/claude-3.5-haiku",
+      name: "Anthropic: Claude 3.5 Haiku",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6033,9 +6050,9 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 8192,
     },
-    "anthropic/claude-3.5-haiku": {
-      id: "anthropic/claude-3.5-haiku",
-      name: "Anthropic: Claude 3.5 Haiku",
+    "anthropic/claude-3.5-haiku-20241022": {
+      id: "anthropic/claude-3.5-haiku-20241022",
+      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
@@ -6271,39 +6288,39 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
-    "meta-llama/llama-3.1-8b-instruct": {
-      id: "meta-llama/llama-3.1-8b-instruct",
-      name: "Meta: Llama 3.1 8B Instruct",
+    "meta-llama/llama-3.1-405b-instruct": {
+      id: "meta-llama/llama-3.1-405b-instruct",
+      name: "Meta: Llama 3.1 405B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.02,
-        output: 0.03,
+        input: 3.5,
+        output: 3.5,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow: 131072,
-      maxTokens: 16384,
+      contextWindow: 130815,
+      maxTokens: 4096,
     },
-    "meta-llama/llama-3.1-405b-instruct": {
-      id: "meta-llama/llama-3.1-405b-instruct",
-      name: "Meta: Llama 3.1 405B Instruct",
+    "meta-llama/llama-3.1-8b-instruct": {
+      id: "meta-llama/llama-3.1-8b-instruct",
+      name: "Meta: Llama 3.1 8B Instruct",
       api: "openai-completions",
       provider: "openrouter",
       baseUrl: "https://openrouter.ai/api/v1",
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 3.5,
-        output: 3.5,
+        input: 0.02,
+        output: 0.03,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow: 130815,
-      maxTokens: 4096,
+      contextWindow: 131072,
+      maxTokens: 16384,
     },
     "meta-llama/llama-3.1-70b-instruct": {
       id: "meta-llama/llama-3.1-70b-instruct",