@mariozechner/pi-ai 0.26.1 → 0.27.1

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1969,6 +1969,23 @@ export const MODELS = {
  },
  },
  "zai": {
+ "glm-4.7": {
+ id: "glm-4.7",
+ name: "GLM-4.7",
+ api: "anthropic-messages",
+ provider: "zai",
+ baseUrl: "https://api.z.ai/api/anthropic",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.6,
+ output: 2.2,
+ cacheRead: 0.11,
+ cacheWrite: 0,
+ },
+ contextWindow: 204800,
+ maxTokens: 131072,
+ },
  "glm-4.5-flash": {
  id: "glm-4.5-flash",
  name: "GLM-4.5-Flash",
@@ -2854,6 +2871,23 @@ export const MODELS = {
  },
  },
  "openrouter": {
+ "z-ai/glm-4.7": {
+ id: "z-ai/glm-4.7",
+ name: "Z.AI: GLM 4.7",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.11,
+ output: 2.2,
+ cacheRead: 0.11,
+ cacheWrite: 0,
+ },
+ contextWindow: 200000,
+ maxTokens: 131072,
+ },
  "google/gemini-3-flash-preview": {
  id: "google/gemini-3-flash-preview",
  name: "Google: Gemini 3 Flash Preview",
@@ -6016,9 +6050,9 @@ export const MODELS = {
  contextWindow: 32768,
  maxTokens: 4096,
  },
- "anthropic/claude-3.5-haiku-20241022": {
- id: "anthropic/claude-3.5-haiku-20241022",
- name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+ "anthropic/claude-3.5-haiku": {
+ id: "anthropic/claude-3.5-haiku",
+ name: "Anthropic: Claude 3.5 Haiku",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -6033,9 +6067,9 @@ export const MODELS = {
  contextWindow: 200000,
  maxTokens: 8192,
  },
- "anthropic/claude-3.5-haiku": {
- id: "anthropic/claude-3.5-haiku",
- name: "Anthropic: Claude 3.5 Haiku",
+ "anthropic/claude-3.5-haiku-20241022": {
+ id: "anthropic/claude-3.5-haiku-20241022",
+ name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -6067,34 +6101,34 @@ export const MODELS = {
  contextWindow: 200000,
  maxTokens: 8192,
  },
- "mistralai/ministral-8b": {
- id: "mistralai/ministral-8b",
- name: "Mistral: Ministral 8B",
+ "mistralai/ministral-3b": {
+ id: "mistralai/ministral-3b",
+ name: "Mistral: Ministral 3B",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.09999999999999999,
- output: 0.09999999999999999,
+ input: 0.04,
+ output: 0.04,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
  maxTokens: 4096,
  },
- "mistralai/ministral-3b": {
- id: "mistralai/ministral-3b",
- name: "Mistral: Ministral 3B",
+ "mistralai/ministral-8b": {
+ id: "mistralai/ministral-8b",
+ name: "Mistral: Ministral 8B",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.04,
- output: 0.04,
+ input: 0.09999999999999999,
+ output: 0.09999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
@@ -6271,22 +6305,22 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 16384,
  },
- "meta-llama/llama-3.1-8b-instruct": {
- id: "meta-llama/llama-3.1-8b-instruct",
- name: "Meta: Llama 3.1 8B Instruct",
+ "meta-llama/llama-3.1-70b-instruct": {
+ id: "meta-llama/llama-3.1-70b-instruct",
+ name: "Meta: Llama 3.1 70B Instruct",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.02,
- output: 0.03,
+ input: 0.39999999999999997,
+ output: 0.39999999999999997,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
- maxTokens: 16384,
+ maxTokens: 4096,
  },
  "meta-llama/llama-3.1-405b-instruct": {
  id: "meta-llama/llama-3.1-405b-instruct",
@@ -6305,22 +6339,22 @@ export const MODELS = {
  contextWindow: 130815,
  maxTokens: 4096,
  },
- "meta-llama/llama-3.1-70b-instruct": {
- id: "meta-llama/llama-3.1-70b-instruct",
- name: "Meta: Llama 3.1 70B Instruct",
+ "meta-llama/llama-3.1-8b-instruct": {
+ id: "meta-llama/llama-3.1-8b-instruct",
+ name: "Meta: Llama 3.1 8B Instruct",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.39999999999999997,
- output: 0.39999999999999997,
+ input: 0.02,
+ output: 0.03,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
- maxTokens: 4096,
+ maxTokens: 16384,
  },
  "mistralai/mistral-nemo": {
  id: "mistralai/mistral-nemo",
@@ -6339,9 +6373,9 @@ export const MODELS = {
  contextWindow: 131072,
  maxTokens: 16384,
  },
- "openai/gpt-4o-mini-2024-07-18": {
- id: "openai/gpt-4o-mini-2024-07-18",
- name: "OpenAI: GPT-4o-mini (2024-07-18)",
+ "openai/gpt-4o-mini": {
+ id: "openai/gpt-4o-mini",
+ name: "OpenAI: GPT-4o-mini",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -6356,9 +6390,9 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 16384,
  },
- "openai/gpt-4o-mini": {
- id: "openai/gpt-4o-mini",
- name: "OpenAI: GPT-4o-mini",
+ "openai/gpt-4o-mini-2024-07-18": {
+ id: "openai/gpt-4o-mini-2024-07-18",
+ name: "OpenAI: GPT-4o-mini (2024-07-18)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -6458,23 +6492,6 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-4o-2024-05-13": {
- id: "openai/gpt-4o-2024-05-13",
- name: "OpenAI: GPT-4o (2024-05-13)",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 5,
- output: 15,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 128000,
- maxTokens: 4096,
- },
  "openai/gpt-4o": {
  id: "openai/gpt-4o",
  name: "OpenAI: GPT-4o",
@@ -6509,22 +6526,22 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 64000,
  },
- "meta-llama/llama-3-70b-instruct": {
- id: "meta-llama/llama-3-70b-instruct",
- name: "Meta: Llama 3 70B Instruct",
+ "openai/gpt-4o-2024-05-13": {
+ id: "openai/gpt-4o-2024-05-13",
+ name: "OpenAI: GPT-4o (2024-05-13)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.3,
- output: 0.39999999999999997,
+ input: 5,
+ output: 15,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8192,
- maxTokens: 16384,
+ contextWindow: 128000,
+ maxTokens: 4096,
  },
  "meta-llama/llama-3-8b-instruct": {
  id: "meta-llama/llama-3-8b-instruct",
@@ -6543,6 +6560,23 @@ export const MODELS = {
  contextWindow: 8192,
  maxTokens: 16384,
  },
+ "meta-llama/llama-3-70b-instruct": {
+ id: "meta-llama/llama-3-70b-instruct",
+ name: "Meta: Llama 3 70B Instruct",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 0.39999999999999997,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 8192,
+ maxTokens: 16384,
+ },
  "mistralai/mixtral-8x22b-instruct": {
  id: "mistralai/mixtral-8x22b-instruct",
  name: "Mistral: Mixtral 8x22B Instruct",
@@ -6628,38 +6662,38 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo-0613": {
- id: "openai/gpt-3.5-turbo-0613",
- name: "OpenAI: GPT-3.5 Turbo (older v0613)",
+ "openai/gpt-4-turbo-preview": {
+ id: "openai/gpt-4-turbo-preview",
+ name: "OpenAI: GPT-4 Turbo Preview",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 1,
- output: 2,
+ input: 10,
+ output: 30,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 4095,
+ contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-4-turbo-preview": {
- id: "openai/gpt-4-turbo-preview",
- name: "OpenAI: GPT-4 Turbo Preview",
+ "openai/gpt-3.5-turbo-0613": {
+ id: "openai/gpt-3.5-turbo-0613",
+ name: "OpenAI: GPT-3.5 Turbo (older v0613)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 10,
- output: 30,
+ input: 1,
+ output: 2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
+ contextWindow: 4095,
  maxTokens: 4096,
  },
  "mistralai/mistral-tiny": {
@@ -6747,38 +6781,38 @@ export const MODELS = {
  contextWindow: 8191,
  maxTokens: 4096,
  },
- "openai/gpt-4": {
- id: "openai/gpt-4",
- name: "OpenAI: GPT-4",
+ "openai/gpt-3.5-turbo": {
+ id: "openai/gpt-3.5-turbo",
+ name: "OpenAI: GPT-3.5 Turbo",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 30,
- output: 60,
+ input: 0.5,
+ output: 1.5,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8191,
+ contextWindow: 16385,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo": {
- id: "openai/gpt-3.5-turbo",
- name: "OpenAI: GPT-3.5 Turbo",
+ "openai/gpt-4": {
+ id: "openai/gpt-4",
+ name: "OpenAI: GPT-4",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.5,
- output: 1.5,
+ input: 30,
+ output: 60,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 16385,
+ contextWindow: 8191,
  maxTokens: 4096,
  },
  "openrouter/auto": {