@mariozechner/pi-ai 0.27.0 → 0.27.1

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -2871,6 +2871,23 @@ export const MODELS = {
  },
  },
  "openrouter": {
+ "z-ai/glm-4.7": {
+ id: "z-ai/glm-4.7",
+ name: "Z.AI: GLM 4.7",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.11,
+ output: 2.2,
+ cacheRead: 0.11,
+ cacheWrite: 0,
+ },
+ contextWindow: 200000,
+ maxTokens: 131072,
+ },
  "google/gemini-3-flash-preview": {
  id: "google/gemini-3-flash-preview",
  name: "Google: Gemini 3 Flash Preview",
@@ -6084,34 +6101,34 @@ export const MODELS = {
  contextWindow: 200000,
  maxTokens: 8192,
  },
- "mistralai/ministral-8b": {
- id: "mistralai/ministral-8b",
- name: "Mistral: Ministral 8B",
+ "mistralai/ministral-3b": {
+ id: "mistralai/ministral-3b",
+ name: "Mistral: Ministral 3B",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.09999999999999999,
- output: 0.09999999999999999,
+ input: 0.04,
+ output: 0.04,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
  maxTokens: 4096,
  },
- "mistralai/ministral-3b": {
- id: "mistralai/ministral-3b",
- name: "Mistral: Ministral 3B",
+ "mistralai/ministral-8b": {
+ id: "mistralai/ministral-8b",
+ name: "Mistral: Ministral 8B",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.04,
- output: 0.04,
+ input: 0.09999999999999999,
+ output: 0.09999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
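This hunk swaps the order of the two Ministral entries (3B now precedes 8B); each model keeps its own pricing. Values such as 0.09999999999999999 here, and 0.39999999999999997 elsewhere in the file, are binary floating-point artifacts of what are presumably $0.10 and $0.40 per-million-token prices. If the table generator wanted to emit the round figures instead, a small normalization step would do; normalizePrice below is a hypothetical sketch, not part of pi-ai.

// Hypothetical helper: round a per-million-token price so that float artifacts
// like 0.09999999999999999 serialize as 0.1. Not an API of @mariozechner/pi-ai.
function normalizePrice(perMillionUsd: number, decimals: number = 6): number {
  return Number(perMillionUsd.toFixed(decimals));
}

normalizePrice(0.09999999999999999); // 0.1
normalizePrice(0.39999999999999997); // 0.4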
@@ -6288,6 +6305,23 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 16384,
  },
+ "meta-llama/llama-3.1-70b-instruct": {
+ id: "meta-llama/llama-3.1-70b-instruct",
+ name: "Meta: Llama 3.1 70B Instruct",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.39999999999999997,
+ output: 0.39999999999999997,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 4096,
+ },
  "meta-llama/llama-3.1-405b-instruct": {
  id: "meta-llama/llama-3.1-405b-instruct",
  name: "Meta: Llama 3.1 405B Instruct",
@@ -6322,23 +6356,6 @@ export const MODELS = {
  contextWindow: 131072,
  maxTokens: 16384,
  },
- "meta-llama/llama-3.1-70b-instruct": {
- id: "meta-llama/llama-3.1-70b-instruct",
- name: "Meta: Llama 3.1 70B Instruct",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
- cost: {
- input: 0.39999999999999997,
- output: 0.39999999999999997,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 131072,
- maxTokens: 4096,
- },
  "mistralai/mistral-nemo": {
  id: "mistralai/mistral-nemo",
  name: "Mistral: Mistral Nemo",
@@ -6356,9 +6373,9 @@ export const MODELS = {
  contextWindow: 131072,
  maxTokens: 16384,
  },
- "openai/gpt-4o-mini-2024-07-18": {
- id: "openai/gpt-4o-mini-2024-07-18",
- name: "OpenAI: GPT-4o-mini (2024-07-18)",
+ "openai/gpt-4o-mini": {
+ id: "openai/gpt-4o-mini",
+ name: "OpenAI: GPT-4o-mini",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -6373,9 +6390,9 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 16384,
  },
- "openai/gpt-4o-mini": {
- id: "openai/gpt-4o-mini",
- name: "OpenAI: GPT-4o-mini",
+ "openai/gpt-4o-mini-2024-07-18": {
+ id: "openai/gpt-4o-mini-2024-07-18",
+ name: "OpenAI: GPT-4o-mini (2024-07-18)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -6475,23 +6492,6 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-4o-2024-05-13": {
- id: "openai/gpt-4o-2024-05-13",
- name: "OpenAI: GPT-4o (2024-05-13)",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 5,
- output: 15,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 128000,
- maxTokens: 4096,
- },
  "openai/gpt-4o": {
  id: "openai/gpt-4o",
  name: "OpenAI: GPT-4o",
@@ -6526,22 +6526,22 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 64000,
  },
- "meta-llama/llama-3-70b-instruct": {
- id: "meta-llama/llama-3-70b-instruct",
- name: "Meta: Llama 3 70B Instruct",
+ "openai/gpt-4o-2024-05-13": {
+ id: "openai/gpt-4o-2024-05-13",
+ name: "OpenAI: GPT-4o (2024-05-13)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.3,
- output: 0.39999999999999997,
+ input: 5,
+ output: 15,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8192,
- maxTokens: 16384,
+ contextWindow: 128000,
+ maxTokens: 4096,
  },
  "meta-llama/llama-3-8b-instruct": {
  id: "meta-llama/llama-3-8b-instruct",
@@ -6560,6 +6560,23 @@ export const MODELS = {
  contextWindow: 8192,
  maxTokens: 16384,
  },
+ "meta-llama/llama-3-70b-instruct": {
+ id: "meta-llama/llama-3-70b-instruct",
+ name: "Meta: Llama 3 70B Instruct",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 0.39999999999999997,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 8192,
+ maxTokens: 16384,
+ },
  "mistralai/mixtral-8x22b-instruct": {
  id: "mistralai/mixtral-8x22b-instruct",
  name: "Mistral: Mixtral 8x22B Instruct",
@@ -6645,38 +6662,38 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo-0613": {
- id: "openai/gpt-3.5-turbo-0613",
- name: "OpenAI: GPT-3.5 Turbo (older v0613)",
+ "openai/gpt-4-turbo-preview": {
+ id: "openai/gpt-4-turbo-preview",
+ name: "OpenAI: GPT-4 Turbo Preview",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 1,
- output: 2,
+ input: 10,
+ output: 30,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 4095,
+ contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-4-turbo-preview": {
- id: "openai/gpt-4-turbo-preview",
- name: "OpenAI: GPT-4 Turbo Preview",
+ "openai/gpt-3.5-turbo-0613": {
+ id: "openai/gpt-3.5-turbo-0613",
+ name: "OpenAI: GPT-3.5 Turbo (older v0613)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 10,
- output: 30,
+ input: 1,
+ output: 2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
+ contextWindow: 4095,
  maxTokens: 4096,
  },
  "mistralai/mistral-tiny": {
@@ -6764,38 +6781,38 @@ export const MODELS = {
  contextWindow: 8191,
  maxTokens: 4096,
  },
- "openai/gpt-4": {
- id: "openai/gpt-4",
- name: "OpenAI: GPT-4",
+ "openai/gpt-3.5-turbo": {
+ id: "openai/gpt-3.5-turbo",
+ name: "OpenAI: GPT-3.5 Turbo",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 30,
- output: 60,
+ input: 0.5,
+ output: 1.5,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8191,
+ contextWindow: 16385,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo": {
- id: "openai/gpt-3.5-turbo",
- name: "OpenAI: GPT-3.5 Turbo",
+ "openai/gpt-4": {
+ id: "openai/gpt-4",
+ name: "OpenAI: GPT-4",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.5,
- output: 1.5,
+ input: 30,
+ output: 60,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 16385,
+ contextWindow: 8191,
  maxTokens: 4096,
  },
  "openrouter/auto": {