@mariozechner/pi-ai 0.18.1 → 0.18.2

This diff compares the publicly released contents of the two package versions as published to their registry. It is provided for informational purposes only and reflects the changes between those versions.
@@ -2413,6 +2413,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "mistralai/devstral-2512": {
+        id: string;
+        name: string;
+        api: "openai-completions";
+        provider: string;
+        baseUrl: string;
+        reasoning: false;
+        input: "text"[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "relace/relace-search": {
         id: string;
         name: string;
@@ -2447,6 +2464,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "nex-agi/deepseek-v3.1-nex-n1:free": {
+        id: string;
+        name: string;
+        api: "openai-completions";
+        provider: string;
+        baseUrl: string;
+        reasoning: true;
+        input: "text"[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "openai/gpt-5.1-codex-max": {
         id: string;
         name: string;
@@ -5473,7 +5507,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "mistralai/ministral-3b": {
+    readonly "mistralai/ministral-8b": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5490,7 +5524,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "mistralai/ministral-8b": {
+    readonly "mistralai/ministral-3b": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5677,7 +5711,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-405b-instruct": {
+    readonly "meta-llama/llama-3.1-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5694,7 +5728,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-70b-instruct": {
+    readonly "meta-llama/llama-3.1-405b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5711,7 +5745,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3.1-8b-instruct": {
+    readonly "meta-llama/llama-3.1-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5864,7 +5898,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o": {
+    readonly "openai/gpt-4o-2024-05-13": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5881,7 +5915,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o:extended": {
+    readonly "openai/gpt-4o": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5898,7 +5932,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4o-2024-05-13": {
+    readonly "openai/gpt-4o:extended": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5915,7 +5949,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-8b-instruct": {
+    readonly "meta-llama/llama-3-70b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -5932,7 +5966,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "meta-llama/llama-3-70b-instruct": {
+    readonly "meta-llama/llama-3-8b-instruct": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6034,7 +6068,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4-turbo-preview": {
+    readonly "openai/gpt-3.5-turbo-0613": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6051,7 +6085,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-3.5-turbo-0613": {
+    readonly "openai/gpt-4-turbo-preview": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6136,7 +6170,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4": {
+    readonly "openai/gpt-4-0314": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6153,7 +6187,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-3.5-turbo": {
+    readonly "openai/gpt-4": {
         id: string;
         name: string;
         api: "openai-completions";
@@ -6170,7 +6204,7 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "openai/gpt-4-0314": {
+    readonly "openai/gpt-3.5-turbo": {
         id: string;
         name: string;
         api: "openai-completions";
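For context, below is a minimal TypeScript sketch of how the MODELS table declared above might be consumed. It assumes MODELS is importable from the package root "@mariozechner/pi-ai" (the diff only shows the .d.ts declaration, not the export path) and uses only the fields visible in this diff, keyed by one of the entries added in 0.18.2.

// Minimal sketch, assuming MODELS is exported from the package root as declared above.
import { MODELS } from "@mariozechner/pi-ai";

// "mistralai/devstral-2512" is one of the entries added in 0.18.2; its fields
// (id, provider, baseUrl, cost, contextWindow, maxTokens, ...) follow the declaration in the diff.
const devstral = MODELS["mistralai/devstral-2512"];

console.log(`${devstral.id} via ${devstral.provider} at ${devstral.baseUrl}`);
console.log(`context window: ${devstral.contextWindow}, max output tokens: ${devstral.maxTokens}`);
console.log(`cost: input ${devstral.cost.input}, output ${devstral.cost.output}`);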