@mariozechner/pi-ai 0.7.16 → 0.7.17

This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only.
@@ -359,6 +359,23 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
+ readonly "gemini-3-pro-preview": {
+ id: string;
+ name: string;
+ api: "google-generative-ai";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: ("image" | "text")[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "gemini-2.5-flash": {
  id: string;
  name: string;
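Every entry added or removed in this diff follows the same per-model shape. A minimal sketch of that shape and of a cost estimate over it; the ModelEntry name, the estimateCost helper, and the per-million-token pricing convention are assumptions for illustration, not part of the package:

// Sketch of the per-model shape visible in the declaration above (field names taken from the diff).
interface ModelEntry {
    id: string;
    name: string;
    api: string;
    provider: string;
    baseUrl: string;
    reasoning: boolean;
    input: ("image" | "text")[];
    cost: { input: number; output: number; cacheRead: number; cacheWrite: number };
    contextWindow: number;
    maxTokens: number;
}

// Hypothetical helper: estimate a request's cost, assuming the cost fields are USD per million tokens.
function estimateCost(model: ModelEntry, inputTokens: number, outputTokens: number): number {
    return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}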
@@ -1901,6 +1918,23 @@ export declare const MODELS: {
  };
  };
  readonly openrouter: {
+ readonly "google/gemini-3-pro-preview": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: true;
+ input: ("image" | "text")[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "openrouter/sherlock-dash-alpha": {
  id: string;
  name: string;
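The same Gemini 3 Pro Preview model is also added under the openrouter block, so it can be read straight off the MODELS constant. A minimal sketch, assuming MODELS is importable from the package root (the entrypoint path is an assumption):

import { MODELS } from "@mariozechner/pi-ai";

// New in 0.7.17: Gemini 3 Pro Preview routed through OpenRouter (openai-completions API, reasoning: true).
const gemini3 = MODELS.openrouter["google/gemini-3-pro-preview"];
console.log(gemini3.contextWindow, gemini3.maxTokens, gemini3.cost.input);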
@@ -1952,6 +1986,23 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
+ readonly "openai/gpt-5.1-chat": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: false;
+ input: ("image" | "text")[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "openai/gpt-5.1-codex": {
  id: string;
  name: string;
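The new openai/gpt-5.1-chat entry is declared with reasoning: false, unlike gemini-3-pro-preview, so callers can branch on the declared capability. A sketch, assuming the same root import; the requestOptions helper and its option shape are invented for illustration:

import { MODELS } from "@mariozechner/pi-ai";

const chat = MODELS.openrouter["openai/gpt-5.1-chat"];

// Hypothetical helper: choose request options based on the declared capability flags.
function requestOptions(model: { reasoning: boolean; maxTokens: number }) {
    return model.reasoning
        ? { maxTokens: model.maxTokens, thinking: true } // hypothetical option shape for reasoning models
        : { maxTokens: model.maxTokens };
}

console.log(requestOptions(chat)); // gpt-5.1-chat is declared with reasoning: false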
@@ -2207,40 +2258,6 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "inclusionai/ring-1t": {
- id: string;
- name: string;
- api: "openai-completions";
- provider: string;
- baseUrl: string;
- reasoning: true;
- input: "text"[];
- cost: {
- input: number;
- output: number;
- cacheRead: number;
- cacheWrite: number;
- };
- contextWindow: number;
- maxTokens: number;
- };
- readonly "inclusionai/ling-1t": {
- id: string;
- name: string;
- api: "openai-completions";
- provider: string;
- baseUrl: string;
- reasoning: false;
- input: "text"[];
- cost: {
- input: number;
- output: number;
- cacheRead: number;
- cacheWrite: number;
- };
- contextWindow: number;
- maxTokens: number;
- };
  readonly "openai/o3-deep-research": {
  id: string;
  name: string;
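Because the inclusionai/ring-1t and inclusionai/ling-1t entries are removed from the declaration, code that indexed them directly no longer type-checks against 0.7.17. A guarded lookup keeps such call sites compiling; a sketch under the same import assumption:

import { MODELS } from "@mariozechner/pi-ai";

// A direct MODELS.openrouter["inclusionai/ring-1t"] access is a compile-time error in 0.7.17.
// A runtime guard degrades gracefully instead:
const id = "inclusionai/ring-1t";
if (id in MODELS.openrouter) {
    // Entry still present (e.g. an older pi-ai resolved at runtime).
} else {
    console.warn(`${id} is not declared in this pi-ai version; pick another model.`);
}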
@@ -4808,7 +4825,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-3b": {
+ readonly "mistralai/ministral-8b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4825,7 +4842,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "mistralai/ministral-8b": {
+ readonly "mistralai/ministral-3b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4944,7 +4961,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "cohere/command-r-plus-08-2024": {
+ readonly "cohere/command-r-08-2024": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -4961,7 +4978,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "cohere/command-r-08-2024": {
+ readonly "cohere/command-r-plus-08-2024": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5556,7 +5573,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5573,7 +5590,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4": {
  id: string;
  name: string;
  api: "openai-completions";
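The mistralai, cohere, and gpt-4/gpt-3.5-turbo hunks above only swap the order of adjacent keys inside the declaration; both entries in each pair still exist. Member order in a TypeScript type literal does not change the resulting type, so these hunks are a no-op for consumers. A self-contained illustration (the value types are abbreviated to number for brevity):

// Two type literals that differ only in member order are mutually assignable.
type A = { readonly "mistralai/ministral-3b": number; readonly "mistralai/ministral-8b": number };
type B = { readonly "mistralai/ministral-8b": number; readonly "mistralai/ministral-3b": number };

const sameShape: A extends B ? (B extends A ? true : false) : false = true; // compiles: the order swap is invisible to the type system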