@mariozechner/pi-ai 0.18.0 → 0.18.2

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1986,6 +1986,416 @@ export const MODELS = {
  contextWindow: 204800,
  maxTokens: 131072,
  },
+ "glm-4.6v": {
+ id: "glm-4.6v",
+ name: "GLM-4.6V",
+ api: "anthropic-messages",
+ provider: "zai",
+ baseUrl: "https://api.z.ai/api/anthropic",
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 0.3,
+ output: 0.9,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 32768,
+ },
+ },
+ mistral: {
+ "devstral-medium-2507": {
+ id: "devstral-medium-2507",
+ name: "Devstral Medium",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.4,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "mistral-large-2512": {
+ id: "mistral-large-2512",
+ name: "Mistral Large 3",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.5,
+ output: 1.5,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "open-mixtral-8x22b": {
+ id: "open-mixtral-8x22b",
+ name: "Mixtral 8x22B",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 2,
+ output: 6,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 64000,
+ maxTokens: 64000,
+ },
+ "ministral-8b-latest": {
+ id: "ministral-8b-latest",
+ name: "Ministral 8B",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.1,
+ output: 0.1,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "pixtral-large-latest": {
+ id: "pixtral-large-latest",
+ name: "Pixtral Large",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 2,
+ output: 6,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "ministral-3b-latest": {
+ id: "ministral-3b-latest",
+ name: "Ministral 3B",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.04,
+ output: 0.04,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "pixtral-12b": {
+ id: "pixtral-12b",
+ name: "Pixtral 12B",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.15,
+ output: 0.15,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "mistral-medium-2505": {
+ id: "mistral-medium-2505",
+ name: "Mistral Medium 3",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.4,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 131072,
+ },
+ "labs-devstral-small-2512": {
+ id: "labs-devstral-small-2512",
+ name: "Devstral Small 2",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.1,
+ output: 0.3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 256000,
+ maxTokens: 256000,
+ },
+ "devstral-medium-latest": {
+ id: "devstral-medium-latest",
+ name: "Devstral 2",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.4,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "devstral-small-2505": {
+ id: "devstral-small-2505",
+ name: "Devstral Small 2505",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.1,
+ output: 0.3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "mistral-medium-2508": {
+ id: "mistral-medium-2508",
+ name: "Mistral Medium 3.1",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.4,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "mistral-small-latest": {
+ id: "mistral-small-latest",
+ name: "Mistral Small",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.1,
+ output: 0.3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 16384,
+ },
+ "magistral-small": {
+ id: "magistral-small",
+ name: "Magistral Small",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0.5,
+ output: 1.5,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "devstral-small-2507": {
+ id: "devstral-small-2507",
+ name: "Devstral Small",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.1,
+ output: 0.3,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "codestral-latest": {
+ id: "codestral-latest",
+ name: "Codestral",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.3,
+ output: 0.9,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 256000,
+ maxTokens: 4096,
+ },
+ "open-mixtral-8x7b": {
+ id: "open-mixtral-8x7b",
+ name: "Mixtral 8x7B",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.7,
+ output: 0.7,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 32000,
+ maxTokens: 32000,
+ },
+ "mistral-nemo": {
+ id: "mistral-nemo",
+ name: "Mistral Nemo",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.15,
+ output: 0.15,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 128000,
+ },
+ "open-mistral-7b": {
+ id: "open-mistral-7b",
+ name: "Mistral 7B",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.25,
+ output: 0.25,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 8000,
+ maxTokens: 8000,
+ },
+ "mistral-large-latest": {
+ id: "mistral-large-latest",
+ name: "Mistral Large",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.5,
+ output: 1.5,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 262144,
+ },
+ "mistral-medium-latest": {
+ id: "mistral-medium-latest",
+ name: "Mistral Medium",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.4,
+ output: 2,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 16384,
+ },
+ "mistral-large-2411": {
+ id: "mistral-large-2411",
+ name: "Mistral Large 2.1",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 2,
+ output: 6,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 16384,
+ },
+ "magistral-medium-latest": {
+ id: "magistral-medium-latest",
+ name: "Magistral Medium",
+ api: "openai-completions",
+ provider: "mistral",
+ baseUrl: "https://api.mistral.ai/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 2,
+ output: 5,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 16384,
+ },
  },
  openrouter: {
  "mistralai/devstral-2512:free": {
@@ -2005,6 +2415,23 @@ export const MODELS = {
  contextWindow: 262144,
  maxTokens: 4096,
  },
+ "mistralai/devstral-2512": {
+ id: "mistralai/devstral-2512",
+ name: "Mistral: Devstral 2 2512",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.15,
+ output: 0.6,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 262144,
+ maxTokens: 65536,
+ },
  "relace/relace-search": {
  id: "relace/relace-search",
  name: "Relace: Relace Search",
@@ -2039,6 +2466,23 @@ export const MODELS = {
  contextWindow: 131072,
  maxTokens: 24000,
  },
+ "nex-agi/deepseek-v3.1-nex-n1:free": {
+ id: "nex-agi/deepseek-v3.1-nex-n1:free",
+ name: "Nex AGI: DeepSeek V3.1 Nex N1 (free)",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: true,
+ input: ["text"],
+ cost: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 163840,
+ },
  "openai/gpt-5.1-codex-max": {
  id: "openai/gpt-5.1-codex-max",
  name: "OpenAI: GPT-5.1-Codex-Max",
@@ -4445,13 +4889,13 @@ export const MODELS = {
  reasoning: false,
  input: ["text", "image"],
  cost: {
- input: 0.136,
- output: 0.6799999999999999,
+ input: 0.15,
+ output: 0.6,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 1048576,
- maxTokens: 8192,
+ maxTokens: 16384,
  },
  "meta-llama/llama-4-scout": {
  id: "meta-llama/llama-4-scout",
@@ -5065,34 +5509,34 @@ export const MODELS = {
  contextWindow: 200000,
  maxTokens: 8192,
  },
- "mistralai/ministral-3b": {
- id: "mistralai/ministral-3b",
- name: "Mistral: Ministral 3B",
+ "mistralai/ministral-8b": {
+ id: "mistralai/ministral-8b",
+ name: "Mistral: Ministral 8B",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.04,
- output: 0.04,
+ input: 0.09999999999999999,
+ output: 0.09999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
  maxTokens: 4096,
  },
- "mistralai/ministral-8b": {
- id: "mistralai/ministral-8b",
- name: "Mistral: Ministral 8B",
+ "mistralai/ministral-3b": {
+ id: "mistralai/ministral-3b",
+ name: "Mistral: Ministral 3B",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.09999999999999999,
- output: 0.09999999999999999,
+ input: 0.04,
+ output: 0.04,
  cacheRead: 0,
  cacheWrite: 0,
  },
@@ -5269,6 +5713,23 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 16384,
  },
+ "meta-llama/llama-3.1-8b-instruct": {
+ id: "meta-llama/llama-3.1-8b-instruct",
+ name: "Meta: Llama 3.1 8B Instruct",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text"],
+ cost: {
+ input: 0.02,
+ output: 0.03,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 131072,
+ maxTokens: 16384,
+ },
  "meta-llama/llama-3.1-405b-instruct": {
  id: "meta-llama/llama-3.1-405b-instruct",
  name: "Meta: Llama 3.1 405B Instruct",
@@ -5303,23 +5764,6 @@ export const MODELS = {
  contextWindow: 131072,
  maxTokens: 4096,
  },
- "meta-llama/llama-3.1-8b-instruct": {
- id: "meta-llama/llama-3.1-8b-instruct",
- name: "Meta: Llama 3.1 8B Instruct",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
- cost: {
- input: 0.02,
- output: 0.03,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 131072,
- maxTokens: 16384,
- },
  "mistralai/mistral-nemo": {
  id: "mistralai/mistral-nemo",
  name: "Mistral: Mistral Nemo",
@@ -5456,6 +5900,23 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
+ "openai/gpt-4o-2024-05-13": {
+ id: "openai/gpt-4o-2024-05-13",
+ name: "OpenAI: GPT-4o (2024-05-13)",
+ api: "openai-completions",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 5,
+ output: 15,
+ cacheRead: 0,
+ cacheWrite: 0,
+ },
+ contextWindow: 128000,
+ maxTokens: 4096,
+ },
  "openai/gpt-4o": {
  id: "openai/gpt-4o",
  name: "OpenAI: GPT-4o",
@@ -5490,22 +5951,22 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 64000,
  },
- "openai/gpt-4o-2024-05-13": {
- id: "openai/gpt-4o-2024-05-13",
- name: "OpenAI: GPT-4o (2024-05-13)",
+ "meta-llama/llama-3-70b-instruct": {
+ id: "meta-llama/llama-3-70b-instruct",
+ name: "Meta: Llama 3 70B Instruct",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 5,
- output: 15,
+ input: 0.3,
+ output: 0.39999999999999997,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 4096,
+ contextWindow: 8192,
+ maxTokens: 16384,
  },
  "meta-llama/llama-3-8b-instruct": {
  id: "meta-llama/llama-3-8b-instruct",
@@ -5524,23 +5985,6 @@ export const MODELS = {
  contextWindow: 8192,
  maxTokens: 16384,
  },
- "meta-llama/llama-3-70b-instruct": {
- id: "meta-llama/llama-3-70b-instruct",
- name: "Meta: Llama 3 70B Instruct",
- api: "openai-completions",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
- cost: {
- input: 0.3,
- output: 0.39999999999999997,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 8192,
- maxTokens: 16384,
- },
  "mistralai/mixtral-8x22b-instruct": {
  id: "mistralai/mixtral-8x22b-instruct",
  name: "Mistral: Mixtral 8x22B Instruct",
@@ -5626,38 +6070,38 @@ export const MODELS = {
  contextWindow: 128000,
  maxTokens: 4096,
  },
- "openai/gpt-4-turbo-preview": {
- id: "openai/gpt-4-turbo-preview",
- name: "OpenAI: GPT-4 Turbo Preview",
+ "openai/gpt-3.5-turbo-0613": {
+ id: "openai/gpt-3.5-turbo-0613",
+ name: "OpenAI: GPT-3.5 Turbo (older v0613)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 10,
- output: 30,
+ input: 1,
+ output: 2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
+ contextWindow: 4095,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo-0613": {
- id: "openai/gpt-3.5-turbo-0613",
- name: "OpenAI: GPT-3.5 Turbo (older v0613)",
+ "openai/gpt-4-turbo-preview": {
+ id: "openai/gpt-4-turbo-preview",
+ name: "OpenAI: GPT-4 Turbo Preview",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 1,
- output: 2,
+ input: 10,
+ output: 30,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 4095,
+ contextWindow: 128000,
  maxTokens: 4096,
  },
  "mistralai/mistral-tiny": {
@@ -5728,9 +6172,9 @@ export const MODELS = {
  contextWindow: 16385,
  maxTokens: 4096,
  },
- "openai/gpt-4": {
- id: "openai/gpt-4",
- name: "OpenAI: GPT-4",
+ "openai/gpt-4-0314": {
+ id: "openai/gpt-4-0314",
+ name: "OpenAI: GPT-4 (older v0314)",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
@@ -5745,38 +6189,38 @@ export const MODELS = {
  contextWindow: 8191,
  maxTokens: 4096,
  },
- "openai/gpt-3.5-turbo": {
- id: "openai/gpt-3.5-turbo",
- name: "OpenAI: GPT-3.5 Turbo",
+ "openai/gpt-4": {
+ id: "openai/gpt-4",
+ name: "OpenAI: GPT-4",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.5,
- output: 1.5,
+ input: 30,
+ output: 60,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 16385,
+ contextWindow: 8191,
  maxTokens: 4096,
  },
- "openai/gpt-4-0314": {
- id: "openai/gpt-4-0314",
- name: "OpenAI: GPT-4 (older v0314)",
+ "openai/gpt-3.5-turbo": {
+ id: "openai/gpt-3.5-turbo",
+ name: "OpenAI: GPT-3.5 Turbo",
  api: "openai-completions",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 30,
- output: 60,
+ input: 0.5,
+ output: 1.5,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8191,
+ contextWindow: 16385,
  maxTokens: 4096,
  },
  "openrouter/auto": {