@mariozechner/pi-ai 0.17.0 → 0.18.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/agent/index.js.map +1 -1
- package/dist/models.generated.d.ts +425 -15
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +492 -82
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +0 -4
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +69 -11
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +1 -0
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +9 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +1 -0
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +3 -1
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +2 -1
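
Most of this release is new entries in the generated MODELS registry (a zai "glm-4.6v" entry plus a full mistral provider block) and corrected cost/context metadata for several openrouter entries, as shown in the diff below. The following is a minimal sketch of how such an entry might be consumed; it assumes MODELS is re-exported from the package root and that the cost fields are USD per million tokens — neither of which this diff confirms.

// Hedged sketch, not the package's documented API: assumes MODELS is
// re-exported from the package root and that cost fields are USD per
// million tokens.
import { MODELS } from "@mariozechner/pi-ai";

// One of the mistral entries added in this release.
const model = MODELS.mistral["mistral-small-latest"];

// Rough per-request cost estimate from the entry's cost table.
function estimateCostUsd(inputTokens: number, outputTokens: number): number {
    return (
        (inputTokens / 1_000_000) * model.cost.input +
        (outputTokens / 1_000_000) * model.cost.output
    );
}

console.log(model.contextWindow); // 128000
console.log(estimateCostUsd(10_000, 2_000)); // 0.0016 under the assumed units
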
package/dist/models.generated.js
CHANGED
@@ -1986,6 +1986,416 @@ export const MODELS = {
             contextWindow: 204800,
             maxTokens: 131072,
         },
+        "glm-4.6v": {
+            id: "glm-4.6v",
+            name: "GLM-4.6V",
+            api: "anthropic-messages",
+            provider: "zai",
+            baseUrl: "https://api.z.ai/api/anthropic",
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 0.3,
+                output: 0.9,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 32768,
+        },
+    },
+    mistral: {
+        "devstral-medium-2507": {
+            id: "devstral-medium-2507",
+            name: "Devstral Medium",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.4,
+                output: 2,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "mistral-large-2512": {
+            id: "mistral-large-2512",
+            name: "Mistral Large 3",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.5,
+                output: 1.5,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 262144,
+        },
+        "open-mixtral-8x22b": {
+            id: "open-mixtral-8x22b",
+            name: "Mixtral 8x22B",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 2,
+                output: 6,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 64000,
+            maxTokens: 64000,
+        },
+        "ministral-8b-latest": {
+            id: "ministral-8b-latest",
+            name: "Ministral 8B",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.1,
+                output: 0.1,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "pixtral-large-latest": {
+            id: "pixtral-large-latest",
+            name: "Pixtral Large",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 2,
+                output: 6,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "ministral-3b-latest": {
+            id: "ministral-3b-latest",
+            name: "Ministral 3B",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.04,
+                output: 0.04,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "pixtral-12b": {
+            id: "pixtral-12b",
+            name: "Pixtral 12B",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.15,
+                output: 0.15,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "mistral-medium-2505": {
+            id: "mistral-medium-2505",
+            name: "Mistral Medium 3",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.4,
+                output: 2,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 131072,
+            maxTokens: 131072,
+        },
+        "labs-devstral-small-2512": {
+            id: "labs-devstral-small-2512",
+            name: "Devstral Small 2",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.1,
+                output: 0.3,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 256000,
+            maxTokens: 256000,
+        },
+        "devstral-medium-latest": {
+            id: "devstral-medium-latest",
+            name: "Devstral 2",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.4,
+                output: 2,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 262144,
+        },
+        "devstral-small-2505": {
+            id: "devstral-small-2505",
+            name: "Devstral Small 2505",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.1,
+                output: 0.3,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "mistral-medium-2508": {
+            id: "mistral-medium-2508",
+            name: "Mistral Medium 3.1",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.4,
+                output: 2,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 262144,
+        },
+        "mistral-small-latest": {
+            id: "mistral-small-latest",
+            name: "Mistral Small",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.1,
+                output: 0.3,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 16384,
+        },
+        "magistral-small": {
+            id: "magistral-small",
+            name: "Magistral Small",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: true,
+            input: ["text"],
+            cost: {
+                input: 0.5,
+                output: 1.5,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "devstral-small-2507": {
+            id: "devstral-small-2507",
+            name: "Devstral Small",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.1,
+                output: 0.3,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "codestral-latest": {
+            id: "codestral-latest",
+            name: "Codestral",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.3,
+                output: 0.9,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 256000,
+            maxTokens: 4096,
+        },
+        "open-mixtral-8x7b": {
+            id: "open-mixtral-8x7b",
+            name: "Mixtral 8x7B",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.7,
+                output: 0.7,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 32000,
+            maxTokens: 32000,
+        },
+        "mistral-nemo": {
+            id: "mistral-nemo",
+            name: "Mistral Nemo",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.15,
+                output: 0.15,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 128000,
+        },
+        "open-mistral-7b": {
+            id: "open-mistral-7b",
+            name: "Mistral 7B",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.25,
+                output: 0.25,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 8000,
+            maxTokens: 8000,
+        },
+        "mistral-large-latest": {
+            id: "mistral-large-latest",
+            name: "Mistral Large",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.5,
+                output: 1.5,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 262144,
+        },
+        "mistral-medium-latest": {
+            id: "mistral-medium-latest",
+            name: "Mistral Medium",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text", "image"],
+            cost: {
+                input: 0.4,
+                output: 2,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 16384,
+        },
+        "mistral-large-2411": {
+            id: "mistral-large-2411",
+            name: "Mistral Large 2.1",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 2,
+                output: 6,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 131072,
+            maxTokens: 16384,
+        },
+        "magistral-medium-latest": {
+            id: "magistral-medium-latest",
+            name: "Magistral Medium",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: true,
+            input: ["text"],
+            cost: {
+                input: 2,
+                output: 5,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 16384,
+        },
     },
     openrouter: {
         "mistralai/devstral-2512:free": {
@@ -4445,13 +4855,13 @@ export const MODELS = {
             reasoning: false,
             input: ["text", "image"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.15,
+                output: 0.6,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 1048576,
-            maxTokens:
+            maxTokens: 16384,
         },
         "meta-llama/llama-4-scout": {
             id: "meta-llama/llama-4-scout",
@@ -5065,34 +5475,34 @@ export const MODELS = {
             contextWindow: 200000,
             maxTokens: 8192,
         },
-        "mistralai/ministral-
-            id: "mistralai/ministral-
-            name: "Mistral: Ministral
+        "mistralai/ministral-3b": {
+            id: "mistralai/ministral-3b",
+            name: "Mistral: Ministral 3B",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.04,
+                output: 0.04,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
             contextWindow: 131072,
             maxTokens: 4096,
         },
-        "mistralai/ministral-
-            id: "mistralai/ministral-
-            name: "Mistral: Ministral
+        "mistralai/ministral-8b": {
+            id: "mistralai/ministral-8b",
+            name: "Mistral: Ministral 8B",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.09999999999999999,
+                output: 0.09999999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
@@ -5269,23 +5679,6 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 16384,
         },
-        "meta-llama/llama-3.1-8b-instruct": {
-            id: "meta-llama/llama-3.1-8b-instruct",
-            name: "Meta: Llama 3.1 8B Instruct",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text"],
-            cost: {
-                input: 0.02,
-                output: 0.03,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 131072,
-            maxTokens: 16384,
-        },
         "meta-llama/llama-3.1-405b-instruct": {
             id: "meta-llama/llama-3.1-405b-instruct",
             name: "Meta: Llama 3.1 405B Instruct",
@@ -5320,6 +5713,23 @@ export const MODELS = {
             contextWindow: 131072,
             maxTokens: 4096,
         },
+        "meta-llama/llama-3.1-8b-instruct": {
+            id: "meta-llama/llama-3.1-8b-instruct",
+            name: "Meta: Llama 3.1 8B Instruct",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.02,
+                output: 0.03,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 131072,
+            maxTokens: 16384,
+        },
         "mistralai/mistral-nemo": {
             id: "mistralai/mistral-nemo",
             name: "Mistral: Mistral Nemo",
@@ -5456,23 +5866,6 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 4096,
         },
-        "openai/gpt-4o-2024-05-13": {
-            id: "openai/gpt-4o-2024-05-13",
-            name: "OpenAI: GPT-4o (2024-05-13)",
-            api: "openai-completions",
-            provider: "openrouter",
-            baseUrl: "https://openrouter.ai/api/v1",
-            reasoning: false,
-            input: ["text", "image"],
-            cost: {
-                input: 5,
-                output: 15,
-                cacheRead: 0,
-                cacheWrite: 0,
-            },
-            contextWindow: 128000,
-            maxTokens: 4096,
-        },
         "openai/gpt-4o": {
             id: "openai/gpt-4o",
             name: "OpenAI: GPT-4o",
@@ -5507,22 +5900,22 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 64000,
         },
-        "
-            id: "
-            name: "
+        "openai/gpt-4o-2024-05-13": {
+            id: "openai/gpt-4o-2024-05-13",
+            name: "OpenAI: GPT-4o (2024-05-13)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
-            input: ["text"],
+            input: ["text", "image"],
             cost: {
-                input:
-                output:
+                input: 5,
+                output: 15,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 128000,
+            maxTokens: 4096,
         },
         "meta-llama/llama-3-8b-instruct": {
             id: "meta-llama/llama-3-8b-instruct",
@@ -5541,6 +5934,23 @@ export const MODELS = {
             contextWindow: 8192,
             maxTokens: 16384,
         },
+        "meta-llama/llama-3-70b-instruct": {
+            id: "meta-llama/llama-3-70b-instruct",
+            name: "Meta: Llama 3 70B Instruct",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0.3,
+                output: 0.39999999999999997,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 8192,
+            maxTokens: 16384,
+        },
         "mistralai/mixtral-8x22b-instruct": {
             id: "mistralai/mixtral-8x22b-instruct",
             name: "Mistral: Mixtral 8x22B Instruct",
@@ -5626,38 +6036,38 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-4-turbo-preview": {
+            id: "openai/gpt-4-turbo-preview",
+            name: "OpenAI: GPT-4 Turbo Preview",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 10,
+                output: 30,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 128000,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-3.5-turbo-0613": {
+            id: "openai/gpt-3.5-turbo-0613",
+            name: "OpenAI: GPT-3.5 Turbo (older v0613)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 1,
+                output: 2,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 4095,
             maxTokens: 4096,
         },
         "mistralai/mistral-tiny": {
@@ -5728,9 +6138,9 @@ export const MODELS = {
             contextWindow: 16385,
             maxTokens: 4096,
         },
-        "openai/gpt-4
-            id: "openai/gpt-4
-            name: "OpenAI: GPT-4
+        "openai/gpt-4": {
+            id: "openai/gpt-4",
+            name: "OpenAI: GPT-4",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -5745,38 +6155,38 @@ export const MODELS = {
             contextWindow: 8191,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-3.5-turbo": {
+            id: "openai/gpt-3.5-turbo",
+            name: "OpenAI: GPT-3.5 Turbo",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 0.5,
+                output: 1.5,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 16385,
             maxTokens: 4096,
         },
-        "openai/gpt-
-            id: "openai/gpt-
-            name: "OpenAI: GPT-
+        "openai/gpt-4-0314": {
+            id: "openai/gpt-4-0314",
+            name: "OpenAI: GPT-4 (older v0314)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 30,
+                output: 60,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
+            contextWindow: 8191,
             maxTokens: 4096,
         },
         "openrouter/auto": {