@mariozechner/pi-ai 0.18.0 → 0.18.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/models.generated.d.ts +459 -15
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +526 -82
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +2 -4
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +69 -11
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +1 -0
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +9 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +1 -0
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +3 -1
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +2 -1
package/README.md
CHANGED
@@ -9,6 +9,7 @@ Unified LLM API with automatic model discovery, provider configuration, token an
 - **OpenAI**
 - **Anthropic**
 - **Google**
+- **Mistral**
 - **Groq**
 - **Cerebras**
 - **xAI**
@@ -564,7 +565,7 @@ A **provider** offers models through a specific API. For example:
 - **Anthropic** models use the `anthropic-messages` API
 - **Google** models use the `google-generative-ai` API
 - **OpenAI** models use the `openai-responses` API
-- **xAI, Cerebras, Groq, etc.** models use the `openai-completions` API (OpenAI-compatible)
+- **Mistral, xAI, Cerebras, Groq, etc.** models use the `openai-completions` API (OpenAI-compatible)
 
 ### Querying Providers and Models
 
@@ -1036,6 +1037,7 @@ In Node.js environments, you can set environment variables to avoid passing API
 OPENAI_API_KEY=sk-...
 ANTHROPIC_API_KEY=sk-ant-...
 GEMINI_API_KEY=...
+MISTRAL_API_KEY=...
 GROQ_API_KEY=gsk_...
 CEREBRAS_API_KEY=csk-...
 XAI_API_KEY=xai-...
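The generated model catalog below is what carries the new Mistral support. As a rough sketch of how these additions could be consumed, the snippet reads one of the new entries from the `MODELS` constant declared in `models.generated.d.ts`; it assumes `MODELS` is re-exported from the package root and uses only field names that appear in the declarations below.

```typescript
// Sketch only: inspect one of the Mistral entries added in 0.18.x.
// Assumes the package root re-exports MODELS from models.generated.js.
import { MODELS } from "@mariozechner/pi-ai";

const m = MODELS.mistral["mistral-medium-latest"];

// Fields mirror the generated declarations: id, name, api, provider,
// baseUrl, reasoning, input, cost, contextWindow, maxTokens.
console.log(m.id, m.provider, m.api);        // api is "openai-completions"
console.log(m.contextWindow, m.maxTokens);   // context/output limits
console.log(m.cost.input, m.cost.output);    // pricing fields (numbers)
```

The same lookup works for any of the other keys added under `readonly mistral` in the declarations below.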
package/dist/models.generated.d.ts
CHANGED

@@ -1984,6 +1984,416 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "glm-4.6v": {
+            id: string;
+            name: string;
+            api: "anthropic-messages";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+    };
+    readonly mistral: {
+        readonly "devstral-medium-2507": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-large-2512": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "open-mixtral-8x22b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "ministral-8b-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "pixtral-large-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "ministral-3b-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "pixtral-12b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-medium-2505": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "labs-devstral-small-2512": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "devstral-medium-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "devstral-small-2505": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-medium-2508": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-small-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "magistral-small": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "devstral-small-2507": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "codestral-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "open-mixtral-8x7b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-nemo": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "open-mistral-7b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-large-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-medium-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-large-2411": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "magistral-medium-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
     };
     readonly openrouter: {
         readonly "mistralai/devstral-2512:free": {
@@ -2003,6 +2413,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "mistralai/devstral-2512": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "relace/relace-search": {
             id: string;
             name: string;
@@ -2037,6 +2464,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "nex-agi/deepseek-v3.1-nex-n1:free": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "openai/gpt-5.1-codex-max": {
             id: string;
             name: string;
@@ -5063,7 +5507,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-
+        readonly "mistralai/ministral-8b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5080,7 +5524,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-
+        readonly "mistralai/ministral-3b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5267,7 +5711,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5284,7 +5728,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-405b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5301,7 +5745,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-70b-instruct": {
            id: string;
             name: string;
             api: "openai-completions";
@@ -5454,7 +5898,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o": {
+        readonly "openai/gpt-4o-2024-05-13": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5471,7 +5915,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o
+        readonly "openai/gpt-4o": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5488,7 +5932,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o
+        readonly "openai/gpt-4o:extended": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5505,7 +5949,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5522,7 +5966,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5624,7 +6068,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-3.5-turbo-0613": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5641,7 +6085,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-4-turbo-preview": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5726,7 +6170,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4": {
+        readonly "openai/gpt-4-0314": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5743,7 +6187,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-4": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5760,7 +6204,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-3.5-turbo": {
             id: string;
             name: string;
             api: "openai-completions";