@mariozechner/pi-ai 0.17.0 → 0.18.1
This diff covers the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/README.md +3 -1
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/agent/index.js.map +1 -1
- package/dist/models.generated.d.ts +425 -15
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +492 -82
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +0 -4
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +69 -11
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +1 -0
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +9 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +1 -0
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +3 -1
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +2 -1
package/README.md
CHANGED

@@ -9,6 +9,7 @@ Unified LLM API with automatic model discovery, provider configuration, token an
 - **OpenAI**
 - **Anthropic**
 - **Google**
+- **Mistral**
 - **Groq**
 - **Cerebras**
 - **xAI**
@@ -564,7 +565,7 @@ A **provider** offers models through a specific API. For example:
 - **Anthropic** models use the `anthropic-messages` API
 - **Google** models use the `google-generative-ai` API
 - **OpenAI** models use the `openai-responses` API
-- **xAI, Cerebras, Groq, etc.** models use the `openai-completions` API (OpenAI-compatible)
+- **Mistral, xAI, Cerebras, Groq, etc.** models use the `openai-completions` API (OpenAI-compatible)
 
 ### Querying Providers and Models
 
@@ -1036,6 +1037,7 @@ In Node.js environments, you can set environment variables to avoid passing API
 OPENAI_API_KEY=sk-...
 ANTHROPIC_API_KEY=sk-ant-...
 GEMINI_API_KEY=...
+MISTRAL_API_KEY=...
 GROQ_API_KEY=gsk_...
 CEREBRAS_API_KEY=csk-...
 XAI_API_KEY=xai-...

package/dist/agent/index.d.ts
CHANGED

@@ -1,4 +1,4 @@
 export { agentLoop, agentLoopContinue } from "./agent-loop.js";
 export * from "./tools/index.js";
-export type { AgentContext, AgentEvent, AgentLoopConfig, AgentTool, QueuedMessage } from "./types.js";
+export type { AgentContext, AgentEvent, AgentLoopConfig, AgentTool, AgentToolResult, QueuedMessage } from "./types.js";
 //# sourceMappingURL=index.d.ts.map

package/dist/agent/index.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/agent/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAC/D,cAAc,kBAAkB,CAAC;AACjC,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,eAAe,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC","sourcesContent":["export { agentLoop, agentLoopContinue } from \"./agent-loop.js\";\nexport * from \"./tools/index.js\";\nexport type { AgentContext, AgentEvent, AgentLoopConfig, AgentTool, QueuedMessage } from \"./types.js\";\n"]}
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/agent/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAC/D,cAAc,kBAAkB,CAAC;AACjC,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,eAAe,EAAE,SAAS,EAAE,eAAe,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC","sourcesContent":["export { agentLoop, agentLoopContinue } from \"./agent-loop.js\";\nexport * from \"./tools/index.js\";\nexport type { AgentContext, AgentEvent, AgentLoopConfig, AgentTool, AgentToolResult, QueuedMessage } from \"./types.js\";\n"]}
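
The only API-surface change in the agent entry point is the re-export of `AgentToolResult` alongside the existing agent types. A hedged sketch of how a consumer might pick it up follows; the deep-import path simply mirrors the compiled file shown above, and whether the type is also re-exported from the package root is not visible in this diff.

```typescript
// Assumption: importing from the compiled agent entry point shown in this diff.
import type { AgentTool, AgentToolResult } from "@mariozechner/pi-ai/dist/agent/index.js";

// The concrete shape of AgentToolResult is not part of this diff,
// so it is treated as opaque here.
declare function onToolResult(tool: AgentTool, result: AgentToolResult): void;
```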

package/dist/agent/index.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/agent/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAC/D,cAAc,kBAAkB,CAAC","sourcesContent":["export { agentLoop, agentLoopContinue } from \"./agent-loop.js\";\nexport * from \"./tools/index.js\";\nexport type { AgentContext, AgentEvent, AgentLoopConfig, AgentTool, QueuedMessage } from \"./types.js\";\n"]}
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/agent/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAC/D,cAAc,kBAAkB,CAAC","sourcesContent":["export { agentLoop, agentLoopContinue } from \"./agent-loop.js\";\nexport * from \"./tools/index.js\";\nexport type { AgentContext, AgentEvent, AgentLoopConfig, AgentTool, AgentToolResult, QueuedMessage } from \"./types.js\";\n"]}

package/dist/models.generated.d.ts
CHANGED

@@ -1984,6 +1984,416 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "glm-4.6v": {
+            id: string;
+            name: string;
+            api: "anthropic-messages";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+    };
+    readonly mistral: {
+        readonly "devstral-medium-2507": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-large-2512": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "open-mixtral-8x22b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "ministral-8b-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "pixtral-large-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "ministral-3b-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "pixtral-12b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-medium-2505": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "labs-devstral-small-2512": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "devstral-medium-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "devstral-small-2505": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-medium-2508": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-small-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "magistral-small": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "devstral-small-2507": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "codestral-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "open-mixtral-8x7b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-nemo": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "open-mistral-7b": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-large-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-medium-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "mistral-large-2411": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: false;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "magistral-medium-latest": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
     };
     readonly openrouter: {
         readonly "mistralai/devstral-2512:free": {
@@ -5063,7 +5473,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-
+        readonly "mistralai/ministral-3b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5080,7 +5490,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-
+        readonly "mistralai/ministral-8b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5267,7 +5677,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-405b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5284,7 +5694,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5301,7 +5711,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5454,7 +5864,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o
+        readonly "openai/gpt-4o": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5471,7 +5881,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o": {
+        readonly "openai/gpt-4o:extended": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5488,7 +5898,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4o
+        readonly "openai/gpt-4o-2024-05-13": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5505,7 +5915,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5522,7 +5932,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5624,7 +6034,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-4-turbo-preview": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5641,7 +6051,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-3.5-turbo-0613": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5726,7 +6136,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-4
+        readonly "openai/gpt-4": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5743,7 +6153,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-3.5-turbo": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -5760,7 +6170,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/gpt-
+        readonly "openai/gpt-4-0314": {
             id: string;
             name: string;
             api: "openai-completions";