@mariozechner/pi-ai 0.5.24 → 0.5.26

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
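The file being compared is the package's generated model catalog (the output of scripts/generate-models.ts). For orientation, below is a minimal sketch of the entry shape visible in the diff and of how such an entry might be consumed; the Model interface name, the estimateCostUsd helper, and the per-million-token unit for the cost fields are illustrative assumptions, not the package's actual API.

// Shape of one catalog entry, taken from the fields visible in the diff below
// (illustrative only; not the package's exported types).
interface Model {
  id: string;
  name: string;
  provider: string;
  baseUrl?: string; // present for the OpenAI-compatible providers (groq, cerebras, openrouter)
  reasoning: boolean;
  input: ("text" | "image")[];
  cost: {
    input: number;      // assumed USD per million input tokens
    output: number;     // assumed USD per million output tokens
    cacheRead: number;  // assumed USD per million cache-read tokens
    cacheWrite: number; // assumed USD per million cache-write tokens
  };
  contextWindow: number;
  maxTokens: number;
}

// Hypothetical helper: estimate the dollar cost of one request from token counts.
function estimateCostUsd(
  model: Model,
  usage: { input: number; output: number; cacheRead?: number; cacheWrite?: number },
): number {
  const per = (pricePerMillion: number, tokens = 0) => (pricePerMillion / 1_000_000) * tokens;
  return (
    per(model.cost.input, usage.input) +
    per(model.cost.output, usage.output) +
    per(model.cost.cacheRead, usage.cacheRead) +
    per(model.cost.cacheWrite, usage.cacheWrite)
  );
}

Under that reading, a lookup such as PROVIDERS.anthropic.models["claude-sonnet-4-20250514"] (an entry added in 0.5.26 per the diff) would yield cost.input = 3 and cost.output = 15.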
@@ -1,1238 +1,1273 @@
1
1
  // This file is auto-generated by scripts/generate-models.ts
2
2
  // Do not edit manually - run 'npm run generate-models' to update
3
3
  export const PROVIDERS = {
4
- groq: {
4
+ anthropic: {
5
5
  models: {
6
- "llama-3.1-8b-instant": {
7
- id: "llama-3.1-8b-instant",
8
- name: "Llama 3.1 8B Instant",
9
- provider: "groq",
10
- baseUrl: "https://api.groq.com/openai/v1",
11
- reasoning: false,
12
- input: ["text"],
6
+ "claude-3-7-sonnet-20250219": {
7
+ id: "claude-3-7-sonnet-20250219",
8
+ name: "Claude Sonnet 3.7",
9
+ provider: "anthropic",
10
+ reasoning: true,
11
+ input: ["text", "image"],
13
12
  cost: {
14
- input: 0.05,
15
- output: 0.08,
16
- cacheRead: 0,
17
- cacheWrite: 0,
13
+ input: 3,
14
+ output: 15,
15
+ cacheRead: 0.3,
16
+ cacheWrite: 3.75,
18
17
  },
19
- contextWindow: 131072,
20
- maxTokens: 8192,
18
+ contextWindow: 200000,
19
+ maxTokens: 64000,
21
20
  },
22
- "qwen-qwq-32b": {
23
- id: "qwen-qwq-32b",
24
- name: "Qwen QwQ 32B",
25
- provider: "groq",
26
- baseUrl: "https://api.groq.com/openai/v1",
21
+ "claude-opus-4-1-20250805": {
22
+ id: "claude-opus-4-1-20250805",
23
+ name: "Claude Opus 4.1",
24
+ provider: "anthropic",
27
25
  reasoning: true,
28
- input: ["text"],
26
+ input: ["text", "image"],
29
27
  cost: {
30
- input: 0.29,
31
- output: 0.39,
32
- cacheRead: 0,
33
- cacheWrite: 0,
28
+ input: 15,
29
+ output: 75,
30
+ cacheRead: 1.5,
31
+ cacheWrite: 18.75,
34
32
  },
35
- contextWindow: 131072,
36
- maxTokens: 16384,
33
+ contextWindow: 200000,
34
+ maxTokens: 32000,
37
35
  },
38
- "llama3-70b-8192": {
39
- id: "llama3-70b-8192",
40
- name: "Llama 3 70B",
41
- provider: "groq",
42
- baseUrl: "https://api.groq.com/openai/v1",
36
+ "claude-3-haiku-20240307": {
37
+ id: "claude-3-haiku-20240307",
38
+ name: "Claude Haiku 3",
39
+ provider: "anthropic",
43
40
  reasoning: false,
44
- input: ["text"],
41
+ input: ["text", "image"],
45
42
  cost: {
46
- input: 0.59,
47
- output: 0.79,
48
- cacheRead: 0,
49
- cacheWrite: 0,
43
+ input: 0.25,
44
+ output: 1.25,
45
+ cacheRead: 0.03,
46
+ cacheWrite: 0.3,
50
47
  },
51
- contextWindow: 8192,
48
+ contextWindow: 200000,
49
+ maxTokens: 4096,
50
+ },
51
+ "claude-3-5-haiku-20241022": {
52
+ id: "claude-3-5-haiku-20241022",
53
+ name: "Claude Haiku 3.5",
54
+ provider: "anthropic",
55
+ reasoning: false,
56
+ input: ["text", "image"],
57
+ cost: {
58
+ input: 0.8,
59
+ output: 4,
60
+ cacheRead: 0.08,
61
+ cacheWrite: 1,
62
+ },
63
+ contextWindow: 200000,
52
64
  maxTokens: 8192,
53
65
  },
54
- "deepseek-r1-distill-llama-70b": {
55
- id: "deepseek-r1-distill-llama-70b",
56
- name: "DeepSeek R1 Distill Llama 70B",
57
- provider: "groq",
58
- baseUrl: "https://api.groq.com/openai/v1",
66
+ "claude-opus-4-20250514": {
67
+ id: "claude-opus-4-20250514",
68
+ name: "Claude Opus 4",
69
+ provider: "anthropic",
59
70
  reasoning: true,
60
- input: ["text"],
71
+ input: ["text", "image"],
61
72
  cost: {
62
- input: 0.75,
63
- output: 0.99,
64
- cacheRead: 0,
65
- cacheWrite: 0,
73
+ input: 15,
74
+ output: 75,
75
+ cacheRead: 1.5,
76
+ cacheWrite: 18.75,
66
77
  },
67
- contextWindow: 131072,
68
- maxTokens: 8192,
78
+ contextWindow: 200000,
79
+ maxTokens: 32000,
69
80
  },
70
- "llama3-8b-8192": {
71
- id: "llama3-8b-8192",
72
- name: "Llama 3 8B",
73
- provider: "groq",
74
- baseUrl: "https://api.groq.com/openai/v1",
81
+ "claude-3-5-sonnet-20241022": {
82
+ id: "claude-3-5-sonnet-20241022",
83
+ name: "Claude Sonnet 3.5 v2",
84
+ provider: "anthropic",
75
85
  reasoning: false,
76
- input: ["text"],
86
+ input: ["text", "image"],
77
87
  cost: {
78
- input: 0.05,
79
- output: 0.08,
80
- cacheRead: 0,
81
- cacheWrite: 0,
88
+ input: 3,
89
+ output: 15,
90
+ cacheRead: 0.3,
91
+ cacheWrite: 3.75,
82
92
  },
83
- contextWindow: 8192,
93
+ contextWindow: 200000,
84
94
  maxTokens: 8192,
85
95
  },
86
- "gemma2-9b-it": {
87
- id: "gemma2-9b-it",
88
- name: "Gemma 2 9B",
89
- provider: "groq",
90
- baseUrl: "https://api.groq.com/openai/v1",
96
+ "claude-3-5-sonnet-20240620": {
97
+ id: "claude-3-5-sonnet-20240620",
98
+ name: "Claude Sonnet 3.5",
99
+ provider: "anthropic",
91
100
  reasoning: false,
92
- input: ["text"],
101
+ input: ["text", "image"],
93
102
  cost: {
94
- input: 0.2,
95
- output: 0.2,
96
- cacheRead: 0,
97
- cacheWrite: 0,
103
+ input: 3,
104
+ output: 15,
105
+ cacheRead: 0.3,
106
+ cacheWrite: 3.75,
98
107
  },
99
- contextWindow: 8192,
108
+ contextWindow: 200000,
100
109
  maxTokens: 8192,
101
110
  },
102
- "llama-3.3-70b-versatile": {
103
- id: "llama-3.3-70b-versatile",
104
- name: "Llama 3.3 70B Versatile",
105
- provider: "groq",
106
- baseUrl: "https://api.groq.com/openai/v1",
111
+ "claude-3-sonnet-20240229": {
112
+ id: "claude-3-sonnet-20240229",
113
+ name: "Claude Sonnet 3",
114
+ provider: "anthropic",
107
115
  reasoning: false,
108
- input: ["text"],
116
+ input: ["text", "image"],
109
117
  cost: {
110
- input: 0.59,
111
- output: 0.79,
112
- cacheRead: 0,
113
- cacheWrite: 0,
118
+ input: 3,
119
+ output: 15,
120
+ cacheRead: 0.3,
121
+ cacheWrite: 0.3,
114
122
  },
115
- contextWindow: 131072,
116
- maxTokens: 32768,
123
+ contextWindow: 200000,
124
+ maxTokens: 4096,
117
125
  },
118
- "mistral-saba-24b": {
119
- id: "mistral-saba-24b",
120
- name: "Mistral Saba 24B",
121
- provider: "groq",
122
- baseUrl: "https://api.groq.com/openai/v1",
126
+ "claude-sonnet-4-20250514": {
127
+ id: "claude-sonnet-4-20250514",
128
+ name: "Claude Sonnet 4",
129
+ provider: "anthropic",
130
+ reasoning: true,
131
+ input: ["text", "image"],
132
+ cost: {
133
+ input: 3,
134
+ output: 15,
135
+ cacheRead: 0.3,
136
+ cacheWrite: 3.75,
137
+ },
138
+ contextWindow: 200000,
139
+ maxTokens: 64000,
140
+ },
141
+ "claude-3-opus-20240229": {
142
+ id: "claude-3-opus-20240229",
143
+ name: "Claude Opus 3",
144
+ provider: "anthropic",
123
145
  reasoning: false,
124
- input: ["text"],
146
+ input: ["text", "image"],
125
147
  cost: {
126
- input: 0.79,
127
- output: 0.79,
128
- cacheRead: 0,
129
- cacheWrite: 0,
148
+ input: 15,
149
+ output: 75,
150
+ cacheRead: 1.5,
151
+ cacheWrite: 18.75,
130
152
  },
131
- contextWindow: 32768,
132
- maxTokens: 32768,
153
+ contextWindow: 200000,
154
+ maxTokens: 4096,
133
155
  },
134
- "openai/gpt-oss-20b": {
135
- id: "openai/gpt-oss-20b",
136
- name: "GPT OSS 20B",
137
- provider: "groq",
138
- baseUrl: "https://api.groq.com/openai/v1",
156
+ }
157
+ },
158
+ google: {
159
+ models: {
160
+ "gemini-2.5-flash-preview-05-20": {
161
+ id: "gemini-2.5-flash-preview-05-20",
162
+ name: "Gemini 2.5 Flash Preview 05-20",
163
+ provider: "google",
139
164
  reasoning: true,
140
- input: ["text"],
165
+ input: ["text", "image"],
141
166
  cost: {
142
- input: 0.1,
143
- output: 0.5,
144
- cacheRead: 0,
167
+ input: 0.15,
168
+ output: 0.6,
169
+ cacheRead: 0.0375,
145
170
  cacheWrite: 0,
146
171
  },
147
- contextWindow: 131072,
148
- maxTokens: 32768,
172
+ contextWindow: 1048576,
173
+ maxTokens: 65536,
149
174
  },
150
- "openai/gpt-oss-120b": {
151
- id: "openai/gpt-oss-120b",
152
- name: "GPT OSS 120B",
153
- provider: "groq",
154
- baseUrl: "https://api.groq.com/openai/v1",
175
+ "gemini-2.5-pro": {
176
+ id: "gemini-2.5-pro",
177
+ name: "Gemini 2.5 Pro",
178
+ provider: "google",
155
179
  reasoning: true,
156
- input: ["text"],
180
+ input: ["text", "image"],
157
181
  cost: {
158
- input: 0.15,
159
- output: 0.75,
160
- cacheRead: 0,
182
+ input: 1.25,
183
+ output: 10,
184
+ cacheRead: 0.31,
161
185
  cacheWrite: 0,
162
186
  },
163
- contextWindow: 131072,
164
- maxTokens: 32768,
187
+ contextWindow: 1048576,
188
+ maxTokens: 65536,
165
189
  },
166
- "meta-llama/llama-4-maverick-17b-128e-instruct": {
167
- id: "meta-llama/llama-4-maverick-17b-128e-instruct",
168
- name: "Llama 4 Maverick 17B",
169
- provider: "groq",
170
- baseUrl: "https://api.groq.com/openai/v1",
190
+ "gemini-1.5-flash": {
191
+ id: "gemini-1.5-flash",
192
+ name: "Gemini 1.5 Flash",
193
+ provider: "google",
171
194
  reasoning: false,
172
195
  input: ["text", "image"],
173
196
  cost: {
174
- input: 0.2,
175
- output: 0.6,
176
- cacheRead: 0,
197
+ input: 0.075,
198
+ output: 0.3,
199
+ cacheRead: 0.01875,
177
200
  cacheWrite: 0,
178
201
  },
179
- contextWindow: 131072,
202
+ contextWindow: 1000000,
180
203
  maxTokens: 8192,
181
204
  },
182
- "meta-llama/llama-4-scout-17b-16e-instruct": {
183
- id: "meta-llama/llama-4-scout-17b-16e-instruct",
184
- name: "Llama 4 Scout 17B",
185
- provider: "groq",
186
- baseUrl: "https://api.groq.com/openai/v1",
205
+ "gemini-2.0-flash-lite": {
206
+ id: "gemini-2.0-flash-lite",
207
+ name: "Gemini 2.0 Flash Lite",
208
+ provider: "google",
187
209
  reasoning: false,
188
210
  input: ["text", "image"],
189
211
  cost: {
190
- input: 0.11,
191
- output: 0.34,
212
+ input: 0.075,
213
+ output: 0.3,
192
214
  cacheRead: 0,
193
215
  cacheWrite: 0,
194
216
  },
195
- contextWindow: 131072,
217
+ contextWindow: 1048576,
196
218
  maxTokens: 8192,
197
219
  },
198
- "qwen/qwen3-32b": {
199
- id: "qwen/qwen3-32b",
200
- name: "Qwen3 32B",
201
- provider: "groq",
202
- baseUrl: "https://api.groq.com/openai/v1",
203
- reasoning: true,
204
- input: ["text"],
220
+ "gemini-1.5-pro": {
221
+ id: "gemini-1.5-pro",
222
+ name: "Gemini 1.5 Pro",
223
+ provider: "google",
224
+ reasoning: false,
225
+ input: ["text", "image"],
205
226
  cost: {
206
- input: 0.29,
207
- output: 0.59,
208
- cacheRead: 0,
227
+ input: 1.25,
228
+ output: 5,
229
+ cacheRead: 0.3125,
209
230
  cacheWrite: 0,
210
231
  },
211
- contextWindow: 131072,
212
- maxTokens: 16384,
232
+ contextWindow: 1000000,
233
+ maxTokens: 8192,
213
234
  },
214
- "moonshotai/kimi-k2-instruct": {
215
- id: "moonshotai/kimi-k2-instruct",
216
- name: "Kimi K2 Instruct",
217
- provider: "groq",
218
- baseUrl: "https://api.groq.com/openai/v1",
235
+ "gemini-1.5-flash-8b": {
236
+ id: "gemini-1.5-flash-8b",
237
+ name: "Gemini 1.5 Flash-8B",
238
+ provider: "google",
219
239
  reasoning: false,
220
- input: ["text"],
240
+ input: ["text", "image"],
221
241
  cost: {
222
- input: 1,
223
- output: 3,
224
- cacheRead: 0,
242
+ input: 0.0375,
243
+ output: 0.15,
244
+ cacheRead: 0.01,
225
245
  cacheWrite: 0,
226
246
  },
227
- contextWindow: 131072,
228
- maxTokens: 16384,
247
+ contextWindow: 1000000,
248
+ maxTokens: 8192,
229
249
  },
230
- }
231
- },
232
- cerebras: {
233
- models: {
234
- "qwen-3-235b-a22b-instruct-2507": {
235
- id: "qwen-3-235b-a22b-instruct-2507",
236
- name: "Qwen 3 235B Instruct",
237
- provider: "cerebras",
238
- baseUrl: "https://api.cerebras.ai/v1",
239
- reasoning: false,
240
- input: ["text"],
250
+ "gemini-2.5-flash": {
251
+ id: "gemini-2.5-flash",
252
+ name: "Gemini 2.5 Flash",
253
+ provider: "google",
254
+ reasoning: true,
255
+ input: ["text", "image"],
241
256
  cost: {
242
- input: 0.6,
243
- output: 1.2,
244
- cacheRead: 0,
257
+ input: 0.3,
258
+ output: 2.5,
259
+ cacheRead: 0.075,
245
260
  cacheWrite: 0,
246
261
  },
247
- contextWindow: 131000,
248
- maxTokens: 32000,
262
+ contextWindow: 1048576,
263
+ maxTokens: 65536,
249
264
  },
250
- "gpt-oss-120b": {
251
- id: "gpt-oss-120b",
252
- name: "GPT OSS 120B",
253
- provider: "cerebras",
254
- baseUrl: "https://api.cerebras.ai/v1",
265
+ "gemini-2.5-pro-preview-06-05": {
266
+ id: "gemini-2.5-pro-preview-06-05",
267
+ name: "Gemini 2.5 Pro Preview 06-05",
268
+ provider: "google",
255
269
  reasoning: true,
256
- input: ["text"],
270
+ input: ["text", "image"],
257
271
  cost: {
258
- input: 0.25,
259
- output: 0.69,
260
- cacheRead: 0,
272
+ input: 1.25,
273
+ output: 10,
274
+ cacheRead: 0.31,
261
275
  cacheWrite: 0,
262
276
  },
263
- contextWindow: 131072,
264
- maxTokens: 32768,
277
+ contextWindow: 1048576,
278
+ maxTokens: 65536,
265
279
  },
266
- "qwen-3-coder-480b": {
267
- id: "qwen-3-coder-480b",
268
- name: "Qwen 3 Coder 480B",
269
- provider: "cerebras",
270
- baseUrl: "https://api.cerebras.ai/v1",
280
+ "gemini-2.5-pro-preview-05-06": {
281
+ id: "gemini-2.5-pro-preview-05-06",
282
+ name: "Gemini 2.5 Pro Preview 05-06",
283
+ provider: "google",
284
+ reasoning: true,
285
+ input: ["text", "image"],
286
+ cost: {
287
+ input: 1.25,
288
+ output: 10,
289
+ cacheRead: 0.31,
290
+ cacheWrite: 0,
291
+ },
292
+ contextWindow: 1048576,
293
+ maxTokens: 65536,
294
+ },
295
+ "gemini-2.0-flash": {
296
+ id: "gemini-2.0-flash",
297
+ name: "Gemini 2.0 Flash",
298
+ provider: "google",
271
299
  reasoning: false,
272
- input: ["text"],
300
+ input: ["text", "image"],
273
301
  cost: {
274
- input: 2,
275
- output: 2,
276
- cacheRead: 0,
302
+ input: 0.1,
303
+ output: 0.4,
304
+ cacheRead: 0.025,
277
305
  cacheWrite: 0,
278
306
  },
279
- contextWindow: 131000,
280
- maxTokens: 32000,
307
+ contextWindow: 1048576,
308
+ maxTokens: 8192,
281
309
  },
282
- }
283
- },
284
- openrouter: {
285
- models: {
286
- "qwen/qwen3-30b-a3b-thinking-2507": {
287
- id: "qwen/qwen3-30b-a3b-thinking-2507",
288
- name: "Qwen: Qwen3 30B A3B Thinking 2507",
289
- provider: "openrouter",
290
- baseUrl: "https://openrouter.ai/api/v1",
310
+ "gemini-2.5-flash-lite-preview-06-17": {
311
+ id: "gemini-2.5-flash-lite-preview-06-17",
312
+ name: "Gemini 2.5 Flash Lite Preview 06-17",
313
+ provider: "google",
291
314
  reasoning: true,
292
- input: ["text"],
315
+ input: ["text", "image"],
293
316
  cost: {
294
- input: 0.0713,
295
- output: 0.2852,
296
- cacheRead: 0,
317
+ input: 0.1,
318
+ output: 0.4,
319
+ cacheRead: 0.025,
297
320
  cacheWrite: 0,
298
321
  },
299
- contextWindow: 262144,
300
- maxTokens: 262144,
322
+ contextWindow: 65536,
323
+ maxTokens: 65536,
301
324
  },
302
- "nousresearch/hermes-4-70b": {
303
- id: "nousresearch/hermes-4-70b",
304
- name: "Nous: Hermes 4 70B",
305
- provider: "openrouter",
306
- baseUrl: "https://openrouter.ai/api/v1",
325
+ "gemini-2.5-flash-preview-04-17": {
326
+ id: "gemini-2.5-flash-preview-04-17",
327
+ name: "Gemini 2.5 Flash Preview 04-17",
328
+ provider: "google",
307
329
  reasoning: true,
308
- input: ["text"],
330
+ input: ["text", "image"],
309
331
  cost: {
310
- input: 0.09329544,
311
- output: 0.3733632,
312
- cacheRead: 0,
332
+ input: 0.15,
333
+ output: 0.6,
334
+ cacheRead: 0.0375,
313
335
  cacheWrite: 0,
314
336
  },
315
- contextWindow: 131072,
316
- maxTokens: 4096,
337
+ contextWindow: 1048576,
338
+ maxTokens: 65536,
317
339
  },
318
- "nousresearch/hermes-4-405b": {
319
- id: "nousresearch/hermes-4-405b",
320
- name: "Nous: Hermes 4 405B",
321
- provider: "openrouter",
322
- baseUrl: "https://openrouter.ai/api/v1",
340
+ }
341
+ },
342
+ openai: {
343
+ models: {
344
+ "gpt-5-nano": {
345
+ id: "gpt-5-nano",
346
+ name: "GPT-5 Nano",
347
+ provider: "openai",
323
348
  reasoning: true,
324
- input: ["text"],
349
+ input: ["text", "image"],
325
350
  cost: {
326
- input: 0.1999188,
327
- output: 0.800064,
328
- cacheRead: 0,
351
+ input: 0.05,
352
+ output: 0.4,
353
+ cacheRead: 0.01,
329
354
  cacheWrite: 0,
330
355
  },
331
- contextWindow: 131072,
332
- maxTokens: 4096,
356
+ contextWindow: 400000,
357
+ maxTokens: 128000,
333
358
  },
334
- "deepseek/deepseek-chat-v3.1:free": {
335
- id: "deepseek/deepseek-chat-v3.1:free",
336
- name: "DeepSeek: DeepSeek V3.1 (free)",
337
- provider: "openrouter",
338
- baseUrl: "https://openrouter.ai/api/v1",
359
+ "o3-pro": {
360
+ id: "o3-pro",
361
+ name: "o3-pro",
362
+ provider: "openai",
339
363
  reasoning: true,
340
- input: ["text"],
364
+ input: ["text", "image"],
341
365
  cost: {
342
- input: 0,
343
- output: 0,
366
+ input: 20,
367
+ output: 80,
344
368
  cacheRead: 0,
345
369
  cacheWrite: 0,
346
370
  },
347
- contextWindow: 64000,
348
- maxTokens: 4096,
371
+ contextWindow: 200000,
372
+ maxTokens: 100000,
349
373
  },
350
- "deepseek/deepseek-chat-v3.1": {
351
- id: "deepseek/deepseek-chat-v3.1",
352
- name: "DeepSeek: DeepSeek V3.1",
353
- provider: "openrouter",
354
- baseUrl: "https://openrouter.ai/api/v1",
374
+ "codex-mini-latest": {
375
+ id: "codex-mini-latest",
376
+ name: "Codex Mini",
377
+ provider: "openai",
355
378
  reasoning: true,
356
379
  input: ["text"],
357
380
  cost: {
358
- input: 0.19999999999999998,
359
- output: 0.7999999999999999,
360
- cacheRead: 0,
381
+ input: 1.5,
382
+ output: 6,
383
+ cacheRead: 0.375,
361
384
  cacheWrite: 0,
362
385
  },
363
- contextWindow: 163840,
364
- maxTokens: 4096,
386
+ contextWindow: 200000,
387
+ maxTokens: 100000,
365
388
  },
366
- "mistralai/mistral-medium-3.1": {
367
- id: "mistralai/mistral-medium-3.1",
368
- name: "Mistral: Mistral Medium 3.1",
369
- provider: "openrouter",
370
- baseUrl: "https://openrouter.ai/api/v1",
389
+ "gpt-4.1": {
390
+ id: "gpt-4.1",
391
+ name: "GPT-4.1",
392
+ provider: "openai",
371
393
  reasoning: false,
372
394
  input: ["text", "image"],
373
395
  cost: {
374
- input: 0.39999999999999997,
375
- output: 2,
396
+ input: 2,
397
+ output: 8,
398
+ cacheRead: 0.5,
399
+ cacheWrite: 0,
400
+ },
401
+ contextWindow: 1047576,
402
+ maxTokens: 32768,
403
+ },
404
+ "gpt-4-turbo": {
405
+ id: "gpt-4-turbo",
406
+ name: "GPT-4 Turbo",
407
+ provider: "openai",
408
+ reasoning: false,
409
+ input: ["text", "image"],
410
+ cost: {
411
+ input: 10,
412
+ output: 30,
376
413
  cacheRead: 0,
377
414
  cacheWrite: 0,
378
415
  },
379
- contextWindow: 131072,
416
+ contextWindow: 128000,
380
417
  maxTokens: 4096,
381
418
  },
382
- "z-ai/glm-4.5v": {
383
- id: "z-ai/glm-4.5v",
384
- name: "Z.AI: GLM 4.5V",
385
- provider: "openrouter",
386
- baseUrl: "https://openrouter.ai/api/v1",
419
+ "o1": {
420
+ id: "o1",
421
+ name: "o1",
422
+ provider: "openai",
387
423
  reasoning: true,
388
424
  input: ["text", "image"],
389
425
  cost: {
390
- input: 0.5,
391
- output: 1.7999999999999998,
392
- cacheRead: 0,
426
+ input: 15,
427
+ output: 60,
428
+ cacheRead: 7.5,
393
429
  cacheWrite: 0,
394
430
  },
395
- contextWindow: 65536,
396
- maxTokens: 65536,
431
+ contextWindow: 200000,
432
+ maxTokens: 100000,
397
433
  },
398
- "ai21/jamba-mini-1.7": {
399
- id: "ai21/jamba-mini-1.7",
400
- name: "AI21: Jamba Mini 1.7",
401
- provider: "openrouter",
402
- baseUrl: "https://openrouter.ai/api/v1",
403
- reasoning: false,
404
- input: ["text"],
434
+ "o3-deep-research": {
435
+ id: "o3-deep-research",
436
+ name: "o3-deep-research",
437
+ provider: "openai",
438
+ reasoning: true,
439
+ input: ["text", "image"],
405
440
  cost: {
406
- input: 0.19999999999999998,
407
- output: 0.39999999999999997,
408
- cacheRead: 0,
441
+ input: 10,
442
+ output: 40,
443
+ cacheRead: 2.5,
409
444
  cacheWrite: 0,
410
445
  },
411
- contextWindow: 256000,
412
- maxTokens: 4096,
446
+ contextWindow: 200000,
447
+ maxTokens: 100000,
413
448
  },
414
- "ai21/jamba-large-1.7": {
415
- id: "ai21/jamba-large-1.7",
416
- name: "AI21: Jamba Large 1.7",
417
- provider: "openrouter",
418
- baseUrl: "https://openrouter.ai/api/v1",
419
- reasoning: false,
420
- input: ["text"],
449
+ "gpt-5": {
450
+ id: "gpt-5",
451
+ name: "GPT-5",
452
+ provider: "openai",
453
+ reasoning: true,
454
+ input: ["text", "image"],
421
455
  cost: {
422
- input: 2,
423
- output: 8,
424
- cacheRead: 0,
456
+ input: 1.25,
457
+ output: 10,
458
+ cacheRead: 0.13,
425
459
  cacheWrite: 0,
426
460
  },
427
- contextWindow: 256000,
428
- maxTokens: 4096,
461
+ contextWindow: 400000,
462
+ maxTokens: 128000,
429
463
  },
430
- "mistralai/codestral-2508": {
431
- id: "mistralai/codestral-2508",
432
- name: "Mistral: Codestral 2508",
433
- provider: "openrouter",
434
- baseUrl: "https://openrouter.ai/api/v1",
435
- reasoning: false,
436
- input: ["text"],
464
+ "o1-pro": {
465
+ id: "o1-pro",
466
+ name: "o1-pro",
467
+ provider: "openai",
468
+ reasoning: true,
469
+ input: ["text", "image"],
437
470
  cost: {
438
- input: 0.3,
439
- output: 0.8999999999999999,
471
+ input: 150,
472
+ output: 600,
440
473
  cacheRead: 0,
441
474
  cacheWrite: 0,
442
475
  },
443
- contextWindow: 256000,
444
- maxTokens: 4096,
476
+ contextWindow: 200000,
477
+ maxTokens: 100000,
445
478
  },
446
- "qwen/qwen3-coder-30b-a3b-instruct": {
447
- id: "qwen/qwen3-coder-30b-a3b-instruct",
448
- name: "Qwen: Qwen3 Coder 30B A3B Instruct",
449
- provider: "openrouter",
450
- baseUrl: "https://openrouter.ai/api/v1",
451
- reasoning: false,
452
- input: ["text"],
479
+ "o3": {
480
+ id: "o3",
481
+ name: "o3",
482
+ provider: "openai",
483
+ reasoning: true,
484
+ input: ["text", "image"],
453
485
  cost: {
454
- input: 0.051830799999999996,
455
- output: 0.207424,
456
- cacheRead: 0,
486
+ input: 2,
487
+ output: 8,
488
+ cacheRead: 0.5,
457
489
  cacheWrite: 0,
458
490
  },
459
- contextWindow: 262144,
460
- maxTokens: 4096,
491
+ contextWindow: 200000,
492
+ maxTokens: 100000,
461
493
  },
462
- "qwen/qwen3-30b-a3b-instruct-2507": {
463
- id: "qwen/qwen3-30b-a3b-instruct-2507",
464
- name: "Qwen: Qwen3 30B A3B Instruct 2507",
465
- provider: "openrouter",
466
- baseUrl: "https://openrouter.ai/api/v1",
467
- reasoning: false,
468
- input: ["text"],
494
+ "gpt-5-mini": {
495
+ id: "gpt-5-mini",
496
+ name: "GPT-5 Mini",
497
+ provider: "openai",
498
+ reasoning: true,
499
+ input: ["text", "image"],
469
500
  cost: {
470
- input: 0.051830799999999996,
471
- output: 0.207424,
472
- cacheRead: 0,
501
+ input: 0.25,
502
+ output: 2,
503
+ cacheRead: 0.03,
473
504
  cacheWrite: 0,
474
505
  },
475
- contextWindow: 262144,
476
- maxTokens: 4096,
506
+ contextWindow: 400000,
507
+ maxTokens: 128000,
477
508
  },
478
- "z-ai/glm-4.5": {
479
- id: "z-ai/glm-4.5",
480
- name: "Z.AI: GLM 4.5",
481
- provider: "openrouter",
482
- baseUrl: "https://openrouter.ai/api/v1",
509
+ "o4-mini-deep-research": {
510
+ id: "o4-mini-deep-research",
511
+ name: "o4-mini-deep-research",
512
+ provider: "openai",
483
513
  reasoning: true,
484
- input: ["text"],
514
+ input: ["text", "image"],
485
515
  cost: {
486
- input: 0.32986602,
487
- output: 1.3201056,
488
- cacheRead: 0,
516
+ input: 2,
517
+ output: 8,
518
+ cacheRead: 0.5,
489
519
  cacheWrite: 0,
490
520
  },
491
- contextWindow: 131072,
492
- maxTokens: 4096,
521
+ contextWindow: 200000,
522
+ maxTokens: 100000,
493
523
  },
494
- "z-ai/glm-4.5-air:free": {
495
- id: "z-ai/glm-4.5-air:free",
496
- name: "Z.AI: GLM 4.5 Air (free)",
497
- provider: "openrouter",
498
- baseUrl: "https://openrouter.ai/api/v1",
499
- reasoning: true,
500
- input: ["text"],
524
+ "gpt-4o-mini": {
525
+ id: "gpt-4o-mini",
526
+ name: "GPT-4o mini",
527
+ provider: "openai",
528
+ reasoning: false,
529
+ input: ["text", "image"],
501
530
  cost: {
502
- input: 0,
503
- output: 0,
504
- cacheRead: 0,
531
+ input: 0.15,
532
+ output: 0.6,
533
+ cacheRead: 0.08,
505
534
  cacheWrite: 0,
506
535
  },
507
- contextWindow: 131072,
508
- maxTokens: 4096,
536
+ contextWindow: 128000,
537
+ maxTokens: 16384,
509
538
  },
510
- "z-ai/glm-4.5-air": {
511
- id: "z-ai/glm-4.5-air",
512
- name: "Z.AI: GLM 4.5 Air",
513
- provider: "openrouter",
514
- baseUrl: "https://openrouter.ai/api/v1",
515
- reasoning: true,
516
- input: ["text"],
539
+ "gpt-4.1-nano": {
540
+ id: "gpt-4.1-nano",
541
+ name: "GPT-4.1 nano",
542
+ provider: "openai",
543
+ reasoning: false,
544
+ input: ["text", "image"],
517
545
  cost: {
518
- input: 0.14,
519
- output: 0.86,
520
- cacheRead: 0,
546
+ input: 0.1,
547
+ output: 0.4,
548
+ cacheRead: 0.03,
521
549
  cacheWrite: 0,
522
550
  },
523
- contextWindow: 131072,
524
- maxTokens: 131072,
551
+ contextWindow: 1047576,
552
+ maxTokens: 32768,
525
553
  },
526
- "qwen/qwen3-235b-a22b-thinking-2507": {
527
- id: "qwen/qwen3-235b-a22b-thinking-2507",
528
- name: "Qwen: Qwen3 235B A22B Thinking 2507",
529
- provider: "openrouter",
530
- baseUrl: "https://openrouter.ai/api/v1",
531
- reasoning: true,
532
- input: ["text"],
554
+ "gpt-4.1-mini": {
555
+ id: "gpt-4.1-mini",
556
+ name: "GPT-4.1 mini",
557
+ provider: "openai",
558
+ reasoning: false,
559
+ input: ["text", "image"],
533
560
  cost: {
534
- input: 0.077968332,
535
- output: 0.31202496,
536
- cacheRead: 0,
561
+ input: 0.4,
562
+ output: 1.6,
563
+ cacheRead: 0.1,
537
564
  cacheWrite: 0,
538
565
  },
539
- contextWindow: 262144,
540
- maxTokens: 4096,
566
+ contextWindow: 1047576,
567
+ maxTokens: 32768,
541
568
  },
542
- "z-ai/glm-4-32b": {
543
- id: "z-ai/glm-4-32b",
544
- name: "Z.AI: GLM 4 32B ",
545
- provider: "openrouter",
546
- baseUrl: "https://openrouter.ai/api/v1",
569
+ "gpt-4o": {
570
+ id: "gpt-4o",
571
+ name: "GPT-4o",
572
+ provider: "openai",
547
573
  reasoning: false,
548
- input: ["text"],
574
+ input: ["text", "image"],
549
575
  cost: {
550
- input: 0.09999999999999999,
551
- output: 0.09999999999999999,
552
- cacheRead: 0,
576
+ input: 2.5,
577
+ output: 10,
578
+ cacheRead: 1.25,
553
579
  cacheWrite: 0,
554
580
  },
555
581
  contextWindow: 128000,
556
- maxTokens: 4096,
582
+ maxTokens: 16384,
557
583
  },
558
- "qwen/qwen3-coder:free": {
559
- id: "qwen/qwen3-coder:free",
560
- name: "Qwen: Qwen3 Coder 480B A35B (free)",
561
- provider: "openrouter",
562
- baseUrl: "https://openrouter.ai/api/v1",
584
+ "gpt-4": {
585
+ id: "gpt-4",
586
+ name: "GPT-4",
587
+ provider: "openai",
563
588
  reasoning: false,
564
589
  input: ["text"],
565
590
  cost: {
566
- input: 0,
567
- output: 0,
591
+ input: 30,
592
+ output: 60,
568
593
  cacheRead: 0,
569
594
  cacheWrite: 0,
570
595
  },
571
- contextWindow: 262144,
572
- maxTokens: 4096,
596
+ contextWindow: 8192,
597
+ maxTokens: 8192,
573
598
  },
574
- "qwen/qwen3-coder": {
575
- id: "qwen/qwen3-coder",
576
- name: "Qwen: Qwen3 Coder 480B A35B",
577
- provider: "openrouter",
578
- baseUrl: "https://openrouter.ai/api/v1",
579
- reasoning: false,
580
- input: ["text"],
599
+ "o4-mini": {
600
+ id: "o4-mini",
601
+ name: "o4-mini",
602
+ provider: "openai",
603
+ reasoning: true,
604
+ input: ["text", "image"],
581
605
  cost: {
582
- input: 0.19999999999999998,
583
- output: 0.7999999999999999,
584
- cacheRead: 0,
606
+ input: 1.1,
607
+ output: 4.4,
608
+ cacheRead: 0.28,
585
609
  cacheWrite: 0,
586
610
  },
587
- contextWindow: 262144,
588
- maxTokens: 4096,
611
+ contextWindow: 200000,
612
+ maxTokens: 100000,
589
613
  },
590
- "qwen/qwen3-235b-a22b-2507": {
591
- id: "qwen/qwen3-235b-a22b-2507",
592
- name: "Qwen: Qwen3 235B A22B Instruct 2507",
593
- provider: "openrouter",
594
- baseUrl: "https://openrouter.ai/api/v1",
595
- reasoning: false,
614
+ "o3-mini": {
615
+ id: "o3-mini",
616
+ name: "o3-mini",
617
+ provider: "openai",
618
+ reasoning: true,
596
619
  input: ["text"],
597
620
  cost: {
598
- input: 0.077968332,
599
- output: 0.31202496,
600
- cacheRead: 0,
621
+ input: 1.1,
622
+ output: 4.4,
623
+ cacheRead: 0.55,
601
624
  cacheWrite: 0,
602
625
  },
603
- contextWindow: 262144,
604
- maxTokens: 4096,
626
+ contextWindow: 200000,
627
+ maxTokens: 100000,
605
628
  },
606
- "moonshotai/kimi-k2:free": {
607
- id: "moonshotai/kimi-k2:free",
608
- name: "MoonshotAI: Kimi K2 (free)",
609
- provider: "openrouter",
610
- baseUrl: "https://openrouter.ai/api/v1",
629
+ }
630
+ },
631
+ groq: {
632
+ models: {
633
+ "llama-3.1-8b-instant": {
634
+ id: "llama-3.1-8b-instant",
635
+ name: "Llama 3.1 8B Instant",
636
+ provider: "groq",
637
+ baseUrl: "https://api.groq.com/openai/v1",
611
638
  reasoning: false,
612
639
  input: ["text"],
613
640
  cost: {
614
- input: 0,
615
- output: 0,
641
+ input: 0.05,
642
+ output: 0.08,
616
643
  cacheRead: 0,
617
644
  cacheWrite: 0,
618
645
  },
619
- contextWindow: 32768,
620
- maxTokens: 4096,
646
+ contextWindow: 131072,
647
+ maxTokens: 8192,
621
648
  },
622
- "moonshotai/kimi-k2": {
623
- id: "moonshotai/kimi-k2",
624
- name: "MoonshotAI: Kimi K2",
625
- provider: "openrouter",
626
- baseUrl: "https://openrouter.ai/api/v1",
627
- reasoning: false,
649
+ "qwen-qwq-32b": {
650
+ id: "qwen-qwq-32b",
651
+ name: "Qwen QwQ 32B",
652
+ provider: "groq",
653
+ baseUrl: "https://api.groq.com/openai/v1",
654
+ reasoning: true,
628
655
  input: ["text"],
629
656
  cost: {
630
- input: 0.14,
631
- output: 2.4899999999999998,
657
+ input: 0.29,
658
+ output: 0.39,
632
659
  cacheRead: 0,
633
660
  cacheWrite: 0,
634
661
  },
635
- contextWindow: 63000,
636
- maxTokens: 63000,
662
+ contextWindow: 131072,
663
+ maxTokens: 16384,
637
664
  },
638
- "mistralai/devstral-medium": {
639
- id: "mistralai/devstral-medium",
640
- name: "Mistral: Devstral Medium",
641
- provider: "openrouter",
642
- baseUrl: "https://openrouter.ai/api/v1",
665
+ "llama3-70b-8192": {
666
+ id: "llama3-70b-8192",
667
+ name: "Llama 3 70B",
668
+ provider: "groq",
669
+ baseUrl: "https://api.groq.com/openai/v1",
643
670
  reasoning: false,
644
671
  input: ["text"],
645
672
  cost: {
646
- input: 0.39999999999999997,
647
- output: 2,
673
+ input: 0.59,
674
+ output: 0.79,
675
+ cacheRead: 0,
676
+ cacheWrite: 0,
677
+ },
678
+ contextWindow: 8192,
679
+ maxTokens: 8192,
680
+ },
681
+ "deepseek-r1-distill-llama-70b": {
682
+ id: "deepseek-r1-distill-llama-70b",
683
+ name: "DeepSeek R1 Distill Llama 70B",
684
+ provider: "groq",
685
+ baseUrl: "https://api.groq.com/openai/v1",
686
+ reasoning: true,
687
+ input: ["text"],
688
+ cost: {
689
+ input: 0.75,
690
+ output: 0.99,
648
691
  cacheRead: 0,
649
692
  cacheWrite: 0,
650
693
  },
651
694
  contextWindow: 131072,
652
- maxTokens: 4096,
695
+ maxTokens: 8192,
653
696
  },
654
- "mistralai/devstral-small": {
655
- id: "mistralai/devstral-small",
656
- name: "Mistral: Devstral Small 1.1",
657
- provider: "openrouter",
658
- baseUrl: "https://openrouter.ai/api/v1",
697
+ "llama3-8b-8192": {
698
+ id: "llama3-8b-8192",
699
+ name: "Llama 3 8B",
700
+ provider: "groq",
701
+ baseUrl: "https://api.groq.com/openai/v1",
659
702
  reasoning: false,
660
703
  input: ["text"],
661
704
  cost: {
662
- input: 0.07,
663
- output: 0.28,
705
+ input: 0.05,
706
+ output: 0.08,
664
707
  cacheRead: 0,
665
708
  cacheWrite: 0,
666
709
  },
667
- contextWindow: 128000,
668
- maxTokens: 4096,
710
+ contextWindow: 8192,
711
+ maxTokens: 8192,
669
712
  },
670
- "inception/mercury": {
671
- id: "inception/mercury",
672
- name: "Inception: Mercury",
673
- provider: "openrouter",
674
- baseUrl: "https://openrouter.ai/api/v1",
713
+ "gemma2-9b-it": {
714
+ id: "gemma2-9b-it",
715
+ name: "Gemma 2 9B",
716
+ provider: "groq",
717
+ baseUrl: "https://api.groq.com/openai/v1",
675
718
  reasoning: false,
676
719
  input: ["text"],
677
720
  cost: {
678
- input: 0.25,
679
- output: 1,
721
+ input: 0.2,
722
+ output: 0.2,
680
723
  cacheRead: 0,
681
724
  cacheWrite: 0,
682
725
  },
683
- contextWindow: 128000,
684
- maxTokens: 16384,
726
+ contextWindow: 8192,
727
+ maxTokens: 8192,
685
728
  },
686
- "mistralai/mistral-small-3.2-24b-instruct:free": {
687
- id: "mistralai/mistral-small-3.2-24b-instruct:free",
688
- name: "Mistral: Mistral Small 3.2 24B (free)",
689
- provider: "openrouter",
690
- baseUrl: "https://openrouter.ai/api/v1",
729
+ "llama-3.3-70b-versatile": {
730
+ id: "llama-3.3-70b-versatile",
731
+ name: "Llama 3.3 70B Versatile",
732
+ provider: "groq",
733
+ baseUrl: "https://api.groq.com/openai/v1",
691
734
  reasoning: false,
692
- input: ["text", "image"],
735
+ input: ["text"],
693
736
  cost: {
694
- input: 0,
695
- output: 0,
737
+ input: 0.59,
738
+ output: 0.79,
696
739
  cacheRead: 0,
697
740
  cacheWrite: 0,
698
741
  },
699
742
  contextWindow: 131072,
700
- maxTokens: 4096,
743
+ maxTokens: 32768,
701
744
  },
702
- "mistralai/mistral-small-3.2-24b-instruct": {
703
- id: "mistralai/mistral-small-3.2-24b-instruct",
704
- name: "Mistral: Mistral Small 3.2 24B",
705
- provider: "openrouter",
706
- baseUrl: "https://openrouter.ai/api/v1",
745
+ "mistral-saba-24b": {
746
+ id: "mistral-saba-24b",
747
+ name: "Mistral Saba 24B",
748
+ provider: "groq",
749
+ baseUrl: "https://api.groq.com/openai/v1",
707
750
  reasoning: false,
708
- input: ["text", "image"],
751
+ input: ["text"],
709
752
  cost: {
710
- input: 0.049999999999999996,
711
- output: 0.09999999999999999,
753
+ input: 0.79,
754
+ output: 0.79,
712
755
  cacheRead: 0,
713
756
  cacheWrite: 0,
714
757
  },
715
- contextWindow: 128000,
716
- maxTokens: 4096,
758
+ contextWindow: 32768,
759
+ maxTokens: 32768,
717
760
  },
718
- "minimax/minimax-m1": {
719
- id: "minimax/minimax-m1",
720
- name: "MiniMax: MiniMax M1",
721
- provider: "openrouter",
722
- baseUrl: "https://openrouter.ai/api/v1",
761
+ "openai/gpt-oss-20b": {
762
+ id: "openai/gpt-oss-20b",
763
+ name: "GPT OSS 20B",
764
+ provider: "groq",
765
+ baseUrl: "https://api.groq.com/openai/v1",
723
766
  reasoning: true,
724
767
  input: ["text"],
725
768
  cost: {
726
- input: 0.3,
727
- output: 1.6500000000000001,
769
+ input: 0.1,
770
+ output: 0.5,
728
771
  cacheRead: 0,
729
772
  cacheWrite: 0,
730
773
  },
731
- contextWindow: 1000000,
732
- maxTokens: 40000,
774
+ contextWindow: 131072,
775
+ maxTokens: 32768,
733
776
  },
734
- "mistralai/magistral-small-2506": {
735
- id: "mistralai/magistral-small-2506",
736
- name: "Mistral: Magistral Small 2506",
737
- provider: "openrouter",
738
- baseUrl: "https://openrouter.ai/api/v1",
777
+ "openai/gpt-oss-120b": {
778
+ id: "openai/gpt-oss-120b",
779
+ name: "GPT OSS 120B",
780
+ provider: "groq",
781
+ baseUrl: "https://api.groq.com/openai/v1",
739
782
  reasoning: true,
740
783
  input: ["text"],
741
784
  cost: {
742
- input: 0.5,
743
- output: 1.5,
785
+ input: 0.15,
786
+ output: 0.75,
744
787
  cacheRead: 0,
745
788
  cacheWrite: 0,
746
789
  },
747
- contextWindow: 40000,
748
- maxTokens: 40000,
790
+ contextWindow: 131072,
791
+ maxTokens: 32768,
749
792
  },
750
- "mistralai/magistral-medium-2506": {
751
- id: "mistralai/magistral-medium-2506",
752
- name: "Mistral: Magistral Medium 2506",
753
- provider: "openrouter",
754
- baseUrl: "https://openrouter.ai/api/v1",
755
- reasoning: true,
756
- input: ["text"],
793
+ "meta-llama/llama-4-maverick-17b-128e-instruct": {
794
+ id: "meta-llama/llama-4-maverick-17b-128e-instruct",
795
+ name: "Llama 4 Maverick 17B",
796
+ provider: "groq",
797
+ baseUrl: "https://api.groq.com/openai/v1",
798
+ reasoning: false,
799
+ input: ["text", "image"],
757
800
  cost: {
758
- input: 2,
759
- output: 5,
801
+ input: 0.2,
802
+ output: 0.6,
760
803
  cacheRead: 0,
761
804
  cacheWrite: 0,
762
805
  },
763
- contextWindow: 40960,
764
- maxTokens: 40000,
806
+ contextWindow: 131072,
807
+ maxTokens: 8192,
765
808
  },
766
- "mistralai/magistral-medium-2506:thinking": {
767
- id: "mistralai/magistral-medium-2506:thinking",
768
- name: "Mistral: Magistral Medium 2506 (thinking)",
769
- provider: "openrouter",
770
- baseUrl: "https://openrouter.ai/api/v1",
771
- reasoning: true,
772
- input: ["text"],
809
+ "meta-llama/llama-4-scout-17b-16e-instruct": {
810
+ id: "meta-llama/llama-4-scout-17b-16e-instruct",
811
+ name: "Llama 4 Scout 17B",
812
+ provider: "groq",
813
+ baseUrl: "https://api.groq.com/openai/v1",
814
+ reasoning: false,
815
+ input: ["text", "image"],
773
816
  cost: {
774
- input: 2,
775
- output: 5,
817
+ input: 0.11,
818
+ output: 0.34,
776
819
  cacheRead: 0,
777
820
  cacheWrite: 0,
778
821
  },
779
- contextWindow: 40960,
780
- maxTokens: 40000,
822
+ contextWindow: 131072,
823
+ maxTokens: 8192,
781
824
  },
782
- "deepseek/deepseek-r1-0528": {
783
- id: "deepseek/deepseek-r1-0528",
784
- name: "DeepSeek: R1 0528",
785
- provider: "openrouter",
786
- baseUrl: "https://openrouter.ai/api/v1",
825
+ "qwen/qwen3-32b": {
826
+ id: "qwen/qwen3-32b",
827
+ name: "Qwen3 32B",
828
+ provider: "groq",
829
+ baseUrl: "https://api.groq.com/openai/v1",
787
830
  reasoning: true,
788
831
  input: ["text"],
789
832
  cost: {
790
- input: 0.1999188,
791
- output: 0.800064,
833
+ input: 0.29,
834
+ output: 0.59,
792
835
  cacheRead: 0,
793
836
  cacheWrite: 0,
794
837
  },
795
- contextWindow: 163840,
796
- maxTokens: 4096,
838
+ contextWindow: 131072,
839
+ maxTokens: 16384,
797
840
  },
798
- "mistralai/devstral-small-2505:free": {
799
- id: "mistralai/devstral-small-2505:free",
800
- name: "Mistral: Devstral Small 2505 (free)",
801
- provider: "openrouter",
802
- baseUrl: "https://openrouter.ai/api/v1",
841
+ "moonshotai/kimi-k2-instruct": {
842
+ id: "moonshotai/kimi-k2-instruct",
843
+ name: "Kimi K2 Instruct",
844
+ provider: "groq",
845
+ baseUrl: "https://api.groq.com/openai/v1",
803
846
  reasoning: false,
804
847
  input: ["text"],
805
848
  cost: {
806
- input: 0,
807
- output: 0,
808
- cacheRead: 0,
809
- cacheWrite: 0,
810
- },
811
- contextWindow: 32768,
812
- maxTokens: 4096,
813
- },
814
- "mistralai/devstral-small-2505": {
815
- id: "mistralai/devstral-small-2505",
816
- name: "Mistral: Devstral Small 2505",
817
- provider: "openrouter",
818
- baseUrl: "https://openrouter.ai/api/v1",
819
- reasoning: false,
820
- input: ["text"],
821
- cost: {
822
- input: 0.01999188,
823
- output: 0.0800064,
849
+ input: 1,
850
+ output: 3,
824
851
  cacheRead: 0,
825
852
  cacheWrite: 0,
826
853
  },
827
854
  contextWindow: 131072,
828
- maxTokens: 4096,
855
+ maxTokens: 16384,
829
856
  },
830
- "meta-llama/llama-3.3-8b-instruct:free": {
831
- id: "meta-llama/llama-3.3-8b-instruct:free",
832
- name: "Meta: Llama 3.3 8B Instruct (free)",
833
- provider: "openrouter",
834
- baseUrl: "https://openrouter.ai/api/v1",
857
+ }
858
+ },
859
+ cerebras: {
860
+ models: {
861
+ "qwen-3-235b-a22b-instruct-2507": {
862
+ id: "qwen-3-235b-a22b-instruct-2507",
863
+ name: "Qwen 3 235B Instruct",
864
+ provider: "cerebras",
865
+ baseUrl: "https://api.cerebras.ai/v1",
835
866
  reasoning: false,
836
867
  input: ["text"],
837
868
  cost: {
838
- input: 0,
839
- output: 0,
869
+ input: 0.6,
870
+ output: 1.2,
840
871
  cacheRead: 0,
841
872
  cacheWrite: 0,
842
873
  },
843
- contextWindow: 128000,
844
- maxTokens: 4028,
874
+ contextWindow: 131000,
875
+ maxTokens: 32000,
845
876
  },
846
- "mistralai/mistral-medium-3": {
847
- id: "mistralai/mistral-medium-3",
848
- name: "Mistral: Mistral Medium 3",
849
- provider: "openrouter",
850
- baseUrl: "https://openrouter.ai/api/v1",
851
- reasoning: false,
852
- input: ["text", "image"],
877
+ "gpt-oss-120b": {
878
+ id: "gpt-oss-120b",
879
+ name: "GPT OSS 120B",
880
+ provider: "cerebras",
881
+ baseUrl: "https://api.cerebras.ai/v1",
882
+ reasoning: true,
883
+ input: ["text"],
853
884
  cost: {
854
- input: 0.39999999999999997,
855
- output: 2,
885
+ input: 0.25,
886
+ output: 0.69,
856
887
  cacheRead: 0,
857
888
  cacheWrite: 0,
858
889
  },
859
890
  contextWindow: 131072,
860
- maxTokens: 4096,
891
+ maxTokens: 32768,
861
892
  },
862
- "arcee-ai/virtuoso-large": {
863
- id: "arcee-ai/virtuoso-large",
864
- name: "Arcee AI: Virtuoso Large",
865
- provider: "openrouter",
866
- baseUrl: "https://openrouter.ai/api/v1",
893
+ "qwen-3-coder-480b": {
894
+ id: "qwen-3-coder-480b",
895
+ name: "Qwen 3 Coder 480B",
896
+ provider: "cerebras",
897
+ baseUrl: "https://api.cerebras.ai/v1",
867
898
  reasoning: false,
868
899
  input: ["text"],
869
900
  cost: {
870
- input: 0.75,
871
- output: 1.2,
901
+ input: 2,
902
+ output: 2,
872
903
  cacheRead: 0,
873
904
  cacheWrite: 0,
874
905
  },
875
- contextWindow: 131072,
876
- maxTokens: 64000,
906
+ contextWindow: 131000,
907
+ maxTokens: 32000,
877
908
  },
878
- "inception/mercury-coder": {
879
- id: "inception/mercury-coder",
880
- name: "Inception: Mercury Coder",
909
+ }
910
+ },
911
+ openrouter: {
912
+ models: {
913
+ "qwen/qwen3-30b-a3b-thinking-2507": {
914
+ id: "qwen/qwen3-30b-a3b-thinking-2507",
915
+ name: "Qwen: Qwen3 30B A3B Thinking 2507",
881
916
  provider: "openrouter",
882
917
  baseUrl: "https://openrouter.ai/api/v1",
883
- reasoning: false,
918
+ reasoning: true,
884
919
  input: ["text"],
885
920
  cost: {
886
- input: 0.25,
887
- output: 1,
921
+ input: 0.0713,
922
+ output: 0.2852,
888
923
  cacheRead: 0,
889
924
  cacheWrite: 0,
890
925
  },
891
- contextWindow: 128000,
892
- maxTokens: 16384,
926
+ contextWindow: 262144,
927
+ maxTokens: 262144,
893
928
  },
894
- "qwen/qwen3-4b:free": {
895
- id: "qwen/qwen3-4b:free",
896
- name: "Qwen: Qwen3 4B (free)",
929
+ "nousresearch/hermes-4-70b": {
930
+ id: "nousresearch/hermes-4-70b",
931
+ name: "Nous: Hermes 4 70B",
897
932
  provider: "openrouter",
898
933
  baseUrl: "https://openrouter.ai/api/v1",
899
934
  reasoning: true,
900
935
  input: ["text"],
901
936
  cost: {
902
- input: 0,
903
- output: 0,
937
+ input: 0.09329544,
938
+ output: 0.3733632,
904
939
  cacheRead: 0,
905
940
  cacheWrite: 0,
906
941
  },
907
- contextWindow: 40960,
942
+ contextWindow: 131072,
908
943
  maxTokens: 4096,
909
944
  },
910
- "qwen/qwen3-30b-a3b": {
911
- id: "qwen/qwen3-30b-a3b",
912
- name: "Qwen: Qwen3 30B A3B",
945
+ "nousresearch/hermes-4-405b": {
946
+ id: "nousresearch/hermes-4-405b",
947
+ name: "Nous: Hermes 4 405B",
913
948
  provider: "openrouter",
914
949
  baseUrl: "https://openrouter.ai/api/v1",
915
950
  reasoning: true,
916
951
  input: ["text"],
917
952
  cost: {
918
- input: 0.01999188,
919
- output: 0.0800064,
953
+ input: 0.1999188,
954
+ output: 0.800064,
920
955
  cacheRead: 0,
921
956
  cacheWrite: 0,
922
957
  },
923
- contextWindow: 40960,
958
+ contextWindow: 131072,
924
959
  maxTokens: 4096,
925
960
  },
926
- "qwen/qwen3-14b": {
927
- id: "qwen/qwen3-14b",
928
- name: "Qwen: Qwen3 14B",
961
+ "deepseek/deepseek-chat-v3.1:free": {
962
+ id: "deepseek/deepseek-chat-v3.1:free",
963
+ name: "DeepSeek: DeepSeek V3.1 (free)",
929
964
  provider: "openrouter",
930
965
  baseUrl: "https://openrouter.ai/api/v1",
931
966
  reasoning: true,
932
967
  input: ["text"],
933
968
  cost: {
934
- input: 0.06,
935
- output: 0.24,
969
+ input: 0,
970
+ output: 0,
936
971
  cacheRead: 0,
937
972
  cacheWrite: 0,
938
973
  },
939
- contextWindow: 40960,
940
- maxTokens: 40960,
974
+ contextWindow: 64000,
975
+ maxTokens: 4096,
941
976
  },
942
- "qwen/qwen3-32b": {
943
- id: "qwen/qwen3-32b",
944
- name: "Qwen: Qwen3 32B",
977
+ "deepseek/deepseek-chat-v3.1": {
978
+ id: "deepseek/deepseek-chat-v3.1",
979
+ name: "DeepSeek: DeepSeek V3.1",
945
980
  provider: "openrouter",
946
981
  baseUrl: "https://openrouter.ai/api/v1",
947
982
  reasoning: true,
948
983
  input: ["text"],
949
984
  cost: {
950
- input: 0.017992691999999998,
951
- output: 0.07200576,
985
+ input: 0.19999999999999998,
986
+ output: 0.7999999999999999,
952
987
  cacheRead: 0,
953
988
  cacheWrite: 0,
954
989
  },
955
- contextWindow: 40960,
990
+ contextWindow: 163840,
956
991
  maxTokens: 4096,
957
992
  },
958
- "qwen/qwen3-235b-a22b:free": {
959
- id: "qwen/qwen3-235b-a22b:free",
960
- name: "Qwen: Qwen3 235B A22B (free)",
993
+ "mistralai/mistral-medium-3.1": {
994
+ id: "mistralai/mistral-medium-3.1",
995
+ name: "Mistral: Mistral Medium 3.1",
961
996
  provider: "openrouter",
962
997
  baseUrl: "https://openrouter.ai/api/v1",
963
- reasoning: true,
964
- input: ["text"],
998
+ reasoning: false,
999
+ input: ["text", "image"],
965
1000
  cost: {
966
- input: 0,
967
- output: 0,
1001
+ input: 0.39999999999999997,
1002
+ output: 2,
968
1003
  cacheRead: 0,
969
1004
  cacheWrite: 0,
970
1005
  },
971
1006
  contextWindow: 131072,
972
1007
  maxTokens: 4096,
973
1008
  },
974
- "qwen/qwen3-235b-a22b": {
975
- id: "qwen/qwen3-235b-a22b",
976
- name: "Qwen: Qwen3 235B A22B",
1009
+ "z-ai/glm-4.5v": {
1010
+ id: "z-ai/glm-4.5v",
1011
+ name: "Z.AI: GLM 4.5V",
977
1012
  provider: "openrouter",
978
1013
  baseUrl: "https://openrouter.ai/api/v1",
979
1014
  reasoning: true,
980
- input: ["text"],
1015
+ input: ["text", "image"],
981
1016
  cost: {
982
- input: 0.13,
983
- output: 0.6,
1017
+ input: 0.5,
1018
+ output: 1.7999999999999998,
984
1019
  cacheRead: 0,
985
1020
  cacheWrite: 0,
986
1021
  },
987
- contextWindow: 40960,
988
- maxTokens: 40960,
1022
+ contextWindow: 65536,
1023
+ maxTokens: 65536,
989
1024
  },
990
- "meta-llama/llama-4-maverick:free": {
991
- id: "meta-llama/llama-4-maverick:free",
992
- name: "Meta: Llama 4 Maverick (free)",
1025
+ "ai21/jamba-mini-1.7": {
1026
+ id: "ai21/jamba-mini-1.7",
1027
+ name: "AI21: Jamba Mini 1.7",
993
1028
  provider: "openrouter",
994
1029
  baseUrl: "https://openrouter.ai/api/v1",
995
1030
  reasoning: false,
996
- input: ["text", "image"],
1031
+ input: ["text"],
997
1032
  cost: {
998
- input: 0,
999
- output: 0,
1033
+ input: 0.19999999999999998,
1034
+ output: 0.39999999999999997,
1000
1035
  cacheRead: 0,
1001
1036
  cacheWrite: 0,
1002
1037
  },
1003
- contextWindow: 128000,
1004
- maxTokens: 4028,
1038
+ contextWindow: 256000,
1039
+ maxTokens: 4096,
1005
1040
  },
1006
- "meta-llama/llama-4-maverick": {
1007
- id: "meta-llama/llama-4-maverick",
1008
- name: "Meta: Llama 4 Maverick",
1041
+ "ai21/jamba-large-1.7": {
1042
+ id: "ai21/jamba-large-1.7",
1043
+ name: "AI21: Jamba Large 1.7",
1009
1044
  provider: "openrouter",
1010
1045
  baseUrl: "https://openrouter.ai/api/v1",
1011
1046
  reasoning: false,
1012
- input: ["text", "image"],
1047
+ input: ["text"],
1013
1048
  cost: {
1014
- input: 0.15,
1015
- output: 0.6,
1049
+ input: 2,
1050
+ output: 8,
1016
1051
  cacheRead: 0,
1017
1052
  cacheWrite: 0,
1018
1053
  },
1019
- contextWindow: 1048576,
1020
- maxTokens: 16384,
1054
+ contextWindow: 256000,
1055
+ maxTokens: 4096,
1021
1056
  },
1022
- "meta-llama/llama-4-scout:free": {
1023
- id: "meta-llama/llama-4-scout:free",
1024
- name: "Meta: Llama 4 Scout (free)",
1057
+ "mistralai/codestral-2508": {
1058
+ id: "mistralai/codestral-2508",
1059
+ name: "Mistral: Codestral 2508",
1025
1060
  provider: "openrouter",
1026
1061
  baseUrl: "https://openrouter.ai/api/v1",
1027
1062
  reasoning: false,
1028
- input: ["text", "image"],
1063
+ input: ["text"],
1029
1064
  cost: {
1030
- input: 0,
1031
- output: 0,
1065
+ input: 0.3,
1066
+ output: 0.8999999999999999,
1032
1067
  cacheRead: 0,
1033
1068
  cacheWrite: 0,
1034
1069
  },
1035
- contextWindow: 128000,
1036
- maxTokens: 4028,
1070
+ contextWindow: 256000,
1071
+ maxTokens: 4096,
1037
1072
  },
1038
- "meta-llama/llama-4-scout": {
1039
- id: "meta-llama/llama-4-scout",
1040
- name: "Meta: Llama 4 Scout",
1073
+ "qwen/qwen3-coder-30b-a3b-instruct": {
1074
+ id: "qwen/qwen3-coder-30b-a3b-instruct",
1075
+ name: "Qwen: Qwen3 Coder 30B A3B Instruct",
1041
1076
  provider: "openrouter",
1042
1077
  baseUrl: "https://openrouter.ai/api/v1",
1043
1078
  reasoning: false,
1044
- input: ["text", "image"],
1079
+ input: ["text"],
1045
1080
  cost: {
1046
- input: 0.08,
1047
- output: 0.3,
1081
+ input: 0.051830799999999996,
1082
+ output: 0.207424,
1048
1083
  cacheRead: 0,
1049
1084
  cacheWrite: 0,
1050
1085
  },
1051
- contextWindow: 1048576,
1052
- maxTokens: 1048576,
1086
+ contextWindow: 262144,
1087
+ maxTokens: 4096,
1053
1088
  },
1054
- "deepseek/deepseek-chat-v3-0324:free": {
1055
- id: "deepseek/deepseek-chat-v3-0324:free",
1056
- name: "DeepSeek: DeepSeek V3 0324 (free)",
1089
+ "qwen/qwen3-30b-a3b-instruct-2507": {
1090
+ id: "qwen/qwen3-30b-a3b-instruct-2507",
1091
+ name: "Qwen: Qwen3 30B A3B Instruct 2507",
1057
1092
  provider: "openrouter",
1058
1093
  baseUrl: "https://openrouter.ai/api/v1",
1059
1094
  reasoning: false,
1060
1095
  input: ["text"],
1061
1096
  cost: {
1062
- input: 0,
1063
- output: 0,
1097
+ input: 0.051830799999999996,
1098
+ output: 0.207424,
1064
1099
  cacheRead: 0,
1065
1100
  cacheWrite: 0,
1066
1101
  },
1067
- contextWindow: 163840,
1102
+ contextWindow: 262144,
1068
1103
  maxTokens: 4096,
1069
1104
  },
1070
- "deepseek/deepseek-chat-v3-0324": {
1071
- id: "deepseek/deepseek-chat-v3-0324",
1072
- name: "DeepSeek: DeepSeek V3 0324",
1105
+ "z-ai/glm-4.5": {
1106
+ id: "z-ai/glm-4.5",
1107
+ name: "Z.AI: GLM 4.5",
1073
1108
  provider: "openrouter",
1074
1109
  baseUrl: "https://openrouter.ai/api/v1",
1075
- reasoning: false,
1110
+ reasoning: true,
1076
1111
  input: ["text"],
1077
1112
  cost: {
1078
- input: 0.1999188,
1079
- output: 0.800064,
1113
+ input: 0.32986602,
1114
+ output: 1.3201056,
1080
1115
  cacheRead: 0,
1081
1116
  cacheWrite: 0,
1082
1117
  },
1083
- contextWindow: 163840,
1118
+ contextWindow: 131072,
1084
1119
  maxTokens: 4096,
1085
1120
  },
1086
- "mistralai/mistral-small-3.1-24b-instruct:free": {
1087
- id: "mistralai/mistral-small-3.1-24b-instruct:free",
1088
- name: "Mistral: Mistral Small 3.1 24B (free)",
1121
+ "z-ai/glm-4.5-air:free": {
1122
+ id: "z-ai/glm-4.5-air:free",
1123
+ name: "Z.AI: GLM 4.5 Air (free)",
1089
1124
  provider: "openrouter",
1090
1125
  baseUrl: "https://openrouter.ai/api/v1",
1091
- reasoning: false,
1092
- input: ["text", "image"],
1126
+ reasoning: true,
1127
+ input: ["text"],
1093
1128
  cost: {
1094
1129
  input: 0,
1095
1130
  output: 0,
1096
1131
  cacheRead: 0,
1097
1132
  cacheWrite: 0,
1098
1133
  },
1099
- contextWindow: 128000,
1134
+ contextWindow: 131072,
1100
1135
  maxTokens: 4096,
1101
1136
  },
1102
- "mistralai/mistral-small-3.1-24b-instruct": {
1103
- id: "mistralai/mistral-small-3.1-24b-instruct",
1104
- name: "Mistral: Mistral Small 3.1 24B",
1137
+ "z-ai/glm-4.5-air": {
1138
+ id: "z-ai/glm-4.5-air",
1139
+ name: "Z.AI: GLM 4.5 Air",
1105
1140
  provider: "openrouter",
1106
1141
  baseUrl: "https://openrouter.ai/api/v1",
1107
- reasoning: false,
1108
- input: ["text", "image"],
1142
+ reasoning: true,
1143
+ input: ["text"],
1109
1144
  cost: {
1110
- input: 0.01999188,
1111
- output: 0.0800064,
1145
+ input: 0.14,
1146
+ output: 0.86,
1112
1147
  cacheRead: 0,
1113
1148
  cacheWrite: 0,
1114
1149
  },
1115
1150
  contextWindow: 131072,
1116
- maxTokens: 96000,
1151
+ maxTokens: 131072,
1117
1152
  },
1118
- "qwen/qwq-32b": {
1119
- id: "qwen/qwq-32b",
1120
- name: "Qwen: QwQ 32B",
1153
+ "qwen/qwen3-235b-a22b-thinking-2507": {
1154
+ id: "qwen/qwen3-235b-a22b-thinking-2507",
1155
+ name: "Qwen: Qwen3 235B A22B Thinking 2507",
1121
1156
  provider: "openrouter",
1122
1157
  baseUrl: "https://openrouter.ai/api/v1",
1123
1158
  reasoning: true,
1124
1159
  input: ["text"],
1125
1160
  cost: {
1126
- input: 0.075,
1127
- output: 0.15,
1161
+ input: 0.077968332,
1162
+ output: 0.31202496,
1128
1163
  cacheRead: 0,
1129
1164
  cacheWrite: 0,
1130
1165
  },
1131
- contextWindow: 131072,
1166
+ contextWindow: 262144,
1132
1167
  maxTokens: 4096,
1133
1168
  },
1134
- "mistralai/mistral-saba": {
1135
- id: "mistralai/mistral-saba",
1136
- name: "Mistral: Saba",
1169
+ "z-ai/glm-4-32b": {
1170
+ id: "z-ai/glm-4-32b",
1171
+ name: "Z.AI: GLM 4 32B ",
1137
1172
  provider: "openrouter",
1138
1173
  baseUrl: "https://openrouter.ai/api/v1",
1139
1174
  reasoning: false,
1140
1175
  input: ["text"],
1141
1176
  cost: {
1142
- input: 0.19999999999999998,
1143
- output: 0.6,
1177
+ input: 0.09999999999999999,
1178
+ output: 0.09999999999999999,
1144
1179
  cacheRead: 0,
1145
1180
  cacheWrite: 0,
1146
1181
  },
1147
- contextWindow: 32768,
1182
+ contextWindow: 128000,
1148
1183
  maxTokens: 4096,
1149
1184
  },
1150
- "qwen/qwen-turbo": {
1151
- id: "qwen/qwen-turbo",
1152
- name: "Qwen: Qwen-Turbo",
1185
+ "qwen/qwen3-coder:free": {
1186
+ id: "qwen/qwen3-coder:free",
1187
+ name: "Qwen: Qwen3 Coder 480B A35B (free)",
1153
1188
  provider: "openrouter",
1154
1189
  baseUrl: "https://openrouter.ai/api/v1",
1155
1190
  reasoning: false,
1156
1191
  input: ["text"],
1157
1192
  cost: {
1158
- input: 0.049999999999999996,
1159
- output: 0.19999999999999998,
1160
- cacheRead: 0.02,
1193
+ input: 0,
1194
+ output: 0,
1195
+ cacheRead: 0,
1161
1196
  cacheWrite: 0,
1162
1197
  },
1163
- contextWindow: 1000000,
1164
- maxTokens: 8192,
1198
+ contextWindow: 262144,
1199
+ maxTokens: 4096,
1165
1200
  },
1166
- "qwen/qwen-plus": {
1167
- id: "qwen/qwen-plus",
1168
- name: "Qwen: Qwen-Plus",
1201
+ "qwen/qwen3-coder": {
1202
+ id: "qwen/qwen3-coder",
1203
+ name: "Qwen: Qwen3 Coder 480B A35B",
1169
1204
  provider: "openrouter",
1170
1205
  baseUrl: "https://openrouter.ai/api/v1",
1171
1206
  reasoning: false,
1172
1207
  input: ["text"],
1173
1208
  cost: {
1174
- input: 0.39999999999999997,
1175
- output: 1.2,
1176
- cacheRead: 0.16,
1209
+ input: 0.19999999999999998,
1210
+ output: 0.7999999999999999,
1211
+ cacheRead: 0,
1177
1212
  cacheWrite: 0,
1178
1213
  },
1179
- contextWindow: 131072,
1180
- maxTokens: 8192,
1214
+ contextWindow: 262144,
1215
+ maxTokens: 4096,
1181
1216
  },
1182
- "qwen/qwen-max": {
1183
- id: "qwen/qwen-max",
1184
- name: "Qwen: Qwen-Max ",
1217
+ "qwen/qwen3-235b-a22b-2507": {
1218
+ id: "qwen/qwen3-235b-a22b-2507",
1219
+ name: "Qwen: Qwen3 235B A22B Instruct 2507",
1185
1220
  provider: "openrouter",
1186
1221
  baseUrl: "https://openrouter.ai/api/v1",
1187
1222
  reasoning: false,
1188
1223
  input: ["text"],
1189
1224
  cost: {
1190
- input: 1.5999999999999999,
1191
- output: 6.3999999999999995,
1192
- cacheRead: 0.64,
1225
+ input: 0.077968332,
1226
+ output: 0.31202496,
1227
+ cacheRead: 0,
1193
1228
  cacheWrite: 0,
1194
1229
  },
1195
- contextWindow: 32768,
1196
- maxTokens: 8192,
1230
+ contextWindow: 262144,
1231
+ maxTokens: 4096,
1197
1232
  },
1198
- "mistralai/mistral-small-24b-instruct-2501": {
1199
- id: "mistralai/mistral-small-24b-instruct-2501",
1200
- name: "Mistral: Mistral Small 3",
1233
+ "moonshotai/kimi-k2:free": {
1234
+ id: "moonshotai/kimi-k2:free",
1235
+ name: "MoonshotAI: Kimi K2 (free)",
1201
1236
  provider: "openrouter",
1202
1237
  baseUrl: "https://openrouter.ai/api/v1",
1203
1238
  reasoning: false,
1204
1239
  input: ["text"],
1205
1240
  cost: {
1206
- input: 0.01999188,
1207
- output: 0.0800064,
1241
+ input: 0,
1242
+ output: 0,
1208
1243
  cacheRead: 0,
1209
1244
  cacheWrite: 0,
1210
1245
  },
1211
1246
  contextWindow: 32768,
1212
1247
  maxTokens: 4096,
1213
1248
  },
1214
- "deepseek/deepseek-r1-distill-llama-70b": {
1215
- id: "deepseek/deepseek-r1-distill-llama-70b",
1216
- name: "DeepSeek: R1 Distill Llama 70B",
1249
+ "moonshotai/kimi-k2": {
1250
+ id: "moonshotai/kimi-k2",
1251
+ name: "MoonshotAI: Kimi K2",
1217
1252
  provider: "openrouter",
1218
1253
  baseUrl: "https://openrouter.ai/api/v1",
1219
- reasoning: true,
1254
+ reasoning: false,
1220
1255
  input: ["text"],
1221
1256
  cost: {
1222
- input: 0.025915399999999998,
1223
- output: 0.103712,
1257
+ input: 0.14,
1258
+ output: 2.4899999999999998,
1224
1259
  cacheRead: 0,
1225
1260
  cacheWrite: 0,
1226
1261
  },
1227
- contextWindow: 131072,
1228
- maxTokens: 4096,
1262
+ contextWindow: 63000,
1263
+ maxTokens: 63000,
1229
1264
  },
1230
- "deepseek/deepseek-r1": {
1231
- id: "deepseek/deepseek-r1",
1232
- name: "DeepSeek: R1",
1265
+ "mistralai/devstral-medium": {
1266
+ id: "mistralai/devstral-medium",
1267
+ name: "Mistral: Devstral Medium",
1233
1268
  provider: "openrouter",
1234
1269
  baseUrl: "https://openrouter.ai/api/v1",
1235
- reasoning: true,
1270
+ reasoning: false,
1236
1271
  input: ["text"],
1237
1272
  cost: {
1238
1273
  input: 0.39999999999999997,
@@ -1240,1648 +1275,1388 @@ export const PROVIDERS = {
1240
1275
  cacheRead: 0,
1241
1276
  cacheWrite: 0,
1242
1277
  },
1243
- contextWindow: 163840,
1244
- maxTokens: 163840,
1278
+ contextWindow: 131072,
1279
+ maxTokens: 4096,
1245
1280
  },
1246
- "mistralai/codestral-2501": {
1247
- id: "mistralai/codestral-2501",
1248
- name: "Mistral: Codestral 2501",
1281
+ "mistralai/devstral-small": {
1282
+ id: "mistralai/devstral-small",
1283
+ name: "Mistral: Devstral Small 1.1",
1249
1284
  provider: "openrouter",
1250
1285
  baseUrl: "https://openrouter.ai/api/v1",
1251
1286
  reasoning: false,
1252
1287
  input: ["text"],
1253
1288
  cost: {
1254
- input: 0.3,
1255
- output: 0.8999999999999999,
1289
+ input: 0.07,
1290
+ output: 0.28,
1256
1291
  cacheRead: 0,
1257
1292
  cacheWrite: 0,
1258
1293
  },
1259
- contextWindow: 262144,
1294
+ contextWindow: 128000,
1260
1295
  maxTokens: 4096,
1261
1296
  },
1262
- "deepseek/deepseek-chat": {
1263
- id: "deepseek/deepseek-chat",
1264
- name: "DeepSeek: DeepSeek V3",
1297
+ "inception/mercury": {
1298
+ id: "inception/mercury",
1299
+ name: "Inception: Mercury",
1265
1300
  provider: "openrouter",
1266
1301
  baseUrl: "https://openrouter.ai/api/v1",
1267
1302
  reasoning: false,
1268
1303
  input: ["text"],
1269
1304
  cost: {
1270
- input: 0.1999188,
1271
- output: 0.800064,
1305
+ input: 0.25,
1306
+ output: 1,
1272
1307
  cacheRead: 0,
1273
1308
  cacheWrite: 0,
1274
1309
  },
1275
- contextWindow: 163840,
1276
- maxTokens: 4096,
1310
+ contextWindow: 128000,
1311
+ maxTokens: 16384,
1277
1312
  },
1278
- "meta-llama/llama-3.3-70b-instruct:free": {
1279
- id: "meta-llama/llama-3.3-70b-instruct:free",
1280
- name: "Meta: Llama 3.3 70B Instruct (free)",
1313
+ "mistralai/mistral-small-3.2-24b-instruct:free": {
1314
+ id: "mistralai/mistral-small-3.2-24b-instruct:free",
1315
+ name: "Mistral: Mistral Small 3.2 24B (free)",
1281
1316
  provider: "openrouter",
1282
1317
  baseUrl: "https://openrouter.ai/api/v1",
1283
1318
  reasoning: false,
1284
- input: ["text"],
1319
+ input: ["text", "image"],
1285
1320
  cost: {
1286
1321
  input: 0,
1287
1322
  output: 0,
1288
1323
  cacheRead: 0,
1289
1324
  cacheWrite: 0,
1290
1325
  },
1291
- contextWindow: 65536,
1326
+ contextWindow: 131072,
1292
1327
  maxTokens: 4096,
1293
1328
  },
1294
- "meta-llama/llama-3.3-70b-instruct": {
1295
- id: "meta-llama/llama-3.3-70b-instruct",
1296
- name: "Meta: Llama 3.3 70B Instruct",
1329
+ "mistralai/mistral-small-3.2-24b-instruct": {
1330
+ id: "mistralai/mistral-small-3.2-24b-instruct",
1331
+ name: "Mistral: Mistral Small 3.2 24B",
1297
1332
  provider: "openrouter",
1298
1333
  baseUrl: "https://openrouter.ai/api/v1",
1299
1334
  reasoning: false,
1300
- input: ["text"],
1335
+ input: ["text", "image"],
1301
1336
  cost: {
1302
- input: 0.038000000000000006,
1303
- output: 0.12,
1337
+ input: 0.049999999999999996,
1338
+ output: 0.09999999999999999,
1304
1339
  cacheRead: 0,
1305
1340
  cacheWrite: 0,
1306
1341
  },
1307
- contextWindow: 131072,
1308
- maxTokens: 16384,
1342
+ contextWindow: 128000,
1343
+ maxTokens: 4096,
1309
1344
  },
1310
- "amazon/nova-lite-v1": {
1311
- id: "amazon/nova-lite-v1",
1312
- name: "Amazon: Nova Lite 1.0",
1345
+ "minimax/minimax-m1": {
1346
+ id: "minimax/minimax-m1",
1347
+ name: "MiniMax: MiniMax M1",
1313
1348
  provider: "openrouter",
1314
1349
  baseUrl: "https://openrouter.ai/api/v1",
1315
- reasoning: false,
1316
- input: ["text", "image"],
1350
+ reasoning: true,
1351
+ input: ["text"],
1317
1352
  cost: {
1318
- input: 0.06,
1319
- output: 0.24,
1353
+ input: 0.3,
1354
+ output: 1.6500000000000001,
1320
1355
  cacheRead: 0,
1321
1356
  cacheWrite: 0,
1322
1357
  },
1323
- contextWindow: 300000,
1324
- maxTokens: 5120,
1358
+ contextWindow: 1000000,
1359
+ maxTokens: 40000,
1325
1360
  },
1326
- "amazon/nova-micro-v1": {
1327
- id: "amazon/nova-micro-v1",
1328
- name: "Amazon: Nova Micro 1.0",
1361
+ "mistralai/magistral-small-2506": {
1362
+ id: "mistralai/magistral-small-2506",
1363
+ name: "Mistral: Magistral Small 2506",
1329
1364
  provider: "openrouter",
1330
1365
  baseUrl: "https://openrouter.ai/api/v1",
1331
- reasoning: false,
1366
+ reasoning: true,
1332
1367
  input: ["text"],
1333
1368
  cost: {
1334
- input: 0.035,
1335
- output: 0.14,
1369
+ input: 0.5,
1370
+ output: 1.5,
1336
1371
  cacheRead: 0,
1337
1372
  cacheWrite: 0,
1338
1373
  },
1339
- contextWindow: 128000,
1340
- maxTokens: 5120,
1374
+ contextWindow: 40000,
1375
+ maxTokens: 40000,
1341
1376
  },
1342
- "amazon/nova-pro-v1": {
1343
- id: "amazon/nova-pro-v1",
1344
- name: "Amazon: Nova Pro 1.0",
1377
+ "mistralai/magistral-medium-2506": {
1378
+ id: "mistralai/magistral-medium-2506",
1379
+ name: "Mistral: Magistral Medium 2506",
1345
1380
  provider: "openrouter",
1346
1381
  baseUrl: "https://openrouter.ai/api/v1",
1347
- reasoning: false,
1348
- input: ["text", "image"],
1382
+ reasoning: true,
1383
+ input: ["text"],
1349
1384
  cost: {
1350
- input: 0.7999999999999999,
1351
- output: 3.1999999999999997,
1385
+ input: 2,
1386
+ output: 5,
1352
1387
  cacheRead: 0,
1353
1388
  cacheWrite: 0,
1354
1389
  },
1355
- contextWindow: 300000,
1356
- maxTokens: 5120,
1390
+ contextWindow: 40960,
1391
+ maxTokens: 40000,
1357
1392
  },
1358
- "mistralai/mistral-large-2411": {
1359
- id: "mistralai/mistral-large-2411",
1360
- name: "Mistral Large 2411",
1393
+ "mistralai/magistral-medium-2506:thinking": {
1394
+ id: "mistralai/magistral-medium-2506:thinking",
1395
+ name: "Mistral: Magistral Medium 2506 (thinking)",
1361
1396
  provider: "openrouter",
1362
1397
  baseUrl: "https://openrouter.ai/api/v1",
1363
- reasoning: false,
1398
+ reasoning: true,
1364
1399
  input: ["text"],
1365
1400
  cost: {
1366
1401
  input: 2,
1367
- output: 6,
1402
+ output: 5,
1368
1403
  cacheRead: 0,
1369
1404
  cacheWrite: 0,
1370
1405
  },
1371
- contextWindow: 131072,
1372
- maxTokens: 4096,
1406
+ contextWindow: 40960,
1407
+ maxTokens: 40000,
1373
1408
  },
1374
- "mistralai/mistral-large-2407": {
1375
- id: "mistralai/mistral-large-2407",
1376
- name: "Mistral Large 2407",
1409
+ "deepseek/deepseek-r1-0528": {
1410
+ id: "deepseek/deepseek-r1-0528",
1411
+ name: "DeepSeek: R1 0528",
1377
1412
  provider: "openrouter",
1378
1413
  baseUrl: "https://openrouter.ai/api/v1",
1379
- reasoning: false,
1414
+ reasoning: true,
1380
1415
  input: ["text"],
1381
1416
  cost: {
1382
- input: 2,
1383
- output: 6,
1417
+ input: 0.1999188,
1418
+ output: 0.800064,
1384
1419
  cacheRead: 0,
1385
1420
  cacheWrite: 0,
1386
1421
  },
1387
- contextWindow: 131072,
1422
+ contextWindow: 163840,
1388
1423
  maxTokens: 4096,
1389
1424
  },
1390
- "mistralai/pixtral-large-2411": {
1391
- id: "mistralai/pixtral-large-2411",
1392
- name: "Mistral: Pixtral Large 2411",
1425
+ "mistralai/devstral-small-2505:free": {
1426
+ id: "mistralai/devstral-small-2505:free",
1427
+ name: "Mistral: Devstral Small 2505 (free)",
1393
1428
  provider: "openrouter",
1394
1429
  baseUrl: "https://openrouter.ai/api/v1",
1395
1430
  reasoning: false,
1396
- input: ["text", "image"],
1431
+ input: ["text"],
1397
1432
  cost: {
1398
- input: 2,
1399
- output: 6,
1433
+ input: 0,
1434
+ output: 0,
1400
1435
  cacheRead: 0,
1401
1436
  cacheWrite: 0,
1402
1437
  },
1403
- contextWindow: 131072,
1438
+ contextWindow: 32768,
1404
1439
  maxTokens: 4096,
1405
1440
  },
1406
- "thedrummer/unslopnemo-12b": {
1407
- id: "thedrummer/unslopnemo-12b",
1408
- name: "TheDrummer: UnslopNemo 12B",
1441
+ "mistralai/devstral-small-2505": {
1442
+ id: "mistralai/devstral-small-2505",
1443
+ name: "Mistral: Devstral Small 2505",
1409
1444
  provider: "openrouter",
1410
1445
  baseUrl: "https://openrouter.ai/api/v1",
1411
1446
  reasoning: false,
1412
1447
  input: ["text"],
1413
1448
  cost: {
1414
- input: 0.39999999999999997,
1415
- output: 0.39999999999999997,
1449
+ input: 0.01999188,
1450
+ output: 0.0800064,
1416
1451
  cacheRead: 0,
1417
1452
  cacheWrite: 0,
1418
1453
  },
1419
- contextWindow: 32768,
1454
+ contextWindow: 131072,
1420
1455
  maxTokens: 4096,
1421
1456
  },
1422
- "mistralai/ministral-8b": {
1423
- id: "mistralai/ministral-8b",
1424
- name: "Mistral: Ministral 8B",
1457
+ "meta-llama/llama-3.3-8b-instruct:free": {
1458
+ id: "meta-llama/llama-3.3-8b-instruct:free",
1459
+ name: "Meta: Llama 3.3 8B Instruct (free)",
1425
1460
  provider: "openrouter",
1426
1461
  baseUrl: "https://openrouter.ai/api/v1",
1427
1462
  reasoning: false,
1428
1463
  input: ["text"],
1429
1464
  cost: {
1430
- input: 0.09999999999999999,
1431
- output: 0.09999999999999999,
1465
+ input: 0,
1466
+ output: 0,
1432
1467
  cacheRead: 0,
1433
1468
  cacheWrite: 0,
1434
1469
  },
1435
1470
  contextWindow: 128000,
1436
- maxTokens: 4096,
1471
+ maxTokens: 4028,
1437
1472
  },
1438
- "nvidia/llama-3.1-nemotron-70b-instruct": {
1439
- id: "nvidia/llama-3.1-nemotron-70b-instruct",
1440
- name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1473
+ "mistralai/mistral-medium-3": {
1474
+ id: "mistralai/mistral-medium-3",
1475
+ name: "Mistral: Mistral Medium 3",
1441
1476
  provider: "openrouter",
1442
1477
  baseUrl: "https://openrouter.ai/api/v1",
1443
1478
  reasoning: false,
1444
- input: ["text"],
1479
+ input: ["text", "image"],
1445
1480
  cost: {
1446
- input: 0.12,
1447
- output: 0.3,
1481
+ input: 0.39999999999999997,
1482
+ output: 2,
1448
1483
  cacheRead: 0,
1449
1484
  cacheWrite: 0,
1450
1485
  },
1451
1486
  contextWindow: 131072,
1452
- maxTokens: 16384,
1487
+ maxTokens: 4096,
1453
1488
  },
1454
- "thedrummer/rocinante-12b": {
1455
- id: "thedrummer/rocinante-12b",
1456
- name: "TheDrummer: Rocinante 12B",
1489
+ "arcee-ai/virtuoso-large": {
1490
+ id: "arcee-ai/virtuoso-large",
1491
+ name: "Arcee AI: Virtuoso Large",
1457
1492
  provider: "openrouter",
1458
1493
  baseUrl: "https://openrouter.ai/api/v1",
1459
1494
  reasoning: false,
1460
1495
  input: ["text"],
1461
1496
  cost: {
1462
- input: 0.16999999999999998,
1463
- output: 0.43,
1497
+ input: 0.75,
1498
+ output: 1.2,
1464
1499
  cacheRead: 0,
1465
1500
  cacheWrite: 0,
1466
1501
  },
1467
- contextWindow: 32768,
1468
- maxTokens: 4096,
1502
+ contextWindow: 131072,
1503
+ maxTokens: 64000,
1469
1504
  },
1470
- "meta-llama/llama-3.2-3b-instruct": {
1471
- id: "meta-llama/llama-3.2-3b-instruct",
1472
- name: "Meta: Llama 3.2 3B Instruct",
1505
+ "inception/mercury-coder": {
1506
+ id: "inception/mercury-coder",
1507
+ name: "Inception: Mercury Coder",
1473
1508
  provider: "openrouter",
1474
1509
  baseUrl: "https://openrouter.ai/api/v1",
1475
1510
  reasoning: false,
1476
1511
  input: ["text"],
1477
1512
  cost: {
1478
- input: 0.003,
1479
- output: 0.006,
1513
+ input: 0.25,
1514
+ output: 1,
1480
1515
  cacheRead: 0,
1481
1516
  cacheWrite: 0,
1482
1517
  },
1483
- contextWindow: 20000,
1484
- maxTokens: 20000,
1518
+ contextWindow: 128000,
1519
+ maxTokens: 16384,
1485
1520
  },
1486
- "qwen/qwen-2.5-72b-instruct": {
1487
- id: "qwen/qwen-2.5-72b-instruct",
1488
- name: "Qwen2.5 72B Instruct",
1521
+ "qwen/qwen3-4b:free": {
1522
+ id: "qwen/qwen3-4b:free",
1523
+ name: "Qwen: Qwen3 4B (free)",
1489
1524
  provider: "openrouter",
1490
1525
  baseUrl: "https://openrouter.ai/api/v1",
1491
- reasoning: false,
1526
+ reasoning: true,
1492
1527
  input: ["text"],
1493
1528
  cost: {
1494
- input: 0.051830799999999996,
1495
- output: 0.207424,
1529
+ input: 0,
1530
+ output: 0,
1496
1531
  cacheRead: 0,
1497
1532
  cacheWrite: 0,
1498
1533
  },
1499
- contextWindow: 32768,
1534
+ contextWindow: 40960,
1500
1535
  maxTokens: 4096,
1501
1536
  },
1502
- "mistralai/pixtral-12b": {
1503
- id: "mistralai/pixtral-12b",
1504
- name: "Mistral: Pixtral 12B",
1537
+ "qwen/qwen3-30b-a3b": {
1538
+ id: "qwen/qwen3-30b-a3b",
1539
+ name: "Qwen: Qwen3 30B A3B",
1505
1540
  provider: "openrouter",
1506
1541
  baseUrl: "https://openrouter.ai/api/v1",
1507
- reasoning: false,
1508
- input: ["text", "image"],
1542
+ reasoning: true,
1543
+ input: ["text"],
1509
1544
  cost: {
1510
- input: 0.09999999999999999,
1511
- output: 0.09999999999999999,
1545
+ input: 0.01999188,
1546
+ output: 0.0800064,
1512
1547
  cacheRead: 0,
1513
1548
  cacheWrite: 0,
1514
1549
  },
1515
- contextWindow: 32768,
1550
+ contextWindow: 40960,
1516
1551
  maxTokens: 4096,
1517
1552
  },
1518
- "cohere/command-r-plus-08-2024": {
1519
- id: "cohere/command-r-plus-08-2024",
1520
- name: "Cohere: Command R+ (08-2024)",
1553
+ "qwen/qwen3-14b": {
1554
+ id: "qwen/qwen3-14b",
1555
+ name: "Qwen: Qwen3 14B",
1521
1556
  provider: "openrouter",
1522
1557
  baseUrl: "https://openrouter.ai/api/v1",
1523
- reasoning: false,
1558
+ reasoning: true,
1524
1559
  input: ["text"],
1525
1560
  cost: {
1526
- input: 2.5,
1527
- output: 10,
1561
+ input: 0.06,
1562
+ output: 0.24,
1528
1563
  cacheRead: 0,
1529
1564
  cacheWrite: 0,
1530
1565
  },
1531
- contextWindow: 128000,
1532
- maxTokens: 4000,
1566
+ contextWindow: 40960,
1567
+ maxTokens: 40960,
1533
1568
  },
1534
- "cohere/command-r-08-2024": {
1535
- id: "cohere/command-r-08-2024",
1536
- name: "Cohere: Command R (08-2024)",
1569
+ "qwen/qwen3-32b": {
1570
+ id: "qwen/qwen3-32b",
1571
+ name: "Qwen: Qwen3 32B",
1537
1572
  provider: "openrouter",
1538
1573
  baseUrl: "https://openrouter.ai/api/v1",
1539
- reasoning: false,
1574
+ reasoning: true,
1540
1575
  input: ["text"],
1541
1576
  cost: {
1542
- input: 0.15,
1543
- output: 0.6,
1577
+ input: 0.017992691999999998,
1578
+ output: 0.07200576,
1544
1579
  cacheRead: 0,
1545
1580
  cacheWrite: 0,
1546
1581
  },
1547
- contextWindow: 128000,
1548
- maxTokens: 4000,
1582
+ contextWindow: 40960,
1583
+ maxTokens: 4096,
1549
1584
  },
1550
- "microsoft/phi-3.5-mini-128k-instruct": {
1551
- id: "microsoft/phi-3.5-mini-128k-instruct",
1552
- name: "Microsoft: Phi-3.5 Mini 128K Instruct",
1585
+ "qwen/qwen3-235b-a22b:free": {
1586
+ id: "qwen/qwen3-235b-a22b:free",
1587
+ name: "Qwen: Qwen3 235B A22B (free)",
1553
1588
  provider: "openrouter",
1554
1589
  baseUrl: "https://openrouter.ai/api/v1",
1555
- reasoning: false,
1590
+ reasoning: true,
1556
1591
  input: ["text"],
1557
1592
  cost: {
1558
- input: 0.09999999999999999,
1559
- output: 0.09999999999999999,
1593
+ input: 0,
1594
+ output: 0,
1560
1595
  cacheRead: 0,
1561
1596
  cacheWrite: 0,
1562
1597
  },
1563
- contextWindow: 128000,
1598
+ contextWindow: 131072,
1564
1599
  maxTokens: 4096,
1565
1600
  },
1566
- "nousresearch/hermes-3-llama-3.1-70b": {
1567
- id: "nousresearch/hermes-3-llama-3.1-70b",
1568
- name: "Nous: Hermes 3 70B Instruct",
1601
+ "qwen/qwen3-235b-a22b": {
1602
+ id: "qwen/qwen3-235b-a22b",
1603
+ name: "Qwen: Qwen3 235B A22B",
1569
1604
  provider: "openrouter",
1570
1605
  baseUrl: "https://openrouter.ai/api/v1",
1571
- reasoning: false,
1606
+ reasoning: true,
1572
1607
  input: ["text"],
1573
1608
  cost: {
1574
- input: 0.09999999999999999,
1575
- output: 0.28,
1609
+ input: 0.13,
1610
+ output: 0.6,
1576
1611
  cacheRead: 0,
1577
1612
  cacheWrite: 0,
1578
1613
  },
1579
- contextWindow: 131072,
1580
- maxTokens: 4096,
1614
+ contextWindow: 40960,
1615
+ maxTokens: 40960,
1581
1616
  },
1582
- "meta-llama/llama-3.1-8b-instruct": {
1583
- id: "meta-llama/llama-3.1-8b-instruct",
1584
- name: "Meta: Llama 3.1 8B Instruct",
1617
+ "meta-llama/llama-4-maverick:free": {
1618
+ id: "meta-llama/llama-4-maverick:free",
1619
+ name: "Meta: Llama 4 Maverick (free)",
1585
1620
  provider: "openrouter",
1586
1621
  baseUrl: "https://openrouter.ai/api/v1",
1587
1622
  reasoning: false,
1588
- input: ["text"],
1623
+ input: ["text", "image"],
1589
1624
  cost: {
1590
- input: 0.015,
1591
- output: 0.02,
1625
+ input: 0,
1626
+ output: 0,
1592
1627
  cacheRead: 0,
1593
1628
  cacheWrite: 0,
1594
1629
  },
1595
- contextWindow: 131072,
1596
- maxTokens: 16384,
1630
+ contextWindow: 128000,
1631
+ maxTokens: 4028,
1597
1632
  },
1598
- "meta-llama/llama-3.1-70b-instruct": {
1599
- id: "meta-llama/llama-3.1-70b-instruct",
1600
- name: "Meta: Llama 3.1 70B Instruct",
1633
+ "meta-llama/llama-4-maverick": {
1634
+ id: "meta-llama/llama-4-maverick",
1635
+ name: "Meta: Llama 4 Maverick",
1601
1636
  provider: "openrouter",
1602
1637
  baseUrl: "https://openrouter.ai/api/v1",
1603
1638
  reasoning: false,
1604
- input: ["text"],
1639
+ input: ["text", "image"],
1605
1640
  cost: {
1606
- input: 0.09999999999999999,
1607
- output: 0.28,
1641
+ input: 0.15,
1642
+ output: 0.6,
1608
1643
  cacheRead: 0,
1609
1644
  cacheWrite: 0,
1610
1645
  },
1611
- contextWindow: 131072,
1646
+ contextWindow: 1048576,
1612
1647
  maxTokens: 16384,
1613
1648
  },
1614
- "meta-llama/llama-3.1-405b-instruct": {
1615
- id: "meta-llama/llama-3.1-405b-instruct",
1616
- name: "Meta: Llama 3.1 405B Instruct",
1649
+ "meta-llama/llama-4-scout:free": {
1650
+ id: "meta-llama/llama-4-scout:free",
1651
+ name: "Meta: Llama 4 Scout (free)",
1617
1652
  provider: "openrouter",
1618
1653
  baseUrl: "https://openrouter.ai/api/v1",
1619
1654
  reasoning: false,
1620
- input: ["text"],
1655
+ input: ["text", "image"],
1621
1656
  cost: {
1622
- input: 0.7999999999999999,
1623
- output: 0.7999999999999999,
1657
+ input: 0,
1658
+ output: 0,
1624
1659
  cacheRead: 0,
1625
1660
  cacheWrite: 0,
1626
1661
  },
1627
- contextWindow: 32768,
1628
- maxTokens: 16384,
1662
+ contextWindow: 128000,
1663
+ maxTokens: 4028,
1629
1664
  },
1630
- "mistralai/mistral-nemo": {
1631
- id: "mistralai/mistral-nemo",
1632
- name: "Mistral: Mistral Nemo",
1665
+ "meta-llama/llama-4-scout": {
1666
+ id: "meta-llama/llama-4-scout",
1667
+ name: "Meta: Llama 4 Scout",
1633
1668
  provider: "openrouter",
1634
1669
  baseUrl: "https://openrouter.ai/api/v1",
1635
1670
  reasoning: false,
1636
- input: ["text"],
1671
+ input: ["text", "image"],
1637
1672
  cost: {
1638
- input: 0.0075,
1639
- output: 0.049999999999999996,
1673
+ input: 0.08,
1674
+ output: 0.3,
1640
1675
  cacheRead: 0,
1641
1676
  cacheWrite: 0,
1642
1677
  },
1643
- contextWindow: 32000,
1644
- maxTokens: 4096,
1678
+ contextWindow: 1048576,
1679
+ maxTokens: 1048576,
1645
1680
  },
1646
- "mistralai/mistral-7b-instruct-v0.3": {
1647
- id: "mistralai/mistral-7b-instruct-v0.3",
1648
- name: "Mistral: Mistral 7B Instruct v0.3",
1681
+ "deepseek/deepseek-chat-v3-0324:free": {
1682
+ id: "deepseek/deepseek-chat-v3-0324:free",
1683
+ name: "DeepSeek: DeepSeek V3 0324 (free)",
1649
1684
  provider: "openrouter",
1650
1685
  baseUrl: "https://openrouter.ai/api/v1",
1651
1686
  reasoning: false,
1652
1687
  input: ["text"],
1653
1688
  cost: {
1654
- input: 0.028,
1655
- output: 0.054,
1689
+ input: 0,
1690
+ output: 0,
1656
1691
  cacheRead: 0,
1657
1692
  cacheWrite: 0,
1658
1693
  },
1659
- contextWindow: 32768,
1660
- maxTokens: 16384,
1694
+ contextWindow: 163840,
1695
+ maxTokens: 4096,
1661
1696
  },
1662
- "mistralai/mistral-7b-instruct:free": {
1663
- id: "mistralai/mistral-7b-instruct:free",
1664
- name: "Mistral: Mistral 7B Instruct (free)",
1697
+ "deepseek/deepseek-chat-v3-0324": {
1698
+ id: "deepseek/deepseek-chat-v3-0324",
1699
+ name: "DeepSeek: DeepSeek V3 0324",
1665
1700
  provider: "openrouter",
1666
1701
  baseUrl: "https://openrouter.ai/api/v1",
1667
1702
  reasoning: false,
1668
1703
  input: ["text"],
1704
+ cost: {
1705
+ input: 0.1999188,
1706
+ output: 0.800064,
1707
+ cacheRead: 0,
1708
+ cacheWrite: 0,
1709
+ },
1710
+ contextWindow: 163840,
1711
+ maxTokens: 4096,
1712
+ },
1713
+ "mistralai/mistral-small-3.1-24b-instruct:free": {
1714
+ id: "mistralai/mistral-small-3.1-24b-instruct:free",
1715
+ name: "Mistral: Mistral Small 3.1 24B (free)",
1716
+ provider: "openrouter",
1717
+ baseUrl: "https://openrouter.ai/api/v1",
1718
+ reasoning: false,
1719
+ input: ["text", "image"],
1669
1720
  cost: {
1670
1721
  input: 0,
1671
1722
  output: 0,
1672
1723
  cacheRead: 0,
1673
1724
  cacheWrite: 0,
1674
1725
  },
1675
- contextWindow: 32768,
1676
- maxTokens: 16384,
1726
+ contextWindow: 128000,
1727
+ maxTokens: 4096,
1677
1728
  },
1678
- "mistralai/mistral-7b-instruct": {
1679
- id: "mistralai/mistral-7b-instruct",
1680
- name: "Mistral: Mistral 7B Instruct",
1729
+ "mistralai/mistral-small-3.1-24b-instruct": {
1730
+ id: "mistralai/mistral-small-3.1-24b-instruct",
1731
+ name: "Mistral: Mistral Small 3.1 24B",
1681
1732
  provider: "openrouter",
1682
1733
  baseUrl: "https://openrouter.ai/api/v1",
1683
1734
  reasoning: false,
1684
- input: ["text"],
1735
+ input: ["text", "image"],
1685
1736
  cost: {
1686
- input: 0.028,
1687
- output: 0.054,
1737
+ input: 0.01999188,
1738
+ output: 0.0800064,
1688
1739
  cacheRead: 0,
1689
1740
  cacheWrite: 0,
1690
1741
  },
1691
- contextWindow: 32768,
1692
- maxTokens: 16384,
1742
+ contextWindow: 131072,
1743
+ maxTokens: 96000,
1693
1744
  },
1694
- "microsoft/phi-3-mini-128k-instruct": {
1695
- id: "microsoft/phi-3-mini-128k-instruct",
1696
- name: "Microsoft: Phi-3 Mini 128K Instruct",
1745
+ "qwen/qwq-32b": {
1746
+ id: "qwen/qwq-32b",
1747
+ name: "Qwen: QwQ 32B",
1697
1748
  provider: "openrouter",
1698
1749
  baseUrl: "https://openrouter.ai/api/v1",
1699
- reasoning: false,
1750
+ reasoning: true,
1700
1751
  input: ["text"],
1701
1752
  cost: {
1702
- input: 0.09999999999999999,
1703
- output: 0.09999999999999999,
1753
+ input: 0.075,
1754
+ output: 0.15,
1704
1755
  cacheRead: 0,
1705
1756
  cacheWrite: 0,
1706
1757
  },
1707
- contextWindow: 128000,
1758
+ contextWindow: 131072,
1708
1759
  maxTokens: 4096,
1709
1760
  },
1710
- "microsoft/phi-3-medium-128k-instruct": {
1711
- id: "microsoft/phi-3-medium-128k-instruct",
1712
- name: "Microsoft: Phi-3 Medium 128K Instruct",
1761
+ "mistralai/mistral-saba": {
1762
+ id: "mistralai/mistral-saba",
1763
+ name: "Mistral: Saba",
1713
1764
  provider: "openrouter",
1714
1765
  baseUrl: "https://openrouter.ai/api/v1",
1715
1766
  reasoning: false,
1716
1767
  input: ["text"],
1717
1768
  cost: {
1718
- input: 1,
1719
- output: 1,
1769
+ input: 0.19999999999999998,
1770
+ output: 0.6,
1720
1771
  cacheRead: 0,
1721
1772
  cacheWrite: 0,
1722
1773
  },
1723
- contextWindow: 128000,
1774
+ contextWindow: 32768,
1724
1775
  maxTokens: 4096,
1725
1776
  },
1726
- "meta-llama/llama-3-70b-instruct": {
1727
- id: "meta-llama/llama-3-70b-instruct",
1728
- name: "Meta: Llama 3 70B Instruct",
1777
+ "qwen/qwen-turbo": {
1778
+ id: "qwen/qwen-turbo",
1779
+ name: "Qwen: Qwen-Turbo",
1729
1780
  provider: "openrouter",
1730
1781
  baseUrl: "https://openrouter.ai/api/v1",
1731
1782
  reasoning: false,
1732
1783
  input: ["text"],
1733
1784
  cost: {
1734
- input: 0.3,
1735
- output: 0.39999999999999997,
1736
- cacheRead: 0,
1785
+ input: 0.049999999999999996,
1786
+ output: 0.19999999999999998,
1787
+ cacheRead: 0.02,
1737
1788
  cacheWrite: 0,
1738
1789
  },
1739
- contextWindow: 8192,
1740
- maxTokens: 16384,
1790
+ contextWindow: 1000000,
1791
+ maxTokens: 8192,
1741
1792
  },
1742
- "meta-llama/llama-3-8b-instruct": {
1743
- id: "meta-llama/llama-3-8b-instruct",
1744
- name: "Meta: Llama 3 8B Instruct",
1793
+ "qwen/qwen-plus": {
1794
+ id: "qwen/qwen-plus",
1795
+ name: "Qwen: Qwen-Plus",
1745
1796
  provider: "openrouter",
1746
1797
  baseUrl: "https://openrouter.ai/api/v1",
1747
1798
  reasoning: false,
1748
1799
  input: ["text"],
1749
1800
  cost: {
1750
- input: 0.03,
1751
- output: 0.06,
1752
- cacheRead: 0,
1801
+ input: 0.39999999999999997,
1802
+ output: 1.2,
1803
+ cacheRead: 0.16,
1753
1804
  cacheWrite: 0,
1754
1805
  },
1755
- contextWindow: 8192,
1756
- maxTokens: 16384,
1806
+ contextWindow: 131072,
1807
+ maxTokens: 8192,
1757
1808
  },
1758
- "mistralai/mixtral-8x22b-instruct": {
1759
- id: "mistralai/mixtral-8x22b-instruct",
1760
- name: "Mistral: Mixtral 8x22B Instruct",
1809
+ "qwen/qwen-max": {
1810
+ id: "qwen/qwen-max",
1811
+ name: "Qwen: Qwen-Max ",
1761
1812
  provider: "openrouter",
1762
1813
  baseUrl: "https://openrouter.ai/api/v1",
1763
1814
  reasoning: false,
1764
1815
  input: ["text"],
1765
1816
  cost: {
1766
- input: 0.8999999999999999,
1767
- output: 0.8999999999999999,
1768
- cacheRead: 0,
1817
+ input: 1.5999999999999999,
1818
+ output: 6.3999999999999995,
1819
+ cacheRead: 0.64,
1769
1820
  cacheWrite: 0,
1770
1821
  },
1771
- contextWindow: 65536,
1772
- maxTokens: 4096,
1822
+ contextWindow: 32768,
1823
+ maxTokens: 8192,
1773
1824
  },
1774
- "cohere/command-r-plus": {
1775
- id: "cohere/command-r-plus",
1776
- name: "Cohere: Command R+",
1825
+ "mistralai/mistral-small-24b-instruct-2501": {
1826
+ id: "mistralai/mistral-small-24b-instruct-2501",
1827
+ name: "Mistral: Mistral Small 3",
1777
1828
  provider: "openrouter",
1778
1829
  baseUrl: "https://openrouter.ai/api/v1",
1779
1830
  reasoning: false,
1780
1831
  input: ["text"],
1781
1832
  cost: {
1782
- input: 3,
1783
- output: 15,
1833
+ input: 0.01999188,
1834
+ output: 0.0800064,
1784
1835
  cacheRead: 0,
1785
1836
  cacheWrite: 0,
1786
1837
  },
1787
- contextWindow: 128000,
1788
- maxTokens: 4000,
1838
+ contextWindow: 32768,
1839
+ maxTokens: 4096,
1789
1840
  },
1790
- "cohere/command-r-plus-04-2024": {
1791
- id: "cohere/command-r-plus-04-2024",
1792
- name: "Cohere: Command R+ (04-2024)",
1841
+ "deepseek/deepseek-r1-distill-llama-70b": {
1842
+ id: "deepseek/deepseek-r1-distill-llama-70b",
1843
+ name: "DeepSeek: R1 Distill Llama 70B",
1793
1844
  provider: "openrouter",
1794
1845
  baseUrl: "https://openrouter.ai/api/v1",
1795
- reasoning: false,
1846
+ reasoning: true,
1796
1847
  input: ["text"],
1797
1848
  cost: {
1798
- input: 3,
1799
- output: 15,
1849
+ input: 0.025915399999999998,
1850
+ output: 0.103712,
1800
1851
  cacheRead: 0,
1801
1852
  cacheWrite: 0,
1802
1853
  },
1803
- contextWindow: 128000,
1804
- maxTokens: 4000,
1854
+ contextWindow: 131072,
1855
+ maxTokens: 4096,
1805
1856
  },
1806
- "cohere/command-r": {
1807
- id: "cohere/command-r",
1808
- name: "Cohere: Command R",
1857
+ "deepseek/deepseek-r1": {
1858
+ id: "deepseek/deepseek-r1",
1859
+ name: "DeepSeek: R1",
1809
1860
  provider: "openrouter",
1810
1861
  baseUrl: "https://openrouter.ai/api/v1",
1811
- reasoning: false,
1862
+ reasoning: true,
1812
1863
  input: ["text"],
1813
1864
  cost: {
1814
- input: 0.5,
1815
- output: 1.5,
1865
+ input: 0.39999999999999997,
1866
+ output: 2,
1816
1867
  cacheRead: 0,
1817
1868
  cacheWrite: 0,
1818
1869
  },
1819
- contextWindow: 128000,
1820
- maxTokens: 4000,
1870
+ contextWindow: 163840,
1871
+ maxTokens: 163840,
1821
1872
  },
1822
- "cohere/command-r-03-2024": {
1823
- id: "cohere/command-r-03-2024",
1824
- name: "Cohere: Command R (03-2024)",
1873
+ "mistralai/codestral-2501": {
1874
+ id: "mistralai/codestral-2501",
1875
+ name: "Mistral: Codestral 2501",
1825
1876
  provider: "openrouter",
1826
1877
  baseUrl: "https://openrouter.ai/api/v1",
1827
1878
  reasoning: false,
1828
1879
  input: ["text"],
1829
1880
  cost: {
1830
- input: 0.5,
1831
- output: 1.5,
1881
+ input: 0.3,
1882
+ output: 0.8999999999999999,
1832
1883
  cacheRead: 0,
1833
1884
  cacheWrite: 0,
1834
1885
  },
1835
- contextWindow: 128000,
1836
- maxTokens: 4000,
1886
+ contextWindow: 262144,
1887
+ maxTokens: 4096,
1837
1888
  },
1838
- "mistralai/mistral-large": {
1839
- id: "mistralai/mistral-large",
1840
- name: "Mistral Large",
1889
+ "deepseek/deepseek-chat": {
1890
+ id: "deepseek/deepseek-chat",
1891
+ name: "DeepSeek: DeepSeek V3",
1841
1892
  provider: "openrouter",
1842
1893
  baseUrl: "https://openrouter.ai/api/v1",
1843
1894
  reasoning: false,
1844
1895
  input: ["text"],
1845
1896
  cost: {
1846
- input: 2,
1847
- output: 6,
1897
+ input: 0.1999188,
1898
+ output: 0.800064,
1848
1899
  cacheRead: 0,
1849
1900
  cacheWrite: 0,
1850
1901
  },
1851
- contextWindow: 128000,
1902
+ contextWindow: 163840,
1852
1903
  maxTokens: 4096,
1853
1904
  },
1854
- "mistralai/mistral-tiny": {
1855
- id: "mistralai/mistral-tiny",
1856
- name: "Mistral Tiny",
1905
+ "meta-llama/llama-3.3-70b-instruct:free": {
1906
+ id: "meta-llama/llama-3.3-70b-instruct:free",
1907
+ name: "Meta: Llama 3.3 70B Instruct (free)",
1857
1908
  provider: "openrouter",
1858
1909
  baseUrl: "https://openrouter.ai/api/v1",
1859
1910
  reasoning: false,
1860
1911
  input: ["text"],
1861
1912
  cost: {
1862
- input: 0.25,
1863
- output: 0.25,
1913
+ input: 0,
1914
+ output: 0,
1864
1915
  cacheRead: 0,
1865
1916
  cacheWrite: 0,
1866
1917
  },
1867
- contextWindow: 32768,
1918
+ contextWindow: 65536,
1868
1919
  maxTokens: 4096,
1869
1920
  },
1870
- "mistralai/mistral-small": {
1871
- id: "mistralai/mistral-small",
1872
- name: "Mistral Small",
1921
+ "meta-llama/llama-3.3-70b-instruct": {
1922
+ id: "meta-llama/llama-3.3-70b-instruct",
1923
+ name: "Meta: Llama 3.3 70B Instruct",
1873
1924
  provider: "openrouter",
1874
1925
  baseUrl: "https://openrouter.ai/api/v1",
1875
1926
  reasoning: false,
1876
1927
  input: ["text"],
1877
1928
  cost: {
1878
- input: 0.19999999999999998,
1879
- output: 0.6,
1929
+ input: 0.038000000000000006,
1930
+ output: 0.12,
1880
1931
  cacheRead: 0,
1881
1932
  cacheWrite: 0,
1882
1933
  },
1883
- contextWindow: 32768,
1884
- maxTokens: 4096,
1934
+ contextWindow: 131072,
1935
+ maxTokens: 16384,
1885
1936
  },
1886
- "mistralai/mixtral-8x7b-instruct": {
1887
- id: "mistralai/mixtral-8x7b-instruct",
1888
- name: "Mistral: Mixtral 8x7B Instruct",
1937
+ "amazon/nova-lite-v1": {
1938
+ id: "amazon/nova-lite-v1",
1939
+ name: "Amazon: Nova Lite 1.0",
1889
1940
  provider: "openrouter",
1890
1941
  baseUrl: "https://openrouter.ai/api/v1",
1891
1942
  reasoning: false,
1892
- input: ["text"],
1943
+ input: ["text", "image"],
1893
1944
  cost: {
1894
- input: 0.08,
1945
+ input: 0.06,
1895
1946
  output: 0.24,
1896
1947
  cacheRead: 0,
1897
1948
  cacheWrite: 0,
1898
1949
  },
1899
- contextWindow: 32768,
1900
- maxTokens: 16384,
1950
+ contextWindow: 300000,
1951
+ maxTokens: 5120,
1901
1952
  },
1902
- "mistralai/mistral-7b-instruct-v0.1": {
1903
- id: "mistralai/mistral-7b-instruct-v0.1",
1904
- name: "Mistral: Mistral 7B Instruct v0.1",
1953
+ "amazon/nova-micro-v1": {
1954
+ id: "amazon/nova-micro-v1",
1955
+ name: "Amazon: Nova Micro 1.0",
1905
1956
  provider: "openrouter",
1906
1957
  baseUrl: "https://openrouter.ai/api/v1",
1907
1958
  reasoning: false,
1908
1959
  input: ["text"],
1909
1960
  cost: {
1910
- input: 0.11,
1911
- output: 0.19,
1961
+ input: 0.035,
1962
+ output: 0.14,
1912
1963
  cacheRead: 0,
1913
1964
  cacheWrite: 0,
1914
1965
  },
1915
- contextWindow: 2824,
1916
- maxTokens: 4096,
1966
+ contextWindow: 128000,
1967
+ maxTokens: 5120,
1917
1968
  },
1918
- }
1919
- },
1920
- xai: {
1921
- models: {
1922
- "grok-code-fast-1": {
1923
- id: "grok-code-fast-1",
1924
- name: "xAI: Grok Code Fast 1",
1925
- provider: "xai",
1926
- baseUrl: "https://api.x.ai/v1",
1927
- reasoning: true,
1928
- input: ["text"],
1969
+ "amazon/nova-pro-v1": {
1970
+ id: "amazon/nova-pro-v1",
1971
+ name: "Amazon: Nova Pro 1.0",
1972
+ provider: "openrouter",
1973
+ baseUrl: "https://openrouter.ai/api/v1",
1974
+ reasoning: false,
1975
+ input: ["text", "image"],
1929
1976
  cost: {
1930
- input: 0.19999999999999998,
1931
- output: 1.5,
1932
- cacheRead: 0.02,
1977
+ input: 0.7999999999999999,
1978
+ output: 3.1999999999999997,
1979
+ cacheRead: 0,
1933
1980
  cacheWrite: 0,
1934
1981
  },
1935
- contextWindow: 256000,
1936
- maxTokens: 10000,
1982
+ contextWindow: 300000,
1983
+ maxTokens: 5120,
1937
1984
  },
1938
- "grok-4": {
1939
- id: "grok-4",
1940
- name: "xAI: Grok 4",
1941
- provider: "xai",
1942
- baseUrl: "https://api.x.ai/v1",
1943
- reasoning: true,
1944
- input: ["text", "image"],
1985
+ "mistralai/mistral-large-2411": {
1986
+ id: "mistralai/mistral-large-2411",
1987
+ name: "Mistral Large 2411",
1988
+ provider: "openrouter",
1989
+ baseUrl: "https://openrouter.ai/api/v1",
1990
+ reasoning: false,
1991
+ input: ["text"],
1945
1992
  cost: {
1946
- input: 3,
1947
- output: 15,
1948
- cacheRead: 0.75,
1993
+ input: 2,
1994
+ output: 6,
1995
+ cacheRead: 0,
1949
1996
  cacheWrite: 0,
1950
1997
  },
1951
- contextWindow: 256000,
1998
+ contextWindow: 131072,
1952
1999
  maxTokens: 4096,
1953
2000
  },
1954
- "grok-3-mini": {
1955
- id: "grok-3-mini",
1956
- name: "xAI: Grok 3 Mini",
1957
- provider: "xai",
1958
- baseUrl: "https://api.x.ai/v1",
1959
- reasoning: true,
2001
+ "mistralai/mistral-large-2407": {
2002
+ id: "mistralai/mistral-large-2407",
2003
+ name: "Mistral Large 2407",
2004
+ provider: "openrouter",
2005
+ baseUrl: "https://openrouter.ai/api/v1",
2006
+ reasoning: false,
1960
2007
  input: ["text"],
1961
2008
  cost: {
1962
- input: 0.3,
1963
- output: 0.5,
1964
- cacheRead: 0.075,
2009
+ input: 2,
2010
+ output: 6,
2011
+ cacheRead: 0,
1965
2012
  cacheWrite: 0,
1966
2013
  },
1967
2014
  contextWindow: 131072,
1968
2015
  maxTokens: 4096,
1969
2016
  },
1970
- "grok-3": {
1971
- id: "grok-3",
1972
- name: "xAI: Grok 3",
1973
- provider: "xai",
1974
- baseUrl: "https://api.x.ai/v1",
2017
+ "mistralai/pixtral-large-2411": {
2018
+ id: "mistralai/pixtral-large-2411",
2019
+ name: "Mistral: Pixtral Large 2411",
2020
+ provider: "openrouter",
2021
+ baseUrl: "https://openrouter.ai/api/v1",
1975
2022
  reasoning: false,
1976
- input: ["text"],
2023
+ input: ["text", "image"],
1977
2024
  cost: {
1978
- input: 3,
1979
- output: 15,
1980
- cacheRead: 0.75,
2025
+ input: 2,
2026
+ output: 6,
2027
+ cacheRead: 0,
1981
2028
  cacheWrite: 0,
1982
2029
  },
1983
2030
  contextWindow: 131072,
1984
2031
  maxTokens: 4096,
1985
2032
  },
1986
- "grok-3-mini-beta": {
1987
- id: "grok-3-mini-beta",
1988
- name: "xAI: Grok 3 Mini Beta",
1989
- provider: "xai",
1990
- baseUrl: "https://api.x.ai/v1",
1991
- reasoning: true,
2033
+ "thedrummer/unslopnemo-12b": {
2034
+ id: "thedrummer/unslopnemo-12b",
2035
+ name: "TheDrummer: UnslopNemo 12B",
2036
+ provider: "openrouter",
2037
+ baseUrl: "https://openrouter.ai/api/v1",
2038
+ reasoning: false,
1992
2039
  input: ["text"],
1993
2040
  cost: {
1994
- input: 0.3,
1995
- output: 0.5,
1996
- cacheRead: 0.075,
2041
+ input: 0.39999999999999997,
2042
+ output: 0.39999999999999997,
2043
+ cacheRead: 0,
1997
2044
  cacheWrite: 0,
1998
2045
  },
1999
- contextWindow: 131072,
2046
+ contextWindow: 32768,
2000
2047
  maxTokens: 4096,
2001
2048
  },
2002
- "grok-3-beta": {
2003
- id: "grok-3-beta",
2004
- name: "xAI: Grok 3 Beta",
2005
- provider: "xai",
2006
- baseUrl: "https://api.x.ai/v1",
2049
+ "mistralai/ministral-8b": {
2050
+ id: "mistralai/ministral-8b",
2051
+ name: "Mistral: Ministral 8B",
2052
+ provider: "openrouter",
2053
+ baseUrl: "https://openrouter.ai/api/v1",
2007
2054
  reasoning: false,
2008
2055
  input: ["text"],
2009
2056
  cost: {
2010
- input: 3,
2011
- output: 15,
2012
- cacheRead: 0.75,
2057
+ input: 0.09999999999999999,
2058
+ output: 0.09999999999999999,
2059
+ cacheRead: 0,
2013
2060
  cacheWrite: 0,
2014
2061
  },
2015
- contextWindow: 131072,
2062
+ contextWindow: 128000,
2016
2063
  maxTokens: 4096,
2017
2064
  },
2018
- "grok-2-1212": {
2019
- id: "grok-2-1212",
2020
- name: "xAI: Grok 2 1212",
2021
- provider: "xai",
2022
- baseUrl: "https://api.x.ai/v1",
2065
+ "nvidia/llama-3.1-nemotron-70b-instruct": {
2066
+ id: "nvidia/llama-3.1-nemotron-70b-instruct",
2067
+ name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
2068
+ provider: "openrouter",
2069
+ baseUrl: "https://openrouter.ai/api/v1",
2023
2070
  reasoning: false,
2024
2071
  input: ["text"],
2025
2072
  cost: {
2026
- input: 2,
2027
- output: 10,
2073
+ input: 0.12,
2074
+ output: 0.3,
2028
2075
  cacheRead: 0,
2029
2076
  cacheWrite: 0,
2030
2077
  },
2031
2078
  contextWindow: 131072,
2032
- maxTokens: 4096,
2079
+ maxTokens: 16384,
2033
2080
  },
2034
- }
2035
- },
2036
- openai: {
2037
- models: {
2038
- "gpt-4o-audio-preview": {
2039
- id: "gpt-4o-audio-preview",
2040
- name: "OpenAI: GPT-4o Audio",
2041
- provider: "openai",
2081
+ "thedrummer/rocinante-12b": {
2082
+ id: "thedrummer/rocinante-12b",
2083
+ name: "TheDrummer: Rocinante 12B",
2084
+ provider: "openrouter",
2085
+ baseUrl: "https://openrouter.ai/api/v1",
2042
2086
  reasoning: false,
2043
2087
  input: ["text"],
2044
2088
  cost: {
2045
- input: 2.5,
2046
- output: 10,
2089
+ input: 0.16999999999999998,
2090
+ output: 0.43,
2047
2091
  cacheRead: 0,
2048
2092
  cacheWrite: 0,
2049
2093
  },
2050
- contextWindow: 128000,
2051
- maxTokens: 16384,
2094
+ contextWindow: 32768,
2095
+ maxTokens: 4096,
2052
2096
  },
2053
- "gpt-5": {
2054
- id: "gpt-5",
2055
- name: "OpenAI: GPT-5",
2056
- provider: "openai",
2057
- reasoning: true,
2058
- input: ["text", "image"],
2097
+ "meta-llama/llama-3.2-3b-instruct": {
2098
+ id: "meta-llama/llama-3.2-3b-instruct",
2099
+ name: "Meta: Llama 3.2 3B Instruct",
2100
+ provider: "openrouter",
2101
+ baseUrl: "https://openrouter.ai/api/v1",
2102
+ reasoning: false,
2103
+ input: ["text"],
2059
2104
  cost: {
2060
- input: 1.25,
2061
- output: 10,
2062
- cacheRead: 0.125,
2105
+ input: 0.003,
2106
+ output: 0.006,
2107
+ cacheRead: 0,
2063
2108
  cacheWrite: 0,
2064
2109
  },
2065
- contextWindow: 400000,
2066
- maxTokens: 128000,
2110
+ contextWindow: 20000,
2111
+ maxTokens: 20000,
2067
2112
  },
2068
- "gpt-5-mini": {
2069
- id: "gpt-5-mini",
2070
- name: "OpenAI: GPT-5 Mini",
2071
- provider: "openai",
2072
- reasoning: true,
2073
- input: ["text", "image"],
2113
+ "qwen/qwen-2.5-72b-instruct": {
2114
+ id: "qwen/qwen-2.5-72b-instruct",
2115
+ name: "Qwen2.5 72B Instruct",
2116
+ provider: "openrouter",
2117
+ baseUrl: "https://openrouter.ai/api/v1",
2118
+ reasoning: false,
2119
+ input: ["text"],
2074
2120
  cost: {
2075
- input: 0.25,
2076
- output: 2,
2077
- cacheRead: 0.024999999999999998,
2121
+ input: 0.051830799999999996,
2122
+ output: 0.207424,
2123
+ cacheRead: 0,
2078
2124
  cacheWrite: 0,
2079
2125
  },
2080
- contextWindow: 400000,
2081
- maxTokens: 128000,
2126
+ contextWindow: 32768,
2127
+ maxTokens: 4096,
2082
2128
  },
2083
- "gpt-5-nano": {
2084
- id: "gpt-5-nano",
2085
- name: "OpenAI: GPT-5 Nano",
2086
- provider: "openai",
2087
- reasoning: true,
2129
+ "mistralai/pixtral-12b": {
2130
+ id: "mistralai/pixtral-12b",
2131
+ name: "Mistral: Pixtral 12B",
2132
+ provider: "openrouter",
2133
+ baseUrl: "https://openrouter.ai/api/v1",
2134
+ reasoning: false,
2088
2135
  input: ["text", "image"],
2089
2136
  cost: {
2090
- input: 0.049999999999999996,
2091
- output: 0.39999999999999997,
2092
- cacheRead: 0.005,
2137
+ input: 0.09999999999999999,
2138
+ output: 0.09999999999999999,
2139
+ cacheRead: 0,
2093
2140
  cacheWrite: 0,
2094
2141
  },
2095
- contextWindow: 400000,
2096
- maxTokens: 128000,
2142
+ contextWindow: 32768,
2143
+ maxTokens: 4096,
2097
2144
  },
2098
- "gpt-oss-120b": {
2099
- id: "gpt-oss-120b",
2100
- name: "OpenAI: gpt-oss-120b",
2101
- provider: "openai",
2102
- reasoning: true,
2145
+ "cohere/command-r-plus-08-2024": {
2146
+ id: "cohere/command-r-plus-08-2024",
2147
+ name: "Cohere: Command R+ (08-2024)",
2148
+ provider: "openrouter",
2149
+ baseUrl: "https://openrouter.ai/api/v1",
2150
+ reasoning: false,
2103
2151
  input: ["text"],
2104
2152
  cost: {
2105
- input: 0.072,
2106
- output: 0.28,
2153
+ input: 2.5,
2154
+ output: 10,
2107
2155
  cacheRead: 0,
2108
2156
  cacheWrite: 0,
2109
2157
  },
2110
- contextWindow: 131000,
2111
- maxTokens: 131000,
2158
+ contextWindow: 128000,
2159
+ maxTokens: 4000,
2112
2160
  },
2113
- "gpt-oss-20b": {
2114
- id: "gpt-oss-20b",
2115
- name: "OpenAI: gpt-oss-20b",
2116
- provider: "openai",
2117
- reasoning: true,
2161
+ "cohere/command-r-08-2024": {
2162
+ id: "cohere/command-r-08-2024",
2163
+ name: "Cohere: Command R (08-2024)",
2164
+ provider: "openrouter",
2165
+ baseUrl: "https://openrouter.ai/api/v1",
2166
+ reasoning: false,
2118
2167
  input: ["text"],
2119
2168
  cost: {
2120
- input: 0.04,
2121
- output: 0.15,
2169
+ input: 0.15,
2170
+ output: 0.6,
2122
2171
  cacheRead: 0,
2123
2172
  cacheWrite: 0,
2124
2173
  },
2125
- contextWindow: 131000,
2126
- maxTokens: 131000,
2174
+ contextWindow: 128000,
2175
+ maxTokens: 4000,
2127
2176
  },
2128
- "o3-pro": {
2129
- id: "o3-pro",
2130
- name: "OpenAI: o3 Pro",
2131
- provider: "openai",
2132
- reasoning: true,
2133
- input: ["text", "image"],
2177
+ "microsoft/phi-3.5-mini-128k-instruct": {
2178
+ id: "microsoft/phi-3.5-mini-128k-instruct",
2179
+ name: "Microsoft: Phi-3.5 Mini 128K Instruct",
2180
+ provider: "openrouter",
2181
+ baseUrl: "https://openrouter.ai/api/v1",
2182
+ reasoning: false,
2183
+ input: ["text"],
2134
2184
  cost: {
2135
- input: 20,
2136
- output: 80,
2185
+ input: 0.09999999999999999,
2186
+ output: 0.09999999999999999,
2137
2187
  cacheRead: 0,
2138
2188
  cacheWrite: 0,
2139
2189
  },
2140
- contextWindow: 200000,
2141
- maxTokens: 100000,
2142
- },
2143
- "codex-mini": {
2144
- id: "codex-mini",
2145
- name: "OpenAI: Codex Mini",
2146
- provider: "openai",
2147
- reasoning: true,
2148
- input: ["text", "image"],
2149
- cost: {
2150
- input: 1.5,
2151
- output: 6,
2152
- cacheRead: 0.375,
2153
- cacheWrite: 0,
2154
- },
2155
- contextWindow: 200000,
2156
- maxTokens: 100000,
2190
+ contextWindow: 128000,
2191
+ maxTokens: 4096,
2157
2192
  },
2158
- "o4-mini-high": {
2159
- id: "o4-mini-high",
2160
- name: "OpenAI: o4 Mini High",
2161
- provider: "openai",
2162
- reasoning: true,
2163
- input: ["text", "image"],
2193
+ "nousresearch/hermes-3-llama-3.1-70b": {
2194
+ id: "nousresearch/hermes-3-llama-3.1-70b",
2195
+ name: "Nous: Hermes 3 70B Instruct",
2196
+ provider: "openrouter",
2197
+ baseUrl: "https://openrouter.ai/api/v1",
2198
+ reasoning: false,
2199
+ input: ["text"],
2164
2200
  cost: {
2165
- input: 1.1,
2166
- output: 4.4,
2167
- cacheRead: 0.275,
2201
+ input: 0.09999999999999999,
2202
+ output: 0.28,
2203
+ cacheRead: 0,
2168
2204
  cacheWrite: 0,
2169
2205
  },
2170
- contextWindow: 200000,
2171
- maxTokens: 100000,
2206
+ contextWindow: 131072,
2207
+ maxTokens: 4096,
2172
2208
  },
2173
- "o3": {
2174
- id: "o3",
2175
- name: "OpenAI: o3",
2176
- provider: "openai",
2177
- reasoning: true,
2178
- input: ["text", "image"],
2209
+ "meta-llama/llama-3.1-8b-instruct": {
2210
+ id: "meta-llama/llama-3.1-8b-instruct",
2211
+ name: "Meta: Llama 3.1 8B Instruct",
2212
+ provider: "openrouter",
2213
+ baseUrl: "https://openrouter.ai/api/v1",
2214
+ reasoning: false,
2215
+ input: ["text"],
2179
2216
  cost: {
2180
- input: 2,
2181
- output: 8,
2182
- cacheRead: 0.5,
2217
+ input: 0.015,
2218
+ output: 0.02,
2219
+ cacheRead: 0,
2183
2220
  cacheWrite: 0,
2184
2221
  },
2185
- contextWindow: 200000,
2186
- maxTokens: 100000,
2222
+ contextWindow: 131072,
2223
+ maxTokens: 16384,
2187
2224
  },
2188
- "o4-mini": {
2189
- id: "o4-mini",
2190
- name: "OpenAI: o4 Mini",
2191
- provider: "openai",
2192
- reasoning: true,
2193
- input: ["text", "image"],
2225
+ "meta-llama/llama-3.1-70b-instruct": {
2226
+ id: "meta-llama/llama-3.1-70b-instruct",
2227
+ name: "Meta: Llama 3.1 70B Instruct",
2228
+ provider: "openrouter",
2229
+ baseUrl: "https://openrouter.ai/api/v1",
2230
+ reasoning: false,
2231
+ input: ["text"],
2194
2232
  cost: {
2195
- input: 1.1,
2196
- output: 4.4,
2197
- cacheRead: 0.275,
2233
+ input: 0.09999999999999999,
2234
+ output: 0.28,
2235
+ cacheRead: 0,
2198
2236
  cacheWrite: 0,
2199
2237
  },
2200
- contextWindow: 200000,
2201
- maxTokens: 100000,
2238
+ contextWindow: 131072,
2239
+ maxTokens: 16384,
2202
2240
  },
2203
- "gpt-4.1": {
2204
- id: "gpt-4.1",
2205
- name: "OpenAI: GPT-4.1",
2206
- provider: "openai",
2241
+ "meta-llama/llama-3.1-405b-instruct": {
2242
+ id: "meta-llama/llama-3.1-405b-instruct",
2243
+ name: "Meta: Llama 3.1 405B Instruct",
2244
+ provider: "openrouter",
2245
+ baseUrl: "https://openrouter.ai/api/v1",
2207
2246
  reasoning: false,
2208
- input: ["text", "image"],
2247
+ input: ["text"],
2209
2248
  cost: {
2210
- input: 2,
2211
- output: 8,
2212
- cacheRead: 0.5,
2249
+ input: 0.7999999999999999,
2250
+ output: 0.7999999999999999,
2251
+ cacheRead: 0,
2213
2252
  cacheWrite: 0,
2214
2253
  },
2215
- contextWindow: 1047576,
2216
- maxTokens: 32768,
2254
+ contextWindow: 32768,
2255
+ maxTokens: 16384,
2217
2256
  },
2218
- "gpt-4.1-mini": {
2219
- id: "gpt-4.1-mini",
2220
- name: "OpenAI: GPT-4.1 Mini",
2221
- provider: "openai",
2257
+ "mistralai/mistral-nemo": {
2258
+ id: "mistralai/mistral-nemo",
2259
+ name: "Mistral: Mistral Nemo",
2260
+ provider: "openrouter",
2261
+ baseUrl: "https://openrouter.ai/api/v1",
2222
2262
  reasoning: false,
2223
- input: ["text", "image"],
2263
+ input: ["text"],
2224
2264
  cost: {
2225
- input: 0.39999999999999997,
2226
- output: 1.5999999999999999,
2227
- cacheRead: 0.09999999999999999,
2265
+ input: 0.0075,
2266
+ output: 0.049999999999999996,
2267
+ cacheRead: 0,
2228
2268
  cacheWrite: 0,
2229
2269
  },
2230
- contextWindow: 1047576,
2231
- maxTokens: 32768,
2270
+ contextWindow: 32000,
2271
+ maxTokens: 4096,
2232
2272
  },
2233
- "gpt-4.1-nano": {
2234
- id: "gpt-4.1-nano",
2235
- name: "OpenAI: GPT-4.1 Nano",
2236
- provider: "openai",
2273
+ "mistralai/mistral-7b-instruct-v0.3": {
2274
+ id: "mistralai/mistral-7b-instruct-v0.3",
2275
+ name: "Mistral: Mistral 7B Instruct v0.3",
2276
+ provider: "openrouter",
2277
+ baseUrl: "https://openrouter.ai/api/v1",
2237
2278
  reasoning: false,
2238
- input: ["text", "image"],
2279
+ input: ["text"],
2239
2280
  cost: {
2240
- input: 0.09999999999999999,
2241
- output: 0.39999999999999997,
2242
- cacheRead: 0.024999999999999998,
2281
+ input: 0.028,
2282
+ output: 0.054,
2283
+ cacheRead: 0,
2243
2284
  cacheWrite: 0,
2244
2285
  },
2245
- contextWindow: 1047576,
2246
- maxTokens: 32768,
2286
+ contextWindow: 32768,
2287
+ maxTokens: 16384,
2247
2288
  },
2248
- "o3-mini-high": {
2249
- id: "o3-mini-high",
2250
- name: "OpenAI: o3 Mini High",
2251
- provider: "openai",
2289
+ "mistralai/mistral-7b-instruct:free": {
2290
+ id: "mistralai/mistral-7b-instruct:free",
2291
+ name: "Mistral: Mistral 7B Instruct (free)",
2292
+ provider: "openrouter",
2293
+ baseUrl: "https://openrouter.ai/api/v1",
2252
2294
  reasoning: false,
2253
2295
  input: ["text"],
2254
2296
  cost: {
2255
- input: 1.1,
2256
- output: 4.4,
2257
- cacheRead: 0.55,
2297
+ input: 0,
2298
+ output: 0,
2299
+ cacheRead: 0,
2258
2300
  cacheWrite: 0,
2259
2301
  },
2260
- contextWindow: 200000,
2261
- maxTokens: 100000,
2302
+ contextWindow: 32768,
2303
+ maxTokens: 16384,
2262
2304
  },
2263
- "o3-mini": {
2264
- id: "o3-mini",
2265
- name: "OpenAI: o3 Mini",
2266
- provider: "openai",
2305
+ "mistralai/mistral-7b-instruct": {
2306
+ id: "mistralai/mistral-7b-instruct",
2307
+ name: "Mistral: Mistral 7B Instruct",
2308
+ provider: "openrouter",
2309
+ baseUrl: "https://openrouter.ai/api/v1",
2267
2310
  reasoning: false,
2268
2311
  input: ["text"],
2269
2312
  cost: {
2270
- input: 1.1,
2271
- output: 4.4,
2272
- cacheRead: 0.55,
2313
+ input: 0.028,
2314
+ output: 0.054,
2315
+ cacheRead: 0,
2273
2316
  cacheWrite: 0,
2274
2317
  },
2275
- contextWindow: 200000,
2276
- maxTokens: 100000,
2318
+ contextWindow: 32768,
2319
+ maxTokens: 16384,
2277
2320
  },
2278
- "o1": {
2279
- id: "o1",
2280
- name: "OpenAI: o1",
2281
- provider: "openai",
2321
+ "microsoft/phi-3-mini-128k-instruct": {
2322
+ id: "microsoft/phi-3-mini-128k-instruct",
2323
+ name: "Microsoft: Phi-3 Mini 128K Instruct",
2324
+ provider: "openrouter",
2325
+ baseUrl: "https://openrouter.ai/api/v1",
2282
2326
  reasoning: false,
2283
- input: ["text", "image"],
2327
+ input: ["text"],
2284
2328
  cost: {
2285
- input: 15,
2286
- output: 60,
2287
- cacheRead: 7.5,
2329
+ input: 0.09999999999999999,
2330
+ output: 0.09999999999999999,
2331
+ cacheRead: 0,
2288
2332
  cacheWrite: 0,
2289
2333
  },
2290
- contextWindow: 200000,
2291
- maxTokens: 100000,
2334
+ contextWindow: 128000,
2335
+ maxTokens: 4096,
2292
2336
  },
2293
- "gpt-4o-2024-11-20": {
2294
- id: "gpt-4o-2024-11-20",
2295
- name: "OpenAI: GPT-4o (2024-11-20)",
2296
- provider: "openai",
2337
+ "microsoft/phi-3-medium-128k-instruct": {
2338
+ id: "microsoft/phi-3-medium-128k-instruct",
2339
+ name: "Microsoft: Phi-3 Medium 128K Instruct",
2340
+ provider: "openrouter",
2341
+ baseUrl: "https://openrouter.ai/api/v1",
2297
2342
  reasoning: false,
2298
- input: ["text", "image"],
2343
+ input: ["text"],
2299
2344
  cost: {
2300
- input: 2.5,
2301
- output: 10,
2302
- cacheRead: 1.25,
2345
+ input: 1,
2346
+ output: 1,
2347
+ cacheRead: 0,
2303
2348
  cacheWrite: 0,
2304
2349
  },
2305
2350
  contextWindow: 128000,
2306
- maxTokens: 16384,
2351
+ maxTokens: 4096,
2307
2352
  },
2308
- "gpt-4o-2024-08-06": {
2309
- id: "gpt-4o-2024-08-06",
2310
- name: "OpenAI: GPT-4o (2024-08-06)",
2311
- provider: "openai",
2353
+ "meta-llama/llama-3-70b-instruct": {
2354
+ id: "meta-llama/llama-3-70b-instruct",
2355
+ name: "Meta: Llama 3 70B Instruct",
2356
+ provider: "openrouter",
2357
+ baseUrl: "https://openrouter.ai/api/v1",
2312
2358
  reasoning: false,
2313
- input: ["text", "image"],
2359
+ input: ["text"],
2314
2360
  cost: {
2315
- input: 2.5,
2316
- output: 10,
2317
- cacheRead: 1.25,
2361
+ input: 0.3,
2362
+ output: 0.39999999999999997,
2363
+ cacheRead: 0,
2318
2364
  cacheWrite: 0,
2319
2365
  },
2320
- contextWindow: 128000,
2366
+ contextWindow: 8192,
2321
2367
  maxTokens: 16384,
2322
2368
  },
2323
- "gpt-4o-mini": {
2324
- id: "gpt-4o-mini",
2325
- name: "OpenAI: GPT-4o-mini",
2326
- provider: "openai",
2369
+ "meta-llama/llama-3-8b-instruct": {
2370
+ id: "meta-llama/llama-3-8b-instruct",
2371
+ name: "Meta: Llama 3 8B Instruct",
2372
+ provider: "openrouter",
2373
+ baseUrl: "https://openrouter.ai/api/v1",
2327
2374
  reasoning: false,
2328
- input: ["text", "image"],
2375
+ input: ["text"],
2329
2376
  cost: {
2330
- input: 0.15,
2331
- output: 0.6,
2332
- cacheRead: 0.075,
2377
+ input: 0.03,
2378
+ output: 0.06,
2379
+ cacheRead: 0,
2333
2380
  cacheWrite: 0,
2334
2381
  },
2335
- contextWindow: 128000,
2382
+ contextWindow: 8192,
2336
2383
  maxTokens: 16384,
2337
2384
  },
2338
- "gpt-4o-mini-2024-07-18": {
2339
- id: "gpt-4o-mini-2024-07-18",
2340
- name: "OpenAI: GPT-4o-mini (2024-07-18)",
2341
- provider: "openai",
2342
- reasoning: false,
2343
- input: ["text", "image"],
2344
- cost: {
2345
- input: 0.15,
2346
- output: 0.6,
2347
- cacheRead: 0.075,
2348
- cacheWrite: 0,
2349
- },
2350
- contextWindow: 128000,
2351
- maxTokens: 16384,
2352
- },
2353
- "gpt-4o": {
2354
- id: "gpt-4o",
2355
- name: "OpenAI: GPT-4o",
2356
- provider: "openai",
2357
- reasoning: false,
2358
- input: ["text", "image"],
2359
- cost: {
2360
- input: 2.5,
2361
- output: 10,
2362
- cacheRead: 1.25,
2363
- cacheWrite: 0,
2364
- },
2365
- contextWindow: 128000,
2366
- maxTokens: 16384,
2367
- },
2368
- "gpt-4o:extended": {
2369
- id: "gpt-4o:extended",
2370
- name: "OpenAI: GPT-4o (extended)",
2371
- provider: "openai",
2385
+ "mistralai/mixtral-8x22b-instruct": {
2386
+ id: "mistralai/mixtral-8x22b-instruct",
2387
+ name: "Mistral: Mixtral 8x22B Instruct",
2388
+ provider: "openrouter",
2389
+ baseUrl: "https://openrouter.ai/api/v1",
2372
2390
  reasoning: false,
2373
- input: ["text", "image"],
2391
+ input: ["text"],
2374
2392
  cost: {
2375
- input: 6,
2376
- output: 18,
2393
+ input: 0.8999999999999999,
2394
+ output: 0.8999999999999999,
2377
2395
  cacheRead: 0,
2378
2396
  cacheWrite: 0,
2379
2397
  },
2380
- contextWindow: 128000,
2381
- maxTokens: 64000,
2398
+ contextWindow: 65536,
2399
+ maxTokens: 4096,
2382
2400
  },
2383
- "gpt-4o-2024-05-13": {
2384
- id: "gpt-4o-2024-05-13",
2385
- name: "OpenAI: GPT-4o (2024-05-13)",
2386
- provider: "openai",
2401
+ "cohere/command-r-plus": {
2402
+ id: "cohere/command-r-plus",
2403
+ name: "Cohere: Command R+",
2404
+ provider: "openrouter",
2405
+ baseUrl: "https://openrouter.ai/api/v1",
2387
2406
  reasoning: false,
2388
- input: ["text", "image"],
2407
+ input: ["text"],
2389
2408
  cost: {
2390
- input: 5,
2409
+ input: 3,
2391
2410
  output: 15,
2392
2411
  cacheRead: 0,
2393
2412
  cacheWrite: 0,
2394
2413
  },
2395
2414
  contextWindow: 128000,
2396
- maxTokens: 4096,
2415
+ maxTokens: 4000,
2397
2416
  },
2398
- "gpt-4-turbo": {
2399
- id: "gpt-4-turbo",
2400
- name: "OpenAI: GPT-4 Turbo",
2401
- provider: "openai",
2417
+ "cohere/command-r-plus-04-2024": {
2418
+ id: "cohere/command-r-plus-04-2024",
2419
+ name: "Cohere: Command R+ (04-2024)",
2420
+ provider: "openrouter",
2421
+ baseUrl: "https://openrouter.ai/api/v1",
2402
2422
  reasoning: false,
2403
- input: ["text", "image"],
2423
+ input: ["text"],
2404
2424
  cost: {
2405
- input: 10,
2406
- output: 30,
2425
+ input: 3,
2426
+ output: 15,
2407
2427
  cacheRead: 0,
2408
2428
  cacheWrite: 0,
2409
2429
  },
2410
2430
  contextWindow: 128000,
2411
- maxTokens: 4096,
2431
+ maxTokens: 4000,
2412
2432
  },
2413
- "gpt-3.5-turbo-0613": {
2414
- id: "gpt-3.5-turbo-0613",
2415
- name: "OpenAI: GPT-3.5 Turbo (older v0613)",
2416
- provider: "openai",
2433
+ "cohere/command-r": {
2434
+ id: "cohere/command-r",
2435
+ name: "Cohere: Command R",
2436
+ provider: "openrouter",
2437
+ baseUrl: "https://openrouter.ai/api/v1",
2417
2438
  reasoning: false,
2418
2439
  input: ["text"],
2419
2440
  cost: {
2420
- input: 1,
2421
- output: 2,
2441
+ input: 0.5,
2442
+ output: 1.5,
2422
2443
  cacheRead: 0,
2423
2444
  cacheWrite: 0,
2424
2445
  },
2425
- contextWindow: 4095,
2426
- maxTokens: 4096,
2446
+ contextWindow: 128000,
2447
+ maxTokens: 4000,
2427
2448
  },
2428
- "gpt-4-turbo-preview": {
2429
- id: "gpt-4-turbo-preview",
2430
- name: "OpenAI: GPT-4 Turbo Preview",
2431
- provider: "openai",
2449
+ "cohere/command-r-03-2024": {
2450
+ id: "cohere/command-r-03-2024",
2451
+ name: "Cohere: Command R (03-2024)",
2452
+ provider: "openrouter",
2453
+ baseUrl: "https://openrouter.ai/api/v1",
2432
2454
  reasoning: false,
2433
2455
  input: ["text"],
2434
2456
  cost: {
2435
- input: 10,
2436
- output: 30,
2457
+ input: 0.5,
2458
+ output: 1.5,
2437
2459
  cacheRead: 0,
2438
2460
  cacheWrite: 0,
2439
2461
  },
2440
2462
  contextWindow: 128000,
2441
- maxTokens: 4096,
2463
+ maxTokens: 4000,
2442
2464
  },
2443
- "gpt-4-1106-preview": {
2444
- id: "gpt-4-1106-preview",
2445
- name: "OpenAI: GPT-4 Turbo (older v1106)",
2446
- provider: "openai",
2465
+ "mistralai/mistral-large": {
2466
+ id: "mistralai/mistral-large",
2467
+ name: "Mistral Large",
2468
+ provider: "openrouter",
2469
+ baseUrl: "https://openrouter.ai/api/v1",
2447
2470
  reasoning: false,
2448
2471
  input: ["text"],
2449
2472
  cost: {
2450
- input: 10,
2451
- output: 30,
2473
+ input: 2,
2474
+ output: 6,
2452
2475
  cacheRead: 0,
2453
2476
  cacheWrite: 0,
2454
2477
  },
2455
2478
  contextWindow: 128000,
2456
2479
  maxTokens: 4096,
2457
2480
  },
2458
- "gpt-3.5-turbo-16k": {
2459
- id: "gpt-3.5-turbo-16k",
2460
- name: "OpenAI: GPT-3.5 Turbo 16k",
2461
- provider: "openai",
2481
+ "mistralai/mistral-tiny": {
2482
+ id: "mistralai/mistral-tiny",
2483
+ name: "Mistral Tiny",
2484
+ provider: "openrouter",
2485
+ baseUrl: "https://openrouter.ai/api/v1",
2462
2486
  reasoning: false,
2463
2487
  input: ["text"],
2464
2488
  cost: {
2465
- input: 3,
2466
- output: 4,
2489
+ input: 0.25,
2490
+ output: 0.25,
2467
2491
  cacheRead: 0,
2468
2492
  cacheWrite: 0,
  },
- contextWindow: 16385,
+ contextWindow: 32768,
  maxTokens: 4096,
  },
- "gpt-4": {
- id: "gpt-4",
- name: "OpenAI: GPT-4",
- provider: "openai",
+ "mistralai/mistral-small": {
+ id: "mistralai/mistral-small",
+ name: "Mistral Small",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 30,
- output: 60,
+ input: 0.19999999999999998,
+ output: 0.6,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8191,
+ contextWindow: 32768,
  maxTokens: 4096,
  },
- "gpt-4-0314": {
- id: "gpt-4-0314",
- name: "OpenAI: GPT-4 (older v0314)",
- provider: "openai",
+ "mistralai/mixtral-8x7b-instruct": {
+ id: "mistralai/mixtral-8x7b-instruct",
+ name: "Mistral: Mixtral 8x7B Instruct",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 30,
- output: 60,
+ input: 0.08,
+ output: 0.24,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 8191,
- maxTokens: 4096,
+ contextWindow: 32768,
+ maxTokens: 16384,
  },
- "gpt-3.5-turbo": {
- id: "gpt-3.5-turbo",
- name: "OpenAI: GPT-3.5 Turbo",
- provider: "openai",
+ "mistralai/mistral-7b-instruct-v0.1": {
+ id: "mistralai/mistral-7b-instruct-v0.1",
+ name: "Mistral: Mistral 7B Instruct v0.1",
+ provider: "openrouter",
+ baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.5,
- output: 1.5,
+ input: 0.11,
+ output: 0.19,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 16385,
+ contextWindow: 2824,
  maxTokens: 4096,
  },
  }
  },
- anthropic: {
+ xai: {
  models: {
- "claude-opus-4-1": {
- id: "claude-opus-4-1",
- name: "Anthropic: Claude Opus 4.1",
- provider: "anthropic",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 15,
- output: 75,
- cacheRead: 1.5,
- cacheWrite: 18.75,
- },
- contextWindow: 200000,
- maxTokens: 32000,
- },
- "claude-opus-4-0": {
- id: "claude-opus-4-0",
- name: "Anthropic: Claude Opus 4",
- provider: "anthropic",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 15,
- output: 75,
- cacheRead: 1.5,
- cacheWrite: 18.75,
- },
- contextWindow: 200000,
- maxTokens: 32000,
- },
- "claude-sonnet-4-0": {
- id: "claude-sonnet-4-0",
- name: "Anthropic: Claude Sonnet 4",
- provider: "anthropic",
+ "grok-code-fast-1": {
+ id: "grok-code-fast-1",
+ name: "xAI: Grok Code Fast 1",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 3,
- output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
+ input: 0.19999999999999998,
+ output: 1.5,
+ cacheRead: 0.02,
+ cacheWrite: 0,
  },
- contextWindow: 1000000,
- maxTokens: 64000,
+ contextWindow: 256000,
+ maxTokens: 10000,
  },
- "claude-3-7-sonnet-latest": {
- id: "claude-3-7-sonnet-latest",
- name: "Anthropic: Claude 3.7 Sonnet",
- provider: "anthropic",
+ "grok-4": {
+ id: "grok-4",
+ name: "xAI: Grok 4",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
  input: ["text", "image"],
  cost: {
  input: 3,
  output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
+ cacheRead: 0.75,
+ cacheWrite: 0,
  },
- contextWindow: 200000,
- maxTokens: 64000,
+ contextWindow: 256000,
+ maxTokens: 4096,
  },
- "claude-3-7-sonnet-latest:thinking": {
- id: "claude-3-7-sonnet-latest:thinking",
- name: "Anthropic: Claude 3.7 Sonnet (thinking)",
- provider: "anthropic",
+ "grok-3-mini": {
+ id: "grok-3-mini",
+ name: "xAI: Grok 3 Mini",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 3,
- output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
- },
- contextWindow: 200000,
- maxTokens: 64000,
- },
- "claude-3-5-haiku-20241022": {
- id: "claude-3-5-haiku-20241022",
- name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.7999999999999999,
- output: 4,
- cacheRead: 0.08,
- cacheWrite: 1,
- },
- contextWindow: 200000,
- maxTokens: 8192,
- },
- "claude-3-5-haiku-latest": {
- id: "claude-3-5-haiku-latest",
- name: "Anthropic: Claude 3.5 Haiku",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.7999999999999999,
- output: 4,
- cacheRead: 0.08,
- cacheWrite: 1,
- },
- contextWindow: 200000,
- maxTokens: 8192,
- },
- "claude-3-5-sonnet-latest": {
- id: "claude-3-5-sonnet-latest",
- name: "Anthropic: Claude 3.5 Sonnet",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 3,
- output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
+ input: 0.3,
+ output: 0.5,
+ cacheRead: 0.075,
+ cacheWrite: 0,
  },
- contextWindow: 200000,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "claude-3-5-sonnet-20240620": {
- id: "claude-3-5-sonnet-20240620",
- name: "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
- provider: "anthropic",
+ "grok-3": {
+ id: "grok-3",
+ name: "xAI: Grok 3",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
  input: 3,
  output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
- },
- contextWindow: 200000,
- maxTokens: 8192,
- },
- "claude-3-haiku-20240307": {
- id: "claude-3-haiku-20240307",
- name: "Anthropic: Claude 3 Haiku",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.25,
- output: 1.25,
- cacheRead: 0.03,
- cacheWrite: 0.3,
- },
- contextWindow: 200000,
- maxTokens: 4096,
- },
- "claude-3-opus-20240229": {
- id: "claude-3-opus-20240229",
- name: "Anthropic: Claude 3 Opus",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 15,
- output: 75,
- cacheRead: 1.5,
- cacheWrite: 18.75,
+ cacheRead: 0.75,
+ cacheWrite: 0,
  },
- contextWindow: 200000,
+ contextWindow: 131072,
  maxTokens: 4096,
  },
- }
- },
- google: {
- models: {
- "gemini-2.5-flash-lite": {
- id: "gemini-2.5-flash-lite",
- name: "Google: Gemini 2.5 Flash Lite",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 0.09999999999999999,
- output: 0.39999999999999997,
- cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-flash-lite-preview-06-17": {
- id: "gemini-2.5-flash-lite-preview-06-17",
- name: "Google: Gemini 2.5 Flash Lite Preview 06-17",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 0.09999999999999999,
- output: 0.39999999999999997,
- cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-flash": {
- id: "gemini-2.5-flash",
- name: "Google: Gemini 2.5 Flash",
- provider: "google",
+ "grok-3-mini-beta": {
+ id: "grok-3-mini-beta",
+ name: "xAI: Grok 3 Mini Beta",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
  input: 0.3,
- output: 2.5,
+ output: 0.5,
  cacheRead: 0.075,
- cacheWrite: 0.3833,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-pro": {
- id: "gemini-2.5-pro",
- name: "Google: Gemini 2.5 Pro",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 1.25,
- output: 10,
- cacheRead: 0.31,
- cacheWrite: 1.625,
- },
- contextWindow: 1048576,
- maxTokens: 65536,
- },
- "gemini-2.5-pro-preview": {
- id: "gemini-2.5-pro-preview",
- name: "Google: Gemini 2.5 Pro Preview 06-05",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 1.25,
- output: 10,
- cacheRead: 0.31,
- cacheWrite: 1.625,
- },
- contextWindow: 1048576,
- maxTokens: 65536,
- },
- "gemini-2.5-pro-preview-05-06": {
- id: "gemini-2.5-pro-preview-05-06",
- name: "Google: Gemini 2.5 Pro Preview 05-06",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 1.25,
- output: 10,
- cacheRead: 0.31,
- cacheWrite: 1.625,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-pro-exp-03-25": {
- id: "gemini-2.5-pro-exp-03-25",
- name: "Google: Gemini 2.5 Pro Experimental",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.0-flash-lite-001": {
- id: "gemini-2.0-flash-lite-001",
- name: "Google: Gemini 2.0 Flash Lite",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.075,
- output: 0.3,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 1048576,
- maxTokens: 8192,
- },
- "gemini-2.0-flash-001": {
- id: "gemini-2.0-flash-001",
- name: "Google: Gemini 2.0 Flash",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.09999999999999999,
- output: 0.39999999999999997,
- cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
- },
- contextWindow: 1048576,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "gemini-2.0-flash-exp:free": {
- id: "gemini-2.0-flash-exp:free",
- name: "Google: Gemini 2.0 Flash Experimental (free)",
- provider: "google",
+ "grok-3-beta": {
+ id: "grok-3-beta",
+ name: "xAI: Grok 3 Beta",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
+ input: 3,
+ output: 15,
+ cacheRead: 0.75,
  cacheWrite: 0,
  },
- contextWindow: 1048576,
- maxTokens: 8192,
- },
- "gemini-flash-1.5-8b": {
- id: "gemini-flash-1.5-8b",
- name: "Google: Gemini 1.5 Flash 8B",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.0375,
- output: 0.15,
- cacheRead: 0.01,
- cacheWrite: 0.0583,
- },
- contextWindow: 1000000,
- maxTokens: 8192,
- },
- "gemini-flash-1.5": {
- id: "gemini-flash-1.5",
- name: "Google: Gemini 1.5 Flash ",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.075,
- output: 0.3,
- cacheRead: 0.01875,
- cacheWrite: 0.1583,
- },
- contextWindow: 1000000,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "gemini-pro-1.5": {
- id: "gemini-pro-1.5",
- name: "Google: Gemini 1.5 Pro",
- provider: "google",
+ "grok-2-1212": {
+ id: "grok-2-1212",
+ name: "xAI: Grok 2 1212",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 1.25,
- output: 5,
+ input: 2,
+ output: 10,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 2000000,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
  }
  },