@mariozechner/pi-ai 0.5.25 → 0.5.27

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
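For orientation: the generated file exports a single PROVIDERS constant, keyed by provider and then by model id, with per-model pricing (input, output, cacheRead, cacheWrite), context window, and maximum output tokens. The sketch below shows one way such a map could be queried to estimate a request's cost. It is illustrative only; the import path and the assumption that the cost figures are USD per million tokens are not stated by this diff.

// Minimal sketch, not the package's documented API. Assumes PROVIDERS is
// re-exported from the package root and that cost values are USD per million tokens.
import { PROVIDERS } from "@mariozechner/pi-ai";

type ProviderId = keyof typeof PROVIDERS;

function estimateCostUsd(
  provider: ProviderId,
  modelId: string,
  inputTokens: number,
  outputTokens: number,
): number | undefined {
  // Index by model id; the cast keeps the lookup generic across providers.
  const models = PROVIDERS[provider].models as Record<
    string,
    { cost: { input: number; output: number } }
  >;
  const model = models[modelId];
  if (!model) return undefined; // unknown model id for this provider
  return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}

// Example: 10,000 input tokens and 2,000 output tokens on claude-sonnet-4-20250514.
console.log(estimateCostUsd("anthropic", "claude-sonnet-4-20250514", 10_000, 2_000));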
@@ -1,963 +1,981 @@
  // This file is auto-generated by scripts/generate-models.ts
  // Do not edit manually - run 'npm run generate-models' to update
  export const PROVIDERS = {
- groq: {
+ anthropic: {
  models: {
- "llama-3.1-8b-instant": {
- id: "llama-3.1-8b-instant",
- name: "Llama 3.1 8B Instant",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
- reasoning: false,
- input: ["text"],
+ "claude-3-7-sonnet-20250219": {
+ id: "claude-3-7-sonnet-20250219",
+ name: "Claude Sonnet 3.7",
+ provider: "anthropic",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.05,
- output: 0.08,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 3,
+ output: 15,
+ cacheRead: 0.3,
+ cacheWrite: 3.75,
  },
- contextWindow: 131072,
- maxTokens: 8192,
+ contextWindow: 200000,
+ maxTokens: 64000,
  },
- "qwen-qwq-32b": {
- id: "qwen-qwq-32b",
- name: "Qwen QwQ 32B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-opus-4-1-20250805": {
+ id: "claude-opus-4-1-20250805",
+ name: "Claude Opus 4.1",
+ provider: "anthropic",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.29,
- output: 0.39,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 15,
+ output: 75,
+ cacheRead: 1.5,
+ cacheWrite: 18.75,
  },
- contextWindow: 131072,
- maxTokens: 16384,
+ contextWindow: 200000,
+ maxTokens: 32000,
  },
- "llama3-70b-8192": {
- id: "llama3-70b-8192",
- name: "Llama 3 70B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-3-haiku-20240307": {
+ id: "claude-3-haiku-20240307",
+ name: "Claude Haiku 3",
+ provider: "anthropic",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.59,
- output: 0.79,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 0.25,
+ output: 1.25,
+ cacheRead: 0.03,
+ cacheWrite: 0.3,
  },
- contextWindow: 8192,
+ contextWindow: 200000,
+ maxTokens: 4096,
+ },
+ "claude-3-5-haiku-20241022": {
+ id: "claude-3-5-haiku-20241022",
+ name: "Claude Haiku 3.5",
+ provider: "anthropic",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 0.8,
+ output: 4,
+ cacheRead: 0.08,
+ cacheWrite: 1,
+ },
+ contextWindow: 200000,
  maxTokens: 8192,
  },
- "deepseek-r1-distill-llama-70b": {
- id: "deepseek-r1-distill-llama-70b",
- name: "DeepSeek R1 Distill Llama 70B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-opus-4-20250514": {
+ id: "claude-opus-4-20250514",
+ name: "Claude Opus 4",
+ provider: "anthropic",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.75,
- output: 0.99,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 15,
+ output: 75,
+ cacheRead: 1.5,
+ cacheWrite: 18.75,
  },
- contextWindow: 131072,
- maxTokens: 8192,
+ contextWindow: 200000,
+ maxTokens: 32000,
  },
- "llama3-8b-8192": {
- id: "llama3-8b-8192",
- name: "Llama 3 8B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-3-5-sonnet-20241022": {
+ id: "claude-3-5-sonnet-20241022",
+ name: "Claude Sonnet 3.5 v2",
+ provider: "anthropic",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.05,
- output: 0.08,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 3,
+ output: 15,
+ cacheRead: 0.3,
+ cacheWrite: 3.75,
  },
- contextWindow: 8192,
+ contextWindow: 200000,
  maxTokens: 8192,
  },
- "gemma2-9b-it": {
- id: "gemma2-9b-it",
- name: "Gemma 2 9B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-3-5-sonnet-20240620": {
+ id: "claude-3-5-sonnet-20240620",
+ name: "Claude Sonnet 3.5",
+ provider: "anthropic",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.2,
- output: 0.2,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 3,
+ output: 15,
+ cacheRead: 0.3,
+ cacheWrite: 3.75,
  },
- contextWindow: 8192,
+ contextWindow: 200000,
  maxTokens: 8192,
  },
- "llama-3.3-70b-versatile": {
- id: "llama-3.3-70b-versatile",
- name: "Llama 3.3 70B Versatile",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-3-sonnet-20240229": {
+ id: "claude-3-sonnet-20240229",
+ name: "Claude Sonnet 3",
+ provider: "anthropic",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.59,
- output: 0.79,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 3,
+ output: 15,
+ cacheRead: 0.3,
+ cacheWrite: 0.3,
  },
- contextWindow: 131072,
- maxTokens: 32768,
+ contextWindow: 200000,
+ maxTokens: 4096,
  },
- "mistral-saba-24b": {
- id: "mistral-saba-24b",
- name: "Mistral Saba 24B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "claude-sonnet-4-20250514": {
+ id: "claude-sonnet-4-20250514",
+ name: "Claude Sonnet 4",
+ provider: "anthropic",
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 3,
+ output: 15,
+ cacheRead: 0.3,
+ cacheWrite: 3.75,
+ },
+ contextWindow: 200000,
+ maxTokens: 64000,
+ },
+ "claude-3-opus-20240229": {
+ id: "claude-3-opus-20240229",
+ name: "Claude Opus 3",
+ provider: "anthropic",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.79,
- output: 0.79,
- cacheRead: 0,
- cacheWrite: 0,
+ input: 15,
+ output: 75,
+ cacheRead: 1.5,
+ cacheWrite: 18.75,
  },
- contextWindow: 32768,
- maxTokens: 32768,
+ contextWindow: 200000,
+ maxTokens: 4096,
  },
- "openai/gpt-oss-20b": {
- id: "openai/gpt-oss-20b",
- name: "GPT OSS 20B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ }
+ },
+ google: {
+ models: {
+ "gemini-2.5-flash-preview-05-20": {
+ id: "gemini-2.5-flash-preview-05-20",
+ name: "Gemini 2.5 Flash Preview 05-20",
+ provider: "google",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.1,
- output: 0.5,
- cacheRead: 0,
+ input: 0.15,
+ output: 0.6,
+ cacheRead: 0.0375,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 32768,
+ contextWindow: 1048576,
+ maxTokens: 65536,
  },
- "openai/gpt-oss-120b": {
- id: "openai/gpt-oss-120b",
- name: "GPT OSS 120B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "gemini-2.5-pro": {
+ id: "gemini-2.5-pro",
+ name: "Gemini 2.5 Pro",
+ provider: "google",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.15,
- output: 0.75,
- cacheRead: 0,
+ input: 1.25,
+ output: 10,
+ cacheRead: 0.31,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 32768,
+ contextWindow: 1048576,
+ maxTokens: 65536,
  },
- "meta-llama/llama-4-maverick-17b-128e-instruct": {
- id: "meta-llama/llama-4-maverick-17b-128e-instruct",
- name: "Llama 4 Maverick 17B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "gemini-1.5-flash": {
+ id: "gemini-1.5-flash",
+ name: "Gemini 1.5 Flash",
+ provider: "google",
  reasoning: false,
  input: ["text", "image"],
  cost: {
- input: 0.2,
- output: 0.6,
- cacheRead: 0,
+ input: 0.075,
+ output: 0.3,
+ cacheRead: 0.01875,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 1000000,
  maxTokens: 8192,
  },
- "meta-llama/llama-4-scout-17b-16e-instruct": {
- id: "meta-llama/llama-4-scout-17b-16e-instruct",
- name: "Llama 4 Scout 17B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "gemini-2.0-flash-lite": {
+ id: "gemini-2.0-flash-lite",
+ name: "Gemini 2.0 Flash Lite",
+ provider: "google",
  reasoning: false,
  input: ["text", "image"],
  cost: {
- input: 0.11,
- output: 0.34,
+ input: 0.075,
+ output: 0.3,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 1048576,
  maxTokens: 8192,
  },
- "qwen/qwen3-32b": {
- id: "qwen/qwen3-32b",
- name: "Qwen3 32B",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
- reasoning: true,
- input: ["text"],
+ "gemini-1.5-pro": {
+ id: "gemini-1.5-pro",
+ name: "Gemini 1.5 Pro",
+ provider: "google",
+ reasoning: false,
+ input: ["text", "image"],
  cost: {
- input: 0.29,
- output: 0.59,
- cacheRead: 0,
+ input: 1.25,
+ output: 5,
+ cacheRead: 0.3125,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 16384,
+ contextWindow: 1000000,
+ maxTokens: 8192,
  },
- "moonshotai/kimi-k2-instruct": {
- id: "moonshotai/kimi-k2-instruct",
- name: "Kimi K2 Instruct",
- provider: "groq",
- baseUrl: "https://api.groq.com/openai/v1",
+ "gemini-1.5-flash-8b": {
+ id: "gemini-1.5-flash-8b",
+ name: "Gemini 1.5 Flash-8B",
+ provider: "google",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 1,
- output: 3,
- cacheRead: 0,
+ input: 0.0375,
+ output: 0.15,
+ cacheRead: 0.01,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 16384,
+ contextWindow: 1000000,
+ maxTokens: 8192,
  },
- }
- },
- cerebras: {
- models: {
- "qwen-3-235b-a22b-instruct-2507": {
- id: "qwen-3-235b-a22b-instruct-2507",
- name: "Qwen 3 235B Instruct",
- provider: "cerebras",
- baseUrl: "https://api.cerebras.ai/v1",
- reasoning: false,
- input: ["text"],
+ "gemini-2.5-flash": {
+ id: "gemini-2.5-flash",
+ name: "Gemini 2.5 Flash",
+ provider: "google",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.6,
- output: 1.2,
- cacheRead: 0,
+ input: 0.3,
+ output: 2.5,
+ cacheRead: 0.075,
  cacheWrite: 0,
  },
- contextWindow: 131000,
- maxTokens: 32000,
+ contextWindow: 1048576,
+ maxTokens: 65536,
  },
- "gpt-oss-120b": {
- id: "gpt-oss-120b",
- name: "GPT OSS 120B",
- provider: "cerebras",
- baseUrl: "https://api.cerebras.ai/v1",
+ "gemini-2.5-pro-preview-06-05": {
+ id: "gemini-2.5-pro-preview-06-05",
+ name: "Gemini 2.5 Pro Preview 06-05",
+ provider: "google",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.25,
- output: 0.69,
- cacheRead: 0,
+ input: 1.25,
+ output: 10,
+ cacheRead: 0.31,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 32768,
+ contextWindow: 1048576,
+ maxTokens: 65536,
  },
- "qwen-3-coder-480b": {
- id: "qwen-3-coder-480b",
- name: "Qwen 3 Coder 480B",
- provider: "cerebras",
- baseUrl: "https://api.cerebras.ai/v1",
+ "gemini-2.5-pro-preview-05-06": {
+ id: "gemini-2.5-pro-preview-05-06",
+ name: "Gemini 2.5 Pro Preview 05-06",
+ provider: "google",
+ reasoning: true,
+ input: ["text", "image"],
+ cost: {
+ input: 1.25,
+ output: 10,
+ cacheRead: 0.31,
+ cacheWrite: 0,
+ },
+ contextWindow: 1048576,
+ maxTokens: 65536,
+ },
+ "gemini-2.0-flash": {
+ id: "gemini-2.0-flash",
+ name: "Gemini 2.0 Flash",
+ provider: "google",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 2,
- output: 2,
- cacheRead: 0,
+ input: 0.1,
+ output: 0.4,
+ cacheRead: 0.025,
  cacheWrite: 0,
  },
- contextWindow: 131000,
- maxTokens: 32000,
+ contextWindow: 1048576,
+ maxTokens: 8192,
  },
- }
- },
- openrouter: {
- models: {
- "qwen/qwen3-30b-a3b-thinking-2507": {
- id: "qwen/qwen3-30b-a3b-thinking-2507",
- name: "Qwen: Qwen3 30B A3B Thinking 2507",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gemini-2.5-flash-lite-preview-06-17": {
+ id: "gemini-2.5-flash-lite-preview-06-17",
+ name: "Gemini 2.5 Flash Lite Preview 06-17",
+ provider: "google",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.0713,
- output: 0.2852,
- cacheRead: 0,
+ input: 0.1,
+ output: 0.4,
+ cacheRead: 0.025,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 262144,
+ contextWindow: 65536,
+ maxTokens: 65536,
  },
- "nousresearch/hermes-4-70b": {
- id: "nousresearch/hermes-4-70b",
- name: "Nous: Hermes 4 70B",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gemini-2.5-flash-preview-04-17": {
+ id: "gemini-2.5-flash-preview-04-17",
+ name: "Gemini 2.5 Flash Preview 04-17",
+ provider: "google",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.09329544,
- output: 0.3733632,
- cacheRead: 0,
+ input: 0.15,
+ output: 0.6,
+ cacheRead: 0.0375,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 4096,
+ contextWindow: 1048576,
+ maxTokens: 65536,
  },
- "nousresearch/hermes-4-405b": {
- id: "nousresearch/hermes-4-405b",
- name: "Nous: Hermes 4 405B",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ }
+ },
+ openai: {
+ models: {
+ "gpt-5-nano": {
+ id: "gpt-5-nano",
+ name: "GPT-5 Nano",
+ provider: "openai",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.1999188,
- output: 0.800064,
- cacheRead: 0,
+ input: 0.05,
+ output: 0.4,
+ cacheRead: 0.01,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 4096,
+ contextWindow: 400000,
+ maxTokens: 128000,
  },
- "deepseek/deepseek-chat-v3.1:free": {
- id: "deepseek/deepseek-chat-v3.1:free",
- name: "DeepSeek: DeepSeek V3.1 (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "o3-pro": {
+ id: "o3-pro",
+ name: "o3-pro",
+ provider: "openai",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0,
- output: 0,
+ input: 20,
+ output: 80,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 64000,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "deepseek/deepseek-chat-v3.1": {
- id: "deepseek/deepseek-chat-v3.1",
- name: "DeepSeek: DeepSeek V3.1",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "codex-mini-latest": {
+ id: "codex-mini-latest",
+ name: "Codex Mini",
+ provider: "openai",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.19999999999999998,
- output: 0.7999999999999999,
- cacheRead: 0,
+ input: 1.5,
+ output: 6,
+ cacheRead: 0.375,
  cacheWrite: 0,
  },
- contextWindow: 163840,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "mistralai/mistral-medium-3.1": {
- id: "mistralai/mistral-medium-3.1",
- name: "Mistral: Mistral Medium 3.1",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gpt-4.1": {
+ id: "gpt-4.1",
+ name: "GPT-4.1",
+ provider: "openai",
  reasoning: false,
  input: ["text", "image"],
  cost: {
- input: 0.39999999999999997,
- output: 2,
+ input: 2,
+ output: 8,
+ cacheRead: 0.5,
+ cacheWrite: 0,
+ },
+ contextWindow: 1047576,
+ maxTokens: 32768,
+ },
+ "gpt-4-turbo": {
+ id: "gpt-4-turbo",
+ name: "GPT-4 Turbo",
+ provider: "openai",
+ reasoning: false,
+ input: ["text", "image"],
+ cost: {
+ input: 10,
+ output: 30,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 128000,
  maxTokens: 4096,
  },
- "z-ai/glm-4.5v": {
- id: "z-ai/glm-4.5v",
- name: "Z.AI: GLM 4.5V",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "o1": {
+ id: "o1",
+ name: "o1",
+ provider: "openai",
  reasoning: true,
  input: ["text", "image"],
  cost: {
- input: 0.5,
- output: 1.7999999999999998,
- cacheRead: 0,
+ input: 15,
+ output: 60,
+ cacheRead: 7.5,
  cacheWrite: 0,
  },
- contextWindow: 65536,
- maxTokens: 65536,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "ai21/jamba-mini-1.7": {
- id: "ai21/jamba-mini-1.7",
- name: "AI21: Jamba Mini 1.7",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "o3-deep-research": {
+ id: "o3-deep-research",
+ name: "o3-deep-research",
+ provider: "openai",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.19999999999999998,
- output: 0.39999999999999997,
- cacheRead: 0,
+ input: 10,
+ output: 40,
+ cacheRead: 2.5,
  cacheWrite: 0,
  },
- contextWindow: 256000,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "ai21/jamba-large-1.7": {
- id: "ai21/jamba-large-1.7",
- name: "AI21: Jamba Large 1.7",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "gpt-5": {
+ id: "gpt-5",
+ name: "GPT-5",
+ provider: "openai",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 2,
- output: 8,
- cacheRead: 0,
+ input: 1.25,
+ output: 10,
+ cacheRead: 0.13,
  cacheWrite: 0,
  },
- contextWindow: 256000,
- maxTokens: 4096,
+ contextWindow: 400000,
+ maxTokens: 128000,
  },
- "mistralai/codestral-2508": {
- id: "mistralai/codestral-2508",
- name: "Mistral: Codestral 2508",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "o1-pro": {
+ id: "o1-pro",
+ name: "o1-pro",
+ provider: "openai",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.3,
- output: 0.8999999999999999,
+ input: 150,
+ output: 600,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 256000,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "qwen/qwen3-coder-30b-a3b-instruct": {
- id: "qwen/qwen3-coder-30b-a3b-instruct",
- name: "Qwen: Qwen3 Coder 30B A3B Instruct",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "o3": {
+ id: "o3",
+ name: "o3",
+ provider: "openai",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.051830799999999996,
- output: 0.207424,
- cacheRead: 0,
+ input: 2,
+ output: 8,
+ cacheRead: 0.5,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "qwen/qwen3-30b-a3b-instruct-2507": {
- id: "qwen/qwen3-30b-a3b-instruct-2507",
- name: "Qwen: Qwen3 30B A3B Instruct 2507",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "gpt-5-mini": {
+ id: "gpt-5-mini",
+ name: "GPT-5 Mini",
+ provider: "openai",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.051830799999999996,
- output: 0.207424,
- cacheRead: 0,
+ input: 0.25,
+ output: 2,
+ cacheRead: 0.03,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 400000,
+ maxTokens: 128000,
  },
- "z-ai/glm-4.5": {
- id: "z-ai/glm-4.5",
- name: "Z.AI: GLM 4.5",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "o4-mini-deep-research": {
+ id: "o4-mini-deep-research",
+ name: "o4-mini-deep-research",
+ provider: "openai",
  reasoning: true,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.32986602,
- output: 1.3201056,
- cacheRead: 0,
+ input: 2,
+ output: 8,
+ cacheRead: 0.5,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "z-ai/glm-4.5-air:free": {
- id: "z-ai/glm-4.5-air:free",
- name: "Z.AI: GLM 4.5 Air (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
- input: ["text"],
+ "gpt-4o-mini": {
+ id: "gpt-4o-mini",
+ name: "GPT-4o mini",
+ provider: "openai",
+ reasoning: false,
+ input: ["text", "image"],
  cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
+ input: 0.15,
+ output: 0.6,
+ cacheRead: 0.08,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 4096,
+ contextWindow: 128000,
+ maxTokens: 16384,
  },
- "z-ai/glm-4.5-air": {
- id: "z-ai/glm-4.5-air",
- name: "Z.AI: GLM 4.5 Air",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
- input: ["text"],
+ "gpt-4.1-nano": {
+ id: "gpt-4.1-nano",
+ name: "GPT-4.1 nano",
+ provider: "openai",
+ reasoning: false,
+ input: ["text", "image"],
  cost: {
- input: 0.14,
- output: 0.86,
- cacheRead: 0,
+ input: 0.1,
+ output: 0.4,
+ cacheRead: 0.03,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 131072,
+ contextWindow: 1047576,
+ maxTokens: 32768,
  },
- "qwen/qwen3-235b-a22b-thinking-2507": {
- id: "qwen/qwen3-235b-a22b-thinking-2507",
- name: "Qwen: Qwen3 235B A22B Thinking 2507",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
- input: ["text"],
+ "gpt-4.1-mini": {
+ id: "gpt-4.1-mini",
+ name: "GPT-4.1 mini",
+ provider: "openai",
+ reasoning: false,
+ input: ["text", "image"],
  cost: {
- input: 0.077968332,
- output: 0.31202496,
- cacheRead: 0,
+ input: 0.4,
+ output: 1.6,
+ cacheRead: 0.1,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 1047576,
+ maxTokens: 32768,
  },
- "z-ai/glm-4-32b": {
- id: "z-ai/glm-4-32b",
- name: "Z.AI: GLM 4 32B ",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gpt-4o": {
+ id: "gpt-4o",
+ name: "GPT-4o",
+ provider: "openai",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.09999999999999999,
- output: 0.09999999999999999,
- cacheRead: 0,
+ input: 2.5,
+ output: 10,
+ cacheRead: 1.25,
  cacheWrite: 0,
  },
  contextWindow: 128000,
- maxTokens: 4096,
+ maxTokens: 16384,
  },
- "qwen/qwen3-coder:free": {
- id: "qwen/qwen3-coder:free",
- name: "Qwen: Qwen3 Coder 480B A35B (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gpt-4": {
+ id: "gpt-4",
+ name: "GPT-4",
+ provider: "openai",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 30,
+ output: 60,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 8192,
+ maxTokens: 8192,
  },
- "qwen/qwen3-coder": {
- id: "qwen/qwen3-coder",
- name: "Qwen: Qwen3 Coder 480B A35B",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "o4-mini": {
+ id: "o4-mini",
+ name: "o4-mini",
+ provider: "openai",
+ reasoning: true,
+ input: ["text", "image"],
  cost: {
- input: 0.19999999999999998,
- output: 0.7999999999999999,
- cacheRead: 0,
+ input: 1.1,
+ output: 4.4,
+ cacheRead: 0.28,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "qwen/qwen3-235b-a22b-2507": {
- id: "qwen/qwen3-235b-a22b-2507",
- name: "Qwen: Qwen3 235B A22B Instruct 2507",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ "o3-mini": {
+ id: "o3-mini",
+ name: "o3-mini",
+ provider: "openai",
+ reasoning: true,
  input: ["text"],
  cost: {
- input: 0.077968332,
- output: 0.31202496,
- cacheRead: 0,
+ input: 1.1,
+ output: 4.4,
+ cacheRead: 0.55,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 200000,
+ maxTokens: 100000,
  },
- "moonshotai/kimi-k2:free": {
- id: "moonshotai/kimi-k2:free",
- name: "MoonshotAI: Kimi K2 (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gpt-5-chat-latest": {
+ id: "gpt-5-chat-latest",
+ name: "GPT-5 Chat Latest",
+ provider: "openai",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
+ input: 1.25,
+ output: 10,
+ cacheRead: 0.125,
  cacheWrite: 0,
  },
- contextWindow: 32768,
- maxTokens: 4096,
+ contextWindow: 128000,
+ maxTokens: 16384,
  },
- "moonshotai/kimi-k2": {
- id: "moonshotai/kimi-k2",
- name: "MoonshotAI: Kimi K2",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ }
+ },
+ groq: {
+ models: {
+ "llama-3.1-8b-instant": {
+ id: "llama-3.1-8b-instant",
+ name: "Llama 3.1 8B Instant",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.14,
- output: 2.4899999999999998,
+ input: 0.05,
+ output: 0.08,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 63000,
- maxTokens: 63000,
+ contextWindow: 131072,
+ maxTokens: 8192,
  },
- "mistralai/devstral-medium": {
- id: "mistralai/devstral-medium",
- name: "Mistral: Devstral Medium",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ "qwen-qwq-32b": {
+ id: "qwen-qwq-32b",
+ name: "Qwen QwQ 32B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
+ reasoning: true,
  input: ["text"],
  cost: {
- input: 0.39999999999999997,
- output: 2,
+ input: 0.29,
+ output: 0.39,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
- maxTokens: 4096,
+ maxTokens: 16384,
  },
- "mistralai/devstral-small": {
- id: "mistralai/devstral-small",
- name: "Mistral: Devstral Small 1.1",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "llama3-70b-8192": {
+ id: "llama3-70b-8192",
+ name: "Llama 3 70B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.07,
- output: 0.28,
+ input: 0.59,
+ output: 0.79,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 4096,
+ contextWindow: 8192,
+ maxTokens: 8192,
  },
- "inception/mercury": {
- id: "inception/mercury",
- name: "Inception: Mercury",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ "deepseek-r1-distill-llama-70b": {
+ id: "deepseek-r1-distill-llama-70b",
+ name: "DeepSeek R1 Distill Llama 70B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
+ reasoning: true,
  input: ["text"],
  cost: {
- input: 0.25,
- output: 1,
+ input: 0.75,
+ output: 0.99,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 16384,
+ contextWindow: 131072,
+ maxTokens: 8192,
  },
- "mistralai/mistral-small-3.2-24b-instruct:free": {
- id: "mistralai/mistral-small-3.2-24b-instruct:free",
- name: "Mistral: Mistral Small 3.2 24B (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "llama3-8b-8192": {
+ id: "llama3-8b-8192",
+ name: "Llama 3 8B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.05,
+ output: 0.08,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 4096,
+ contextWindow: 8192,
+ maxTokens: 8192,
  },
- "mistralai/mistral-small-3.2-24b-instruct": {
- id: "mistralai/mistral-small-3.2-24b-instruct",
- name: "Mistral: Mistral Small 3.2 24B",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "gemma2-9b-it": {
+ id: "gemma2-9b-it",
+ name: "Gemma 2 9B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0.049999999999999996,
- output: 0.09999999999999999,
+ input: 0.2,
+ output: 0.2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 4096,
+ contextWindow: 8192,
+ maxTokens: 8192,
  },
- "minimax/minimax-m1": {
- id: "minimax/minimax-m1",
- name: "MiniMax: MiniMax M1",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
+ "llama-3.3-70b-versatile": {
+ id: "llama-3.3-70b-versatile",
+ name: "Llama 3.3 70B Versatile",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
+ reasoning: false,
  input: ["text"],
  cost: {
- input: 0.3,
- output: 1.6500000000000001,
+ input: 0.59,
+ output: 0.79,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 1000000,
- maxTokens: 40000,
+ contextWindow: 131072,
+ maxTokens: 32768,
  },
- "mistralai/magistral-small-2506": {
- id: "mistralai/magistral-small-2506",
- name: "Mistral: Magistral Small 2506",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
+ "mistral-saba-24b": {
+ id: "mistral-saba-24b",
+ name: "Mistral Saba 24B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
+ reasoning: false,
  input: ["text"],
  cost: {
- input: 0.5,
- output: 1.5,
+ input: 0.79,
+ output: 0.79,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40000,
- maxTokens: 40000,
+ contextWindow: 32768,
+ maxTokens: 32768,
  },
- "mistralai/magistral-medium-2506": {
- id: "mistralai/magistral-medium-2506",
- name: "Mistral: Magistral Medium 2506",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "openai/gpt-oss-20b": {
+ id: "openai/gpt-oss-20b",
+ name: "GPT OSS 20B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 2,
- output: 5,
+ input: 0.1,
+ output: 0.5,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
- maxTokens: 40000,
+ contextWindow: 131072,
+ maxTokens: 32768,
  },
- "mistralai/magistral-medium-2506:thinking": {
- id: "mistralai/magistral-medium-2506:thinking",
- name: "Mistral: Magistral Medium 2506 (thinking)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "openai/gpt-oss-120b": {
+ id: "openai/gpt-oss-120b",
+ name: "GPT OSS 120B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 2,
- output: 5,
+ input: 0.15,
+ output: 0.75,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
- maxTokens: 40000,
+ contextWindow: 131072,
+ maxTokens: 32768,
  },
- "deepseek/deepseek-r1-0528": {
- id: "deepseek/deepseek-r1-0528",
- name: "DeepSeek: R1 0528",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
- input: ["text"],
+ "meta-llama/llama-4-maverick-17b-128e-instruct": {
+ id: "meta-llama/llama-4-maverick-17b-128e-instruct",
+ name: "Llama 4 Maverick 17B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
+ reasoning: false,
+ input: ["text", "image"],
  cost: {
- input: 0.1999188,
- output: 0.800064,
+ input: 0.2,
+ output: 0.6,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 163840,
- maxTokens: 4096,
+ contextWindow: 131072,
+ maxTokens: 8192,
  },
- "mistralai/devstral-small-2505:free": {
- id: "mistralai/devstral-small-2505:free",
- name: "Mistral: Devstral Small 2505 (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "meta-llama/llama-4-scout-17b-16e-instruct": {
+ id: "meta-llama/llama-4-scout-17b-16e-instruct",
+ name: "Llama 4 Scout 17B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.11,
+ output: 0.34,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 32768,
- maxTokens: 4096,
+ contextWindow: 131072,
+ maxTokens: 8192,
  },
- "mistralai/devstral-small-2505": {
- id: "mistralai/devstral-small-2505",
- name: "Mistral: Devstral Small 2505",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text"],
+ "qwen/qwen3-32b": {
+ id: "qwen/qwen3-32b",
+ name: "Qwen3 32B",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
+ reasoning: true,
+ input: ["text"],
  cost: {
- input: 0.01999188,
- output: 0.0800064,
+ input: 0.29,
+ output: 0.59,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
- maxTokens: 4096,
+ maxTokens: 16384,
  },
- "meta-llama/llama-3.3-8b-instruct:free": {
- id: "meta-llama/llama-3.3-8b-instruct:free",
- name: "Meta: Llama 3.3 8B Instruct (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ "moonshotai/kimi-k2-instruct": {
+ id: "moonshotai/kimi-k2-instruct",
+ name: "Kimi K2 Instruct",
+ provider: "groq",
+ baseUrl: "https://api.groq.com/openai/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 128000,
- maxTokens: 4028,
- },
- "mistralai/mistral-medium-3": {
- id: "mistralai/mistral-medium-3",
- name: "Mistral: Mistral Medium 3",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.39999999999999997,
- output: 2,
+ input: 1,
+ output: 3,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
- maxTokens: 4096,
+ maxTokens: 16384,
  },
- "arcee-ai/virtuoso-large": {
- id: "arcee-ai/virtuoso-large",
- name: "Arcee AI: Virtuoso Large",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
+ }
+ },
+ cerebras: {
+ models: {
+ "qwen-3-235b-a22b-instruct-2507": {
+ id: "qwen-3-235b-a22b-instruct-2507",
+ name: "Qwen 3 235B Instruct",
+ provider: "cerebras",
+ baseUrl: "https://api.cerebras.ai/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.75,
+ input: 0.6,
  output: 1.2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 64000,
+ contextWindow: 131000,
+ maxTokens: 32000,
  },
- "inception/mercury-coder": {
- id: "inception/mercury-coder",
- name: "Inception: Mercury Coder",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ "gpt-oss-120b": {
+ id: "gpt-oss-120b",
+ name: "GPT OSS 120B",
+ provider: "cerebras",
+ baseUrl: "https://api.cerebras.ai/v1",
+ reasoning: true,
  input: ["text"],
  cost: {
  input: 0.25,
- output: 1,
+ output: 0.69,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 16384,
+ contextWindow: 131072,
+ maxTokens: 32768,
  },
- "qwen/qwen3-4b:free": {
- id: "qwen/qwen3-4b:free",
- name: "Qwen: Qwen3 4B (free)",
- provider: "openrouter",
- baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
+ "qwen-3-coder-480b": {
+ id: "qwen-3-coder-480b",
+ name: "Qwen 3 Coder 480B",
+ provider: "cerebras",
+ baseUrl: "https://api.cerebras.ai/v1",
+ reasoning: false,
  input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 2,
+ output: 2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
- maxTokens: 4096,
+ contextWindow: 131000,
+ maxTokens: 32000,
  },
- "qwen/qwen3-30b-a3b": {
- id: "qwen/qwen3-30b-a3b",
- name: "Qwen: Qwen3 30B A3B",
+ }
+ },
+ openrouter: {
+ models: {
+ "qwen/qwen3-30b-a3b-thinking-2507": {
+ id: "qwen/qwen3-30b-a3b-thinking-2507",
+ name: "Qwen: Qwen3 30B A3B Thinking 2507",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.01999188,
- output: 0.0800064,
+ input: 0.0713,
+ output: 0.2852,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
- maxTokens: 4096,
+ contextWindow: 262144,
+ maxTokens: 262144,
  },
- "qwen/qwen3-14b": {
- id: "qwen/qwen3-14b",
- name: "Qwen: Qwen3 14B",
+ "nousresearch/hermes-4-70b": {
+ id: "nousresearch/hermes-4-70b",
+ name: "Nous: Hermes 4 70B",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.06,
- output: 0.24,
+ input: 0.09329544,
+ output: 0.3733632,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
- maxTokens: 40960,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "qwen/qwen3-32b": {
- id: "qwen/qwen3-32b",
- name: "Qwen: Qwen3 32B",
+ "nousresearch/hermes-4-405b": {
+ id: "nousresearch/hermes-4-405b",
+ name: "Nous: Hermes 4 405B",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.017992691999999998,
- output: 0.07200576,
+ input: 0.1999188,
+ output: 0.800064,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
+ contextWindow: 131072,
  maxTokens: 4096,
  },
- "qwen/qwen3-235b-a22b:free": {
- id: "qwen/qwen3-235b-a22b:free",
- name: "Qwen: Qwen3 235B A22B (free)",
+ "deepseek/deepseek-chat-v3.1:free": {
+ id: "deepseek/deepseek-chat-v3.1:free",
+ name: "DeepSeek: DeepSeek V3.1 (free)",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: true,
@@ -968,700 +986,716 @@ export const PROVIDERS = {
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 64000,
  maxTokens: 4096,
  },
- "qwen/qwen3-235b-a22b": {
- id: "qwen/qwen3-235b-a22b",
- name: "Qwen: Qwen3 235B A22B",
+ "deepseek/deepseek-chat-v3.1": {
+ id: "deepseek/deepseek-chat-v3.1",
+ name: "DeepSeek: DeepSeek V3.1",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.13,
- output: 0.6,
+ input: 0.19999999999999998,
+ output: 0.7999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 40960,
- maxTokens: 40960,
+ contextWindow: 163840,
+ maxTokens: 4096,
  },
- "meta-llama/llama-4-maverick:free": {
- id: "meta-llama/llama-4-maverick:free",
- name: "Meta: Llama 4 Maverick (free)",
+ "mistralai/mistral-medium-3.1": {
+ id: "mistralai/mistral-medium-3.1",
+ name: "Mistral: Mistral Medium 3.1",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text", "image"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.39999999999999997,
+ output: 2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 4028,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "meta-llama/llama-4-maverick": {
- id: "meta-llama/llama-4-maverick",
- name: "Meta: Llama 4 Maverick",
+ "z-ai/glm-4.5v": {
+ id: "z-ai/glm-4.5v",
+ name: "Z.AI: GLM 4.5V",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ reasoning: true,
  input: ["text", "image"],
  cost: {
- input: 0.15,
- output: 0.6,
+ input: 0.5,
+ output: 1.7999999999999998,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 1048576,
- maxTokens: 16384,
+ contextWindow: 65536,
+ maxTokens: 65536,
  },
- "meta-llama/llama-4-scout:free": {
- id: "meta-llama/llama-4-scout:free",
- name: "Meta: Llama 4 Scout (free)",
+ "ai21/jamba-mini-1.7": {
+ id: "ai21/jamba-mini-1.7",
+ name: "AI21: Jamba Mini 1.7",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.19999999999999998,
+ output: 0.39999999999999997,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
- maxTokens: 4028,
+ contextWindow: 256000,
+ maxTokens: 4096,
  },
- "meta-llama/llama-4-scout": {
- id: "meta-llama/llama-4-scout",
- name: "Meta: Llama 4 Scout",
+ "ai21/jamba-large-1.7": {
+ id: "ai21/jamba-large-1.7",
+ name: "AI21: Jamba Large 1.7",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0.08,
- output: 0.3,
+ input: 2,
+ output: 8,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 1048576,
- maxTokens: 1048576,
+ contextWindow: 256000,
+ maxTokens: 4096,
  },
- "deepseek/deepseek-chat-v3-0324:free": {
- id: "deepseek/deepseek-chat-v3-0324:free",
- name: "DeepSeek: DeepSeek V3 0324 (free)",
+ "mistralai/codestral-2508": {
+ id: "mistralai/codestral-2508",
+ name: "Mistral: Codestral 2508",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.3,
+ output: 0.8999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 163840,
+ contextWindow: 256000,
  maxTokens: 4096,
  },
- "deepseek/deepseek-chat-v3-0324": {
- id: "deepseek/deepseek-chat-v3-0324",
- name: "DeepSeek: DeepSeek V3 0324",
+ "qwen/qwen3-coder-30b-a3b-instruct": {
+ id: "qwen/qwen3-coder-30b-a3b-instruct",
+ name: "Qwen: Qwen3 Coder 30B A3B Instruct",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.1999188,
- output: 0.800064,
+ input: 0.051830799999999996,
+ output: 0.207424,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 163840,
+ contextWindow: 262144,
  maxTokens: 4096,
  },
- "mistralai/mistral-small-3.1-24b-instruct:free": {
- id: "mistralai/mistral-small-3.1-24b-instruct:free",
- name: "Mistral: Mistral Small 3.1 24B (free)",
+ "qwen/qwen3-30b-a3b-instruct-2507": {
+ id: "qwen/qwen3-30b-a3b-instruct-2507",
+ name: "Qwen: Qwen3 30B A3B Instruct 2507",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.051830799999999996,
+ output: 0.207424,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 128000,
+ contextWindow: 262144,
  maxTokens: 4096,
  },
- "mistralai/mistral-small-3.1-24b-instruct": {
- id: "mistralai/mistral-small-3.1-24b-instruct",
- name: "Mistral: Mistral Small 3.1 24B",
+ "z-ai/glm-4.5": {
+ id: "z-ai/glm-4.5",
+ name: "Z.AI: GLM 4.5",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text", "image"],
+ reasoning: true,
+ input: ["text"],
  cost: {
- input: 0.01999188,
- output: 0.0800064,
+ input: 0.32986602,
+ output: 1.3201056,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
- maxTokens: 96000,
+ maxTokens: 4096,
  },
- "qwen/qwq-32b": {
- id: "qwen/qwq-32b",
- name: "Qwen: QwQ 32B",
+ "z-ai/glm-4.5-air:free": {
+ id: "z-ai/glm-4.5-air:free",
+ name: "Z.AI: GLM 4.5 Air (free)",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.075,
- output: 0.15,
+ input: 0,
+ output: 0,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 131072,
  maxTokens: 4096,
  },
- "mistralai/mistral-saba": {
- id: "mistralai/mistral-saba",
- name: "Mistral: Saba",
+ "z-ai/glm-4.5-air": {
+ id: "z-ai/glm-4.5-air",
+ name: "Z.AI: GLM 4.5 Air",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ reasoning: true,
  input: ["text"],
  cost: {
- input: 0.19999999999999998,
- output: 0.6,
+ input: 0.14,
+ output: 0.86,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 32768,
- maxTokens: 4096,
+ contextWindow: 131072,
+ maxTokens: 131072,
  },
- "qwen/qwen-turbo": {
- id: "qwen/qwen-turbo",
- name: "Qwen: Qwen-Turbo",
+ "qwen/qwen3-235b-a22b-thinking-2507": {
+ id: "qwen/qwen3-235b-a22b-thinking-2507",
+ name: "Qwen: Qwen3 235B A22B Thinking 2507",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ reasoning: true,
  input: ["text"],
  cost: {
- input: 0.049999999999999996,
- output: 0.19999999999999998,
- cacheRead: 0.02,
+ input: 0.077968332,
+ output: 0.31202496,
+ cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 1000000,
- maxTokens: 8192,
+ contextWindow: 262144,
+ maxTokens: 4096,
  },
- "qwen/qwen-plus": {
- id: "qwen/qwen-plus",
- name: "Qwen: Qwen-Plus",
+ "z-ai/glm-4-32b": {
+ id: "z-ai/glm-4-32b",
+ name: "Z.AI: GLM 4 32B ",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.39999999999999997,
- output: 1.2,
- cacheRead: 0.16,
+ input: 0.09999999999999999,
+ output: 0.09999999999999999,
+ cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 8192,
+ contextWindow: 128000,
+ maxTokens: 4096,
  },
- "qwen/qwen-max": {
- id: "qwen/qwen-max",
- name: "Qwen: Qwen-Max ",
+ "qwen/qwen3-coder:free": {
+ id: "qwen/qwen3-coder:free",
+ name: "Qwen: Qwen3 Coder 480B A35B (free)",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 1.5999999999999999,
- output: 6.3999999999999995,
- cacheRead: 0.64,
+ input: 0,
+ output: 0,
+ cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 32768,
- maxTokens: 8192,
+ contextWindow: 262144,
+ maxTokens: 4096,
  },
- "mistralai/mistral-small-24b-instruct-2501": {
- id: "mistralai/mistral-small-24b-instruct-2501",
- name: "Mistral: Mistral Small 3",
+ "qwen/qwen3-coder": {
+ id: "qwen/qwen3-coder",
+ name: "Qwen: Qwen3 Coder 480B A35B",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.01999188,
- output: 0.0800064,
+ input: 0.19999999999999998,
+ output: 0.7999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 32768,
+ contextWindow: 262144,
  maxTokens: 4096,
  },
- "deepseek/deepseek-r1-distill-llama-70b": {
- id: "deepseek/deepseek-r1-distill-llama-70b",
- name: "DeepSeek: R1 Distill Llama 70B",
+ "qwen/qwen3-235b-a22b-2507": {
+ id: "qwen/qwen3-235b-a22b-2507",
+ name: "Qwen: Qwen3 235B A22B Instruct 2507",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
+ reasoning: false,
  input: ["text"],
  cost: {
- input: 0.025915399999999998,
- output: 0.103712,
+ input: 0.077968332,
+ output: 0.31202496,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 262144,
  maxTokens: 4096,
  },
- "deepseek/deepseek-r1": {
- id: "deepseek/deepseek-r1",
- name: "DeepSeek: R1",
+ "moonshotai/kimi-k2:free": {
+ id: "moonshotai/kimi-k2:free",
+ name: "MoonshotAI: Kimi K2 (free)",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: true,
+ reasoning: false,
  input: ["text"],
  cost: {
- input: 0.39999999999999997,
- output: 2,
+ input: 0,
+ output: 0,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 163840,
- maxTokens: 163840,
+ contextWindow: 32768,
+ maxTokens: 4096,
  },
- "mistralai/codestral-2501": {
- id: "mistralai/codestral-2501",
- name: "Mistral: Codestral 2501",
+ "moonshotai/kimi-k2": {
+ id: "moonshotai/kimi-k2",
+ name: "MoonshotAI: Kimi K2",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.3,
- output: 0.8999999999999999,
+ input: 0.14,
+ output: 2.4899999999999998,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 4096,
+ contextWindow: 63000,
+ maxTokens: 63000,
  },
- "deepseek/deepseek-chat": {
- id: "deepseek/deepseek-chat",
- name: "DeepSeek: DeepSeek V3",
+ "mistralai/devstral-medium": {
+ id: "mistralai/devstral-medium",
+ name: "Mistral: Devstral Medium",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.1999188,
- output: 0.800064,
+ input: 0.39999999999999997,
+ output: 2,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 163840,
+ contextWindow: 131072,
  maxTokens: 4096,
  },
- "meta-llama/llama-3.3-70b-instruct:free": {
- id: "meta-llama/llama-3.3-70b-instruct:free",
- name: "Meta: Llama 3.3 70B Instruct (free)",
+ "mistralai/devstral-small": {
+ id: "mistralai/devstral-small",
+ name: "Mistral: Devstral Small 1.1",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0,
- output: 0,
+ input: 0.07,
+ output: 0.28,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 65536,
+ contextWindow: 128000,
  maxTokens: 4096,
  },
- "meta-llama/llama-3.3-70b-instruct": {
- id: "meta-llama/llama-3.3-70b-instruct",
- name: "Meta: Llama 3.3 70B Instruct",
+ "inception/mercury": {
+ id: "inception/mercury",
+ name: "Inception: Mercury",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text"],
  cost: {
- input: 0.038000000000000006,
- output: 0.12,
+ input: 0.25,
+ output: 1,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
+ contextWindow: 128000,
  maxTokens: 16384,
  },
- "amazon/nova-lite-v1": {
- id: "amazon/nova-lite-v1",
- name: "Amazon: Nova Lite 1.0",
+ "mistralai/mistral-small-3.2-24b-instruct:free": {
+ id: "mistralai/mistral-small-3.2-24b-instruct:free",
+ name: "Mistral: Mistral Small 3.2 24B (free)",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
  input: ["text", "image"],
  cost: {
- input: 0.06,
- output: 0.24,
+ input: 0,
+ output: 0,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 300000,
- maxTokens: 5120,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "amazon/nova-micro-v1": {
- id: "amazon/nova-micro-v1",
- name: "Amazon: Nova Micro 1.0",
+ "mistralai/mistral-small-3.2-24b-instruct": {
+ id: "mistralai/mistral-small-3.2-24b-instruct",
+ name: "Mistral: Mistral Small 3.2 24B",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
  reasoning: false,
- input: ["text"],
+ input: ["text", "image"],
  cost: {
- input: 0.035,
- output: 0.14,
+ input: 0.049999999999999996,
+ output: 0.09999999999999999,
  cacheRead: 0,
  cacheWrite: 0,
  },
  contextWindow: 128000,
- maxTokens: 5120,
+ maxTokens: 4096,
  },
- "amazon/nova-pro-v1": {
- id: "amazon/nova-pro-v1",
- name: "Amazon: Nova Pro 1.0",
+ "minimax/minimax-m1": {
+ id: "minimax/minimax-m1",
+ name: "MiniMax: MiniMax M1",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
- input: ["text", "image"],
+ reasoning: true,
+ input: ["text"],
  cost: {
- input: 0.7999999999999999,
- output: 3.1999999999999997,
+ input: 0.3,
+ output: 1.6500000000000001,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 300000,
- maxTokens: 5120,
+ contextWindow: 1000000,
+ maxTokens: 40000,
  },
- "mistralai/mistral-large-2411": {
- id: "mistralai/mistral-large-2411",
- name: "Mistral Large 2411",
+ "mistralai/magistral-small-2506": {
+ id: "mistralai/magistral-small-2506",
+ name: "Mistral: Magistral Small 2506",
  provider: "openrouter",
  baseUrl: "https://openrouter.ai/api/v1",
- reasoning: false,
+ reasoning: true,
  input: ["text"],
  cost: {
- input: 2,
- output: 6,
+ input: 0.5,
+ output: 1.5,
  cacheRead: 0,
1369
1387
  cacheWrite: 0,
1370
1388
  },
1371
- contextWindow: 131072,
1372
- maxTokens: 4096,
1389
+ contextWindow: 40000,
1390
+ maxTokens: 40000,
1373
1391
  },
1374
- "mistralai/mistral-large-2407": {
1375
- id: "mistralai/mistral-large-2407",
1376
- name: "Mistral Large 2407",
1392
+ "mistralai/magistral-medium-2506": {
1393
+ id: "mistralai/magistral-medium-2506",
1394
+ name: "Mistral: Magistral Medium 2506",
1377
1395
  provider: "openrouter",
1378
1396
  baseUrl: "https://openrouter.ai/api/v1",
1379
- reasoning: false,
1397
+ reasoning: true,
1380
1398
  input: ["text"],
1381
1399
  cost: {
1382
1400
  input: 2,
1383
- output: 6,
1401
+ output: 5,
1384
1402
  cacheRead: 0,
1385
1403
  cacheWrite: 0,
1386
1404
  },
1387
- contextWindow: 131072,
1388
- maxTokens: 4096,
1405
+ contextWindow: 40960,
1406
+ maxTokens: 40000,
1389
1407
  },
1390
- "mistralai/pixtral-large-2411": {
1391
- id: "mistralai/pixtral-large-2411",
1392
- name: "Mistral: Pixtral Large 2411",
1408
+ "mistralai/magistral-medium-2506:thinking": {
1409
+ id: "mistralai/magistral-medium-2506:thinking",
1410
+ name: "Mistral: Magistral Medium 2506 (thinking)",
1393
1411
  provider: "openrouter",
1394
1412
  baseUrl: "https://openrouter.ai/api/v1",
1395
- reasoning: false,
1396
- input: ["text", "image"],
1413
+ reasoning: true,
1414
+ input: ["text"],
1397
1415
  cost: {
1398
1416
  input: 2,
1399
- output: 6,
1417
+ output: 5,
1400
1418
  cacheRead: 0,
1401
1419
  cacheWrite: 0,
1402
1420
  },
1403
- contextWindow: 131072,
1421
+ contextWindow: 40960,
1422
+ maxTokens: 40000,
1423
+ },
1424
+ "deepseek/deepseek-r1-0528": {
1425
+ id: "deepseek/deepseek-r1-0528",
1426
+ name: "DeepSeek: R1 0528",
1427
+ provider: "openrouter",
1428
+ baseUrl: "https://openrouter.ai/api/v1",
1429
+ reasoning: true,
1430
+ input: ["text"],
1431
+ cost: {
1432
+ input: 0.1999188,
1433
+ output: 0.800064,
1434
+ cacheRead: 0,
1435
+ cacheWrite: 0,
1436
+ },
1437
+ contextWindow: 163840,
1404
1438
  maxTokens: 4096,
1405
1439
  },
1406
- "thedrummer/unslopnemo-12b": {
1407
- id: "thedrummer/unslopnemo-12b",
1408
- name: "TheDrummer: UnslopNemo 12B",
1440
+ "mistralai/devstral-small-2505:free": {
1441
+ id: "mistralai/devstral-small-2505:free",
1442
+ name: "Mistral: Devstral Small 2505 (free)",
1409
1443
  provider: "openrouter",
1410
1444
  baseUrl: "https://openrouter.ai/api/v1",
1411
1445
  reasoning: false,
1412
1446
  input: ["text"],
1413
1447
  cost: {
1414
- input: 0.39999999999999997,
1415
- output: 0.39999999999999997,
1448
+ input: 0,
1449
+ output: 0,
1416
1450
  cacheRead: 0,
1417
1451
  cacheWrite: 0,
1418
1452
  },
1419
1453
  contextWindow: 32768,
1420
1454
  maxTokens: 4096,
1421
1455
  },
1422
- "mistralai/ministral-8b": {
1423
- id: "mistralai/ministral-8b",
1424
- name: "Mistral: Ministral 8B",
1456
+ "mistralai/devstral-small-2505": {
1457
+ id: "mistralai/devstral-small-2505",
1458
+ name: "Mistral: Devstral Small 2505",
1425
1459
  provider: "openrouter",
1426
1460
  baseUrl: "https://openrouter.ai/api/v1",
1427
1461
  reasoning: false,
1428
1462
  input: ["text"],
1429
1463
  cost: {
1430
- input: 0.09999999999999999,
1431
- output: 0.09999999999999999,
1464
+ input: 0.01999188,
1465
+ output: 0.0800064,
1432
1466
  cacheRead: 0,
1433
1467
  cacheWrite: 0,
1434
1468
  },
1435
- contextWindow: 128000,
1469
+ contextWindow: 131072,
1436
1470
  maxTokens: 4096,
1437
1471
  },
1438
- "nvidia/llama-3.1-nemotron-70b-instruct": {
1439
- id: "nvidia/llama-3.1-nemotron-70b-instruct",
1440
- name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1472
+ "meta-llama/llama-3.3-8b-instruct:free": {
1473
+ id: "meta-llama/llama-3.3-8b-instruct:free",
1474
+ name: "Meta: Llama 3.3 8B Instruct (free)",
1441
1475
  provider: "openrouter",
1442
1476
  baseUrl: "https://openrouter.ai/api/v1",
1443
1477
  reasoning: false,
1444
1478
  input: ["text"],
1445
1479
  cost: {
1446
- input: 0.12,
1447
- output: 0.3,
1480
+ input: 0,
1481
+ output: 0,
1448
1482
  cacheRead: 0,
1449
1483
  cacheWrite: 0,
1450
1484
  },
1451
- contextWindow: 131072,
1452
- maxTokens: 16384,
1485
+ contextWindow: 128000,
1486
+ maxTokens: 4028,
1453
1487
  },
1454
- "thedrummer/rocinante-12b": {
1455
- id: "thedrummer/rocinante-12b",
1456
- name: "TheDrummer: Rocinante 12B",
1488
+ "mistralai/mistral-medium-3": {
1489
+ id: "mistralai/mistral-medium-3",
1490
+ name: "Mistral: Mistral Medium 3",
1457
1491
  provider: "openrouter",
1458
1492
  baseUrl: "https://openrouter.ai/api/v1",
1459
1493
  reasoning: false,
1460
- input: ["text"],
1494
+ input: ["text", "image"],
1461
1495
  cost: {
1462
- input: 0.16999999999999998,
1463
- output: 0.43,
1496
+ input: 0.39999999999999997,
1497
+ output: 2,
1464
1498
  cacheRead: 0,
1465
1499
  cacheWrite: 0,
1466
1500
  },
1467
- contextWindow: 32768,
1501
+ contextWindow: 131072,
1468
1502
  maxTokens: 4096,
1469
1503
  },
1470
- "meta-llama/llama-3.2-3b-instruct": {
1471
- id: "meta-llama/llama-3.2-3b-instruct",
1472
- name: "Meta: Llama 3.2 3B Instruct",
1504
+ "arcee-ai/virtuoso-large": {
1505
+ id: "arcee-ai/virtuoso-large",
1506
+ name: "Arcee AI: Virtuoso Large",
1473
1507
  provider: "openrouter",
1474
1508
  baseUrl: "https://openrouter.ai/api/v1",
1475
1509
  reasoning: false,
1476
1510
  input: ["text"],
1477
1511
  cost: {
1478
- input: 0.003,
1479
- output: 0.006,
1512
+ input: 0.75,
1513
+ output: 1.2,
1480
1514
  cacheRead: 0,
1481
1515
  cacheWrite: 0,
1482
1516
  },
1483
- contextWindow: 20000,
1484
- maxTokens: 20000,
1517
+ contextWindow: 131072,
1518
+ maxTokens: 64000,
1485
1519
  },
1486
- "qwen/qwen-2.5-72b-instruct": {
1487
- id: "qwen/qwen-2.5-72b-instruct",
1488
- name: "Qwen2.5 72B Instruct",
1520
+ "inception/mercury-coder": {
1521
+ id: "inception/mercury-coder",
1522
+ name: "Inception: Mercury Coder",
1489
1523
  provider: "openrouter",
1490
1524
  baseUrl: "https://openrouter.ai/api/v1",
1491
1525
  reasoning: false,
1492
1526
  input: ["text"],
1493
1527
  cost: {
1494
- input: 0.051830799999999996,
1495
- output: 0.207424,
1528
+ input: 0.25,
1529
+ output: 1,
1496
1530
  cacheRead: 0,
1497
1531
  cacheWrite: 0,
1498
1532
  },
1499
- contextWindow: 32768,
1500
- maxTokens: 4096,
1533
+ contextWindow: 128000,
1534
+ maxTokens: 16384,
1501
1535
  },
1502
- "mistralai/pixtral-12b": {
1503
- id: "mistralai/pixtral-12b",
1504
- name: "Mistral: Pixtral 12B",
1536
+ "qwen/qwen3-4b:free": {
1537
+ id: "qwen/qwen3-4b:free",
1538
+ name: "Qwen: Qwen3 4B (free)",
1505
1539
  provider: "openrouter",
1506
1540
  baseUrl: "https://openrouter.ai/api/v1",
1507
- reasoning: false,
1508
- input: ["text", "image"],
1541
+ reasoning: true,
1542
+ input: ["text"],
1509
1543
  cost: {
1510
- input: 0.09999999999999999,
1511
- output: 0.09999999999999999,
1544
+ input: 0,
1545
+ output: 0,
1512
1546
  cacheRead: 0,
1513
1547
  cacheWrite: 0,
1514
1548
  },
1515
- contextWindow: 32768,
1549
+ contextWindow: 40960,
1516
1550
  maxTokens: 4096,
1517
1551
  },
1518
- "cohere/command-r-plus-08-2024": {
1519
- id: "cohere/command-r-plus-08-2024",
1520
- name: "Cohere: Command R+ (08-2024)",
1552
+ "qwen/qwen3-30b-a3b": {
1553
+ id: "qwen/qwen3-30b-a3b",
1554
+ name: "Qwen: Qwen3 30B A3B",
1521
1555
  provider: "openrouter",
1522
1556
  baseUrl: "https://openrouter.ai/api/v1",
1523
- reasoning: false,
1557
+ reasoning: true,
1524
1558
  input: ["text"],
1525
1559
  cost: {
1526
- input: 2.5,
1527
- output: 10,
1560
+ input: 0.01999188,
1561
+ output: 0.0800064,
1528
1562
  cacheRead: 0,
1529
1563
  cacheWrite: 0,
1530
1564
  },
1531
- contextWindow: 128000,
1532
- maxTokens: 4000,
1565
+ contextWindow: 40960,
1566
+ maxTokens: 4096,
1533
1567
  },
1534
- "cohere/command-r-08-2024": {
1535
- id: "cohere/command-r-08-2024",
1536
- name: "Cohere: Command R (08-2024)",
1568
+ "qwen/qwen3-14b": {
1569
+ id: "qwen/qwen3-14b",
1570
+ name: "Qwen: Qwen3 14B",
1537
1571
  provider: "openrouter",
1538
1572
  baseUrl: "https://openrouter.ai/api/v1",
1539
- reasoning: false,
1573
+ reasoning: true,
1540
1574
  input: ["text"],
1541
1575
  cost: {
1542
- input: 0.15,
1543
- output: 0.6,
1576
+ input: 0.06,
1577
+ output: 0.24,
1544
1578
  cacheRead: 0,
1545
1579
  cacheWrite: 0,
1546
1580
  },
1547
- contextWindow: 128000,
1548
- maxTokens: 4000,
1581
+ contextWindow: 40960,
1582
+ maxTokens: 40960,
1549
1583
  },
1550
- "microsoft/phi-3.5-mini-128k-instruct": {
1551
- id: "microsoft/phi-3.5-mini-128k-instruct",
1552
- name: "Microsoft: Phi-3.5 Mini 128K Instruct",
1584
+ "qwen/qwen3-32b": {
1585
+ id: "qwen/qwen3-32b",
1586
+ name: "Qwen: Qwen3 32B",
1553
1587
  provider: "openrouter",
1554
1588
  baseUrl: "https://openrouter.ai/api/v1",
1555
- reasoning: false,
1589
+ reasoning: true,
1556
1590
  input: ["text"],
1557
1591
  cost: {
1558
- input: 0.09999999999999999,
1559
- output: 0.09999999999999999,
1592
+ input: 0.017992691999999998,
1593
+ output: 0.07200576,
1560
1594
  cacheRead: 0,
1561
1595
  cacheWrite: 0,
1562
1596
  },
1563
- contextWindow: 128000,
1597
+ contextWindow: 40960,
1564
1598
  maxTokens: 4096,
1565
1599
  },
1566
- "nousresearch/hermes-3-llama-3.1-70b": {
1567
- id: "nousresearch/hermes-3-llama-3.1-70b",
1568
- name: "Nous: Hermes 3 70B Instruct",
1600
+ "qwen/qwen3-235b-a22b:free": {
1601
+ id: "qwen/qwen3-235b-a22b:free",
1602
+ name: "Qwen: Qwen3 235B A22B (free)",
1569
1603
  provider: "openrouter",
1570
1604
  baseUrl: "https://openrouter.ai/api/v1",
1571
- reasoning: false,
1605
+ reasoning: true,
1572
1606
  input: ["text"],
1573
1607
  cost: {
1574
- input: 0.09999999999999999,
1575
- output: 0.28,
1608
+ input: 0,
1609
+ output: 0,
1576
1610
  cacheRead: 0,
1577
1611
  cacheWrite: 0,
1578
1612
  },
1579
1613
  contextWindow: 131072,
1580
1614
  maxTokens: 4096,
1581
1615
  },
1582
- "meta-llama/llama-3.1-8b-instruct": {
1583
- id: "meta-llama/llama-3.1-8b-instruct",
1584
- name: "Meta: Llama 3.1 8B Instruct",
1616
+ "qwen/qwen3-235b-a22b": {
1617
+ id: "qwen/qwen3-235b-a22b",
1618
+ name: "Qwen: Qwen3 235B A22B",
1585
1619
  provider: "openrouter",
1586
1620
  baseUrl: "https://openrouter.ai/api/v1",
1587
- reasoning: false,
1621
+ reasoning: true,
1588
1622
  input: ["text"],
1589
1623
  cost: {
1590
- input: 0.015,
1591
- output: 0.02,
1624
+ input: 0.13,
1625
+ output: 0.6,
1592
1626
  cacheRead: 0,
1593
1627
  cacheWrite: 0,
1594
1628
  },
1595
- contextWindow: 131072,
1596
- maxTokens: 16384,
1629
+ contextWindow: 40960,
1630
+ maxTokens: 40960,
1597
1631
  },
1598
- "meta-llama/llama-3.1-70b-instruct": {
1599
- id: "meta-llama/llama-3.1-70b-instruct",
1600
- name: "Meta: Llama 3.1 70B Instruct",
1632
+ "meta-llama/llama-4-maverick:free": {
1633
+ id: "meta-llama/llama-4-maverick:free",
1634
+ name: "Meta: Llama 4 Maverick (free)",
1601
1635
  provider: "openrouter",
1602
1636
  baseUrl: "https://openrouter.ai/api/v1",
1603
1637
  reasoning: false,
1604
- input: ["text"],
1638
+ input: ["text", "image"],
1605
1639
  cost: {
1606
- input: 0.09999999999999999,
1607
- output: 0.28,
1640
+ input: 0,
1641
+ output: 0,
1608
1642
  cacheRead: 0,
1609
1643
  cacheWrite: 0,
1610
1644
  },
1611
- contextWindow: 131072,
1612
- maxTokens: 16384,
1645
+ contextWindow: 128000,
1646
+ maxTokens: 4028,
1613
1647
  },
1614
- "meta-llama/llama-3.1-405b-instruct": {
1615
- id: "meta-llama/llama-3.1-405b-instruct",
1616
- name: "Meta: Llama 3.1 405B Instruct",
1648
+ "meta-llama/llama-4-maverick": {
1649
+ id: "meta-llama/llama-4-maverick",
1650
+ name: "Meta: Llama 4 Maverick",
1617
1651
  provider: "openrouter",
1618
1652
  baseUrl: "https://openrouter.ai/api/v1",
1619
1653
  reasoning: false,
1620
- input: ["text"],
1654
+ input: ["text", "image"],
1621
1655
  cost: {
1622
- input: 0.7999999999999999,
1623
- output: 0.7999999999999999,
1656
+ input: 0.15,
1657
+ output: 0.6,
1624
1658
  cacheRead: 0,
1625
1659
  cacheWrite: 0,
1626
1660
  },
1627
- contextWindow: 32768,
1661
+ contextWindow: 1048576,
1628
1662
  maxTokens: 16384,
1629
1663
  },
1630
- "mistralai/mistral-nemo": {
1631
- id: "mistralai/mistral-nemo",
1632
- name: "Mistral: Mistral Nemo",
1664
+ "meta-llama/llama-4-scout:free": {
1665
+ id: "meta-llama/llama-4-scout:free",
1666
+ name: "Meta: Llama 4 Scout (free)",
1633
1667
  provider: "openrouter",
1634
1668
  baseUrl: "https://openrouter.ai/api/v1",
1635
1669
  reasoning: false,
1636
- input: ["text"],
1670
+ input: ["text", "image"],
1637
1671
  cost: {
1638
- input: 0.0075,
1639
- output: 0.049999999999999996,
1640
- cacheRead: 0,
1672
+ input: 0,
1673
+ output: 0,
1674
+ cacheRead: 0,
1641
1675
  cacheWrite: 0,
1642
1676
  },
1643
- contextWindow: 32000,
1644
- maxTokens: 4096,
1677
+ contextWindow: 128000,
1678
+ maxTokens: 4028,
1645
1679
  },
1646
- "mistralai/mistral-7b-instruct-v0.3": {
1647
- id: "mistralai/mistral-7b-instruct-v0.3",
1648
- name: "Mistral: Mistral 7B Instruct v0.3",
1680
+ "meta-llama/llama-4-scout": {
1681
+ id: "meta-llama/llama-4-scout",
1682
+ name: "Meta: Llama 4 Scout",
1649
1683
  provider: "openrouter",
1650
1684
  baseUrl: "https://openrouter.ai/api/v1",
1651
1685
  reasoning: false,
1652
- input: ["text"],
1686
+ input: ["text", "image"],
1653
1687
  cost: {
1654
- input: 0.028,
1655
- output: 0.054,
1688
+ input: 0.08,
1689
+ output: 0.3,
1656
1690
  cacheRead: 0,
1657
1691
  cacheWrite: 0,
1658
1692
  },
1659
- contextWindow: 32768,
1660
- maxTokens: 16384,
1693
+ contextWindow: 1048576,
1694
+ maxTokens: 1048576,
1661
1695
  },
1662
- "mistralai/mistral-7b-instruct:free": {
1663
- id: "mistralai/mistral-7b-instruct:free",
1664
- name: "Mistral: Mistral 7B Instruct (free)",
1696
+ "deepseek/deepseek-chat-v3-0324:free": {
1697
+ id: "deepseek/deepseek-chat-v3-0324:free",
1698
+ name: "DeepSeek: DeepSeek V3 0324 (free)",
1665
1699
  provider: "openrouter",
1666
1700
  baseUrl: "https://openrouter.ai/api/v1",
1667
1701
  reasoning: false,
@@ -1672,1216 +1706,972 @@ export const PROVIDERS = {
1672
1706
  cacheRead: 0,
1673
1707
  cacheWrite: 0,
1674
1708
  },
1675
- contextWindow: 32768,
1676
- maxTokens: 16384,
1709
+ contextWindow: 163840,
1710
+ maxTokens: 4096,
1677
1711
  },
1678
- "mistralai/mistral-7b-instruct": {
1679
- id: "mistralai/mistral-7b-instruct",
1680
- name: "Mistral: Mistral 7B Instruct",
1712
+ "deepseek/deepseek-chat-v3-0324": {
1713
+ id: "deepseek/deepseek-chat-v3-0324",
1714
+ name: "DeepSeek: DeepSeek V3 0324",
1681
1715
  provider: "openrouter",
1682
1716
  baseUrl: "https://openrouter.ai/api/v1",
1683
1717
  reasoning: false,
1684
1718
  input: ["text"],
1685
1719
  cost: {
1686
- input: 0.028,
1687
- output: 0.054,
1720
+ input: 0.1999188,
1721
+ output: 0.800064,
1688
1722
  cacheRead: 0,
1689
1723
  cacheWrite: 0,
1690
1724
  },
1691
- contextWindow: 32768,
1692
- maxTokens: 16384,
1725
+ contextWindow: 163840,
1726
+ maxTokens: 4096,
1693
1727
  },
1694
- "microsoft/phi-3-mini-128k-instruct": {
1695
- id: "microsoft/phi-3-mini-128k-instruct",
1696
- name: "Microsoft: Phi-3 Mini 128K Instruct",
1728
+ "mistralai/mistral-small-3.1-24b-instruct:free": {
1729
+ id: "mistralai/mistral-small-3.1-24b-instruct:free",
1730
+ name: "Mistral: Mistral Small 3.1 24B (free)",
1697
1731
  provider: "openrouter",
1698
1732
  baseUrl: "https://openrouter.ai/api/v1",
1699
1733
  reasoning: false,
1700
- input: ["text"],
1734
+ input: ["text", "image"],
1701
1735
  cost: {
1702
- input: 0.09999999999999999,
1703
- output: 0.09999999999999999,
1736
+ input: 0,
1737
+ output: 0,
1704
1738
  cacheRead: 0,
1705
1739
  cacheWrite: 0,
1706
1740
  },
1707
1741
  contextWindow: 128000,
1708
1742
  maxTokens: 4096,
1709
1743
  },
1710
- "microsoft/phi-3-medium-128k-instruct": {
1711
- id: "microsoft/phi-3-medium-128k-instruct",
1712
- name: "Microsoft: Phi-3 Medium 128K Instruct",
1744
+ "mistralai/mistral-small-3.1-24b-instruct": {
1745
+ id: "mistralai/mistral-small-3.1-24b-instruct",
1746
+ name: "Mistral: Mistral Small 3.1 24B",
1713
1747
  provider: "openrouter",
1714
1748
  baseUrl: "https://openrouter.ai/api/v1",
1715
1749
  reasoning: false,
1750
+ input: ["text", "image"],
1751
+ cost: {
1752
+ input: 0.01999188,
1753
+ output: 0.0800064,
1754
+ cacheRead: 0,
1755
+ cacheWrite: 0,
1756
+ },
1757
+ contextWindow: 131072,
1758
+ maxTokens: 96000,
1759
+ },
1760
+ "qwen/qwq-32b": {
1761
+ id: "qwen/qwq-32b",
1762
+ name: "Qwen: QwQ 32B",
1763
+ provider: "openrouter",
1764
+ baseUrl: "https://openrouter.ai/api/v1",
1765
+ reasoning: true,
1716
1766
  input: ["text"],
1717
1767
  cost: {
1718
- input: 1,
1719
- output: 1,
1768
+ input: 0.075,
1769
+ output: 0.15,
1720
1770
  cacheRead: 0,
1721
1771
  cacheWrite: 0,
1722
1772
  },
1723
- contextWindow: 128000,
1773
+ contextWindow: 131072,
1724
1774
  maxTokens: 4096,
1725
1775
  },
1726
- "meta-llama/llama-3-70b-instruct": {
1727
- id: "meta-llama/llama-3-70b-instruct",
1728
- name: "Meta: Llama 3 70B Instruct",
1776
+ "mistralai/mistral-saba": {
1777
+ id: "mistralai/mistral-saba",
1778
+ name: "Mistral: Saba",
1729
1779
  provider: "openrouter",
1730
1780
  baseUrl: "https://openrouter.ai/api/v1",
1731
1781
  reasoning: false,
1732
1782
  input: ["text"],
1733
1783
  cost: {
1734
- input: 0.3,
1735
- output: 0.39999999999999997,
1784
+ input: 0.19999999999999998,
1785
+ output: 0.6,
1736
1786
  cacheRead: 0,
1737
1787
  cacheWrite: 0,
1738
1788
  },
1739
- contextWindow: 8192,
1740
- maxTokens: 16384,
1789
+ contextWindow: 32768,
1790
+ maxTokens: 4096,
1741
1791
  },
1742
- "meta-llama/llama-3-8b-instruct": {
1743
- id: "meta-llama/llama-3-8b-instruct",
1744
- name: "Meta: Llama 3 8B Instruct",
1792
+ "qwen/qwen-turbo": {
1793
+ id: "qwen/qwen-turbo",
1794
+ name: "Qwen: Qwen-Turbo",
1745
1795
  provider: "openrouter",
1746
1796
  baseUrl: "https://openrouter.ai/api/v1",
1747
1797
  reasoning: false,
1748
1798
  input: ["text"],
1749
1799
  cost: {
1750
- input: 0.03,
1751
- output: 0.06,
1752
- cacheRead: 0,
1800
+ input: 0.049999999999999996,
1801
+ output: 0.19999999999999998,
1802
+ cacheRead: 0.02,
1753
1803
  cacheWrite: 0,
1754
1804
  },
1755
- contextWindow: 8192,
1756
- maxTokens: 16384,
1805
+ contextWindow: 1000000,
1806
+ maxTokens: 8192,
1757
1807
  },
1758
- "mistralai/mixtral-8x22b-instruct": {
1759
- id: "mistralai/mixtral-8x22b-instruct",
1760
- name: "Mistral: Mixtral 8x22B Instruct",
1808
+ "qwen/qwen-plus": {
1809
+ id: "qwen/qwen-plus",
1810
+ name: "Qwen: Qwen-Plus",
1761
1811
  provider: "openrouter",
1762
1812
  baseUrl: "https://openrouter.ai/api/v1",
1763
1813
  reasoning: false,
1764
1814
  input: ["text"],
1765
1815
  cost: {
1766
- input: 0.8999999999999999,
1767
- output: 0.8999999999999999,
1768
- cacheRead: 0,
1816
+ input: 0.39999999999999997,
1817
+ output: 1.2,
1818
+ cacheRead: 0.16,
1769
1819
  cacheWrite: 0,
1770
1820
  },
1771
- contextWindow: 65536,
1772
- maxTokens: 4096,
1821
+ contextWindow: 131072,
1822
+ maxTokens: 8192,
1773
1823
  },
1774
- "cohere/command-r-plus": {
1775
- id: "cohere/command-r-plus",
1776
- name: "Cohere: Command R+",
1824
+ "qwen/qwen-max": {
1825
+ id: "qwen/qwen-max",
1826
+ name: "Qwen: Qwen-Max ",
1777
1827
  provider: "openrouter",
1778
1828
  baseUrl: "https://openrouter.ai/api/v1",
1779
1829
  reasoning: false,
1780
1830
  input: ["text"],
1781
1831
  cost: {
1782
- input: 3,
1783
- output: 15,
1784
- cacheRead: 0,
1832
+ input: 1.5999999999999999,
1833
+ output: 6.3999999999999995,
1834
+ cacheRead: 0.64,
1785
1835
  cacheWrite: 0,
1786
1836
  },
1787
- contextWindow: 128000,
1788
- maxTokens: 4000,
1837
+ contextWindow: 32768,
1838
+ maxTokens: 8192,
1789
1839
  },
1790
- "cohere/command-r-plus-04-2024": {
1791
- id: "cohere/command-r-plus-04-2024",
1792
- name: "Cohere: Command R+ (04-2024)",
1840
+ "mistralai/mistral-small-24b-instruct-2501": {
1841
+ id: "mistralai/mistral-small-24b-instruct-2501",
1842
+ name: "Mistral: Mistral Small 3",
1793
1843
  provider: "openrouter",
1794
1844
  baseUrl: "https://openrouter.ai/api/v1",
1795
1845
  reasoning: false,
1796
1846
  input: ["text"],
1797
1847
  cost: {
1798
- input: 3,
1799
- output: 15,
1848
+ input: 0.01999188,
1849
+ output: 0.0800064,
1800
1850
  cacheRead: 0,
1801
1851
  cacheWrite: 0,
1802
1852
  },
1803
- contextWindow: 128000,
1804
- maxTokens: 4000,
1853
+ contextWindow: 32768,
1854
+ maxTokens: 4096,
1805
1855
  },
1806
- "cohere/command-r": {
1807
- id: "cohere/command-r",
1808
- name: "Cohere: Command R",
1856
+ "deepseek/deepseek-r1-distill-llama-70b": {
1857
+ id: "deepseek/deepseek-r1-distill-llama-70b",
1858
+ name: "DeepSeek: R1 Distill Llama 70B",
1809
1859
  provider: "openrouter",
1810
1860
  baseUrl: "https://openrouter.ai/api/v1",
1811
- reasoning: false,
1861
+ reasoning: true,
1812
1862
  input: ["text"],
1813
1863
  cost: {
1814
- input: 0.5,
1815
- output: 1.5,
1864
+ input: 0.025915399999999998,
1865
+ output: 0.103712,
1816
1866
  cacheRead: 0,
1817
1867
  cacheWrite: 0,
1818
1868
  },
1819
- contextWindow: 128000,
1820
- maxTokens: 4000,
1869
+ contextWindow: 131072,
1870
+ maxTokens: 4096,
1821
1871
  },
1822
- "cohere/command-r-03-2024": {
1823
- id: "cohere/command-r-03-2024",
1824
- name: "Cohere: Command R (03-2024)",
1872
+ "deepseek/deepseek-r1": {
1873
+ id: "deepseek/deepseek-r1",
1874
+ name: "DeepSeek: R1",
1825
1875
  provider: "openrouter",
1826
1876
  baseUrl: "https://openrouter.ai/api/v1",
1827
- reasoning: false,
1877
+ reasoning: true,
1828
1878
  input: ["text"],
1829
1879
  cost: {
1830
- input: 0.5,
1831
- output: 1.5,
1880
+ input: 0.39999999999999997,
1881
+ output: 2,
1832
1882
  cacheRead: 0,
1833
1883
  cacheWrite: 0,
1834
1884
  },
1835
- contextWindow: 128000,
1836
- maxTokens: 4000,
1885
+ contextWindow: 163840,
1886
+ maxTokens: 163840,
1837
1887
  },
1838
- "mistralai/mistral-large": {
1839
- id: "mistralai/mistral-large",
1840
- name: "Mistral Large",
1888
+ "mistralai/codestral-2501": {
1889
+ id: "mistralai/codestral-2501",
1890
+ name: "Mistral: Codestral 2501",
1841
1891
  provider: "openrouter",
1842
1892
  baseUrl: "https://openrouter.ai/api/v1",
1843
1893
  reasoning: false,
1844
1894
  input: ["text"],
1845
1895
  cost: {
1846
- input: 2,
1847
- output: 6,
1896
+ input: 0.3,
1897
+ output: 0.8999999999999999,
1848
1898
  cacheRead: 0,
1849
1899
  cacheWrite: 0,
1850
1900
  },
1851
- contextWindow: 128000,
1901
+ contextWindow: 262144,
1852
1902
  maxTokens: 4096,
1853
1903
  },
1854
- "mistralai/mistral-tiny": {
1855
- id: "mistralai/mistral-tiny",
1856
- name: "Mistral Tiny",
1904
+ "deepseek/deepseek-chat": {
1905
+ id: "deepseek/deepseek-chat",
1906
+ name: "DeepSeek: DeepSeek V3",
1857
1907
  provider: "openrouter",
1858
1908
  baseUrl: "https://openrouter.ai/api/v1",
1859
1909
  reasoning: false,
1860
1910
  input: ["text"],
1861
1911
  cost: {
1862
- input: 0.25,
1863
- output: 0.25,
1912
+ input: 0.1999188,
1913
+ output: 0.800064,
1864
1914
  cacheRead: 0,
1865
1915
  cacheWrite: 0,
1866
1916
  },
1867
- contextWindow: 32768,
1917
+ contextWindow: 163840,
1868
1918
  maxTokens: 4096,
1869
1919
  },
1870
- "mistralai/mistral-small": {
1871
- id: "mistralai/mistral-small",
1872
- name: "Mistral Small",
1920
+ "meta-llama/llama-3.3-70b-instruct:free": {
1921
+ id: "meta-llama/llama-3.3-70b-instruct:free",
1922
+ name: "Meta: Llama 3.3 70B Instruct (free)",
1873
1923
  provider: "openrouter",
1874
1924
  baseUrl: "https://openrouter.ai/api/v1",
1875
1925
  reasoning: false,
1876
1926
  input: ["text"],
1877
1927
  cost: {
1878
- input: 0.19999999999999998,
1879
- output: 0.6,
1928
+ input: 0,
1929
+ output: 0,
1880
1930
  cacheRead: 0,
1881
1931
  cacheWrite: 0,
1882
1932
  },
1883
- contextWindow: 32768,
1933
+ contextWindow: 65536,
1884
1934
  maxTokens: 4096,
1885
1935
  },
1886
- "mistralai/mixtral-8x7b-instruct": {
1887
- id: "mistralai/mixtral-8x7b-instruct",
1888
- name: "Mistral: Mixtral 8x7B Instruct",
1936
+ "meta-llama/llama-3.3-70b-instruct": {
1937
+ id: "meta-llama/llama-3.3-70b-instruct",
1938
+ name: "Meta: Llama 3.3 70B Instruct",
1889
1939
  provider: "openrouter",
1890
1940
  baseUrl: "https://openrouter.ai/api/v1",
1891
1941
  reasoning: false,
1892
1942
  input: ["text"],
1893
1943
  cost: {
1894
- input: 0.08,
1895
- output: 0.24,
1944
+ input: 0.038000000000000006,
1945
+ output: 0.12,
1896
1946
  cacheRead: 0,
1897
1947
  cacheWrite: 0,
1898
1948
  },
1899
- contextWindow: 32768,
1949
+ contextWindow: 131072,
1900
1950
  maxTokens: 16384,
1901
1951
  },
1902
- "mistralai/mistral-7b-instruct-v0.1": {
1903
- id: "mistralai/mistral-7b-instruct-v0.1",
1904
- name: "Mistral: Mistral 7B Instruct v0.1",
1952
+ "amazon/nova-lite-v1": {
1953
+ id: "amazon/nova-lite-v1",
1954
+ name: "Amazon: Nova Lite 1.0",
1905
1955
  provider: "openrouter",
1906
1956
  baseUrl: "https://openrouter.ai/api/v1",
1907
1957
  reasoning: false,
1908
- input: ["text"],
1958
+ input: ["text", "image"],
1909
1959
  cost: {
1910
- input: 0.11,
1911
- output: 0.19,
1960
+ input: 0.06,
1961
+ output: 0.24,
1912
1962
  cacheRead: 0,
1913
1963
  cacheWrite: 0,
1914
1964
  },
1915
- contextWindow: 2824,
1916
- maxTokens: 4096,
1965
+ contextWindow: 300000,
1966
+ maxTokens: 5120,
1917
1967
  },
1918
- }
1919
- },
1920
- xai: {
1921
- models: {
1922
- "grok-code-fast-1": {
1923
- id: "grok-code-fast-1",
1924
- name: "xAI: Grok Code Fast 1",
1925
- provider: "xai",
1926
- baseUrl: "https://api.x.ai/v1",
1927
- reasoning: true,
1968
+ "amazon/nova-micro-v1": {
1969
+ id: "amazon/nova-micro-v1",
1970
+ name: "Amazon: Nova Micro 1.0",
1971
+ provider: "openrouter",
1972
+ baseUrl: "https://openrouter.ai/api/v1",
1973
+ reasoning: false,
1928
1974
  input: ["text"],
1929
1975
  cost: {
1930
- input: 0.19999999999999998,
1931
- output: 1.5,
1932
- cacheRead: 0.02,
1976
+ input: 0.035,
1977
+ output: 0.14,
1978
+ cacheRead: 0,
1933
1979
  cacheWrite: 0,
1934
1980
  },
1935
- contextWindow: 256000,
1936
- maxTokens: 10000,
1981
+ contextWindow: 128000,
1982
+ maxTokens: 5120,
1937
1983
  },
1938
- "grok-4": {
1939
- id: "grok-4",
1940
- name: "xAI: Grok 4",
1941
- provider: "xai",
1942
- baseUrl: "https://api.x.ai/v1",
1943
- reasoning: true,
1984
+ "amazon/nova-pro-v1": {
1985
+ id: "amazon/nova-pro-v1",
1986
+ name: "Amazon: Nova Pro 1.0",
1987
+ provider: "openrouter",
1988
+ baseUrl: "https://openrouter.ai/api/v1",
1989
+ reasoning: false,
1944
1990
  input: ["text", "image"],
1945
1991
  cost: {
1946
- input: 3,
1947
- output: 15,
1948
- cacheRead: 0.75,
1992
+ input: 0.7999999999999999,
1993
+ output: 3.1999999999999997,
1994
+ cacheRead: 0,
1949
1995
  cacheWrite: 0,
1950
1996
  },
1951
- contextWindow: 256000,
1952
- maxTokens: 4096,
1997
+ contextWindow: 300000,
1998
+ maxTokens: 5120,
1953
1999
  },
1954
- "grok-3-mini": {
1955
- id: "grok-3-mini",
1956
- name: "xAI: Grok 3 Mini",
1957
- provider: "xai",
1958
- baseUrl: "https://api.x.ai/v1",
1959
- reasoning: true,
2000
+ "mistralai/mistral-large-2411": {
2001
+ id: "mistralai/mistral-large-2411",
2002
+ name: "Mistral Large 2411",
2003
+ provider: "openrouter",
2004
+ baseUrl: "https://openrouter.ai/api/v1",
2005
+ reasoning: false,
1960
2006
  input: ["text"],
1961
2007
  cost: {
1962
- input: 0.3,
1963
- output: 0.5,
1964
- cacheRead: 0.075,
2008
+ input: 2,
2009
+ output: 6,
2010
+ cacheRead: 0,
1965
2011
  cacheWrite: 0,
1966
2012
  },
1967
2013
  contextWindow: 131072,
1968
2014
  maxTokens: 4096,
1969
2015
  },
1970
- "grok-3": {
1971
- id: "grok-3",
1972
- name: "xAI: Grok 3",
1973
- provider: "xai",
1974
- baseUrl: "https://api.x.ai/v1",
2016
+ "mistralai/mistral-large-2407": {
2017
+ id: "mistralai/mistral-large-2407",
2018
+ name: "Mistral Large 2407",
2019
+ provider: "openrouter",
2020
+ baseUrl: "https://openrouter.ai/api/v1",
1975
2021
  reasoning: false,
1976
2022
  input: ["text"],
1977
2023
  cost: {
1978
- input: 3,
1979
- output: 15,
1980
- cacheRead: 0.75,
2024
+ input: 2,
2025
+ output: 6,
2026
+ cacheRead: 0,
1981
2027
  cacheWrite: 0,
1982
2028
  },
1983
2029
  contextWindow: 131072,
1984
2030
  maxTokens: 4096,
1985
2031
  },
1986
- "grok-3-mini-beta": {
1987
- id: "grok-3-mini-beta",
1988
- name: "xAI: Grok 3 Mini Beta",
1989
- provider: "xai",
1990
- baseUrl: "https://api.x.ai/v1",
1991
- reasoning: true,
1992
- input: ["text"],
2032
+ "mistralai/pixtral-large-2411": {
2033
+ id: "mistralai/pixtral-large-2411",
2034
+ name: "Mistral: Pixtral Large 2411",
2035
+ provider: "openrouter",
2036
+ baseUrl: "https://openrouter.ai/api/v1",
2037
+ reasoning: false,
2038
+ input: ["text", "image"],
1993
2039
  cost: {
1994
- input: 0.3,
1995
- output: 0.5,
1996
- cacheRead: 0.075,
2040
+ input: 2,
2041
+ output: 6,
2042
+ cacheRead: 0,
1997
2043
  cacheWrite: 0,
1998
2044
  },
1999
2045
  contextWindow: 131072,
2000
2046
  maxTokens: 4096,
2001
2047
  },
2002
- "grok-3-beta": {
2003
- id: "grok-3-beta",
2004
- name: "xAI: Grok 3 Beta",
2005
- provider: "xai",
2006
- baseUrl: "https://api.x.ai/v1",
2048
+ "thedrummer/unslopnemo-12b": {
2049
+ id: "thedrummer/unslopnemo-12b",
2050
+ name: "TheDrummer: UnslopNemo 12B",
2051
+ provider: "openrouter",
2052
+ baseUrl: "https://openrouter.ai/api/v1",
2007
2053
  reasoning: false,
2008
2054
  input: ["text"],
2009
2055
  cost: {
2010
- input: 3,
2011
- output: 15,
2012
- cacheRead: 0.75,
2056
+ input: 0.39999999999999997,
2057
+ output: 0.39999999999999997,
2058
+ cacheRead: 0,
2013
2059
  cacheWrite: 0,
2014
2060
  },
2015
- contextWindow: 131072,
2061
+ contextWindow: 32768,
2016
2062
  maxTokens: 4096,
2017
2063
  },
2018
- "grok-2-1212": {
2019
- id: "grok-2-1212",
2020
- name: "xAI: Grok 2 1212",
2021
- provider: "xai",
2022
- baseUrl: "https://api.x.ai/v1",
2064
+ "mistralai/ministral-8b": {
2065
+ id: "mistralai/ministral-8b",
2066
+ name: "Mistral: Ministral 8B",
2067
+ provider: "openrouter",
2068
+ baseUrl: "https://openrouter.ai/api/v1",
2023
2069
  reasoning: false,
2024
2070
  input: ["text"],
2025
2071
  cost: {
2026
- input: 2,
2027
- output: 10,
2072
+ input: 0.09999999999999999,
2073
+ output: 0.09999999999999999,
2028
2074
  cacheRead: 0,
2029
2075
  cacheWrite: 0,
2030
2076
  },
2031
- contextWindow: 131072,
2077
+ contextWindow: 128000,
2032
2078
  maxTokens: 4096,
2033
2079
  },
2034
- }
2035
- },
2036
- openai: {
2037
- models: {
2038
- "gpt-4o-audio-preview": {
2039
- id: "gpt-4o-audio-preview",
2040
- name: "OpenAI: GPT-4o Audio",
2041
- provider: "openai",
2080
+ "nvidia/llama-3.1-nemotron-70b-instruct": {
2081
+ id: "nvidia/llama-3.1-nemotron-70b-instruct",
2082
+ name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
2083
+ provider: "openrouter",
2084
+ baseUrl: "https://openrouter.ai/api/v1",
2042
2085
  reasoning: false,
2043
2086
  input: ["text"],
2044
2087
  cost: {
2045
- input: 2.5,
2046
- output: 10,
2088
+ input: 0.12,
2089
+ output: 0.3,
2047
2090
  cacheRead: 0,
2048
2091
  cacheWrite: 0,
2049
2092
  },
2050
- contextWindow: 128000,
2093
+ contextWindow: 131072,
2051
2094
  maxTokens: 16384,
2052
2095
  },
2053
- "gpt-5": {
2054
- id: "gpt-5",
2055
- name: "OpenAI: GPT-5",
2056
- provider: "openai",
2057
- reasoning: true,
2058
- input: ["text", "image"],
2096
+ "thedrummer/rocinante-12b": {
2097
+ id: "thedrummer/rocinante-12b",
2098
+ name: "TheDrummer: Rocinante 12B",
2099
+ provider: "openrouter",
2100
+ baseUrl: "https://openrouter.ai/api/v1",
2101
+ reasoning: false,
2102
+ input: ["text"],
2059
2103
  cost: {
2060
- input: 1.25,
2061
- output: 10,
2062
- cacheRead: 0.125,
2104
+ input: 0.16999999999999998,
2105
+ output: 0.43,
2106
+ cacheRead: 0,
2063
2107
  cacheWrite: 0,
2064
2108
  },
2065
- contextWindow: 400000,
2066
- maxTokens: 128000,
2109
+ contextWindow: 32768,
2110
+ maxTokens: 4096,
2067
2111
  },
2068
- "gpt-5-mini": {
2069
- id: "gpt-5-mini",
2070
- name: "OpenAI: GPT-5 Mini",
2071
- provider: "openai",
2072
- reasoning: true,
2073
- input: ["text", "image"],
2112
+ "meta-llama/llama-3.2-3b-instruct": {
2113
+ id: "meta-llama/llama-3.2-3b-instruct",
2114
+ name: "Meta: Llama 3.2 3B Instruct",
2115
+ provider: "openrouter",
2116
+ baseUrl: "https://openrouter.ai/api/v1",
2117
+ reasoning: false,
2118
+ input: ["text"],
2074
2119
  cost: {
2075
- input: 0.25,
2076
- output: 2,
2077
- cacheRead: 0.024999999999999998,
2120
+ input: 0.003,
2121
+ output: 0.006,
2122
+ cacheRead: 0,
2078
2123
  cacheWrite: 0,
2079
2124
  },
2080
- contextWindow: 400000,
2081
- maxTokens: 128000,
2125
+ contextWindow: 20000,
2126
+ maxTokens: 20000,
2082
2127
  },
2083
- "gpt-5-nano": {
2084
- id: "gpt-5-nano",
2085
- name: "OpenAI: GPT-5 Nano",
2086
- provider: "openai",
2087
- reasoning: true,
2088
- input: ["text", "image"],
2128
+ "qwen/qwen-2.5-72b-instruct": {
2129
+ id: "qwen/qwen-2.5-72b-instruct",
2130
+ name: "Qwen2.5 72B Instruct",
2131
+ provider: "openrouter",
2132
+ baseUrl: "https://openrouter.ai/api/v1",
2133
+ reasoning: false,
2134
+ input: ["text"],
2089
2135
  cost: {
2090
- input: 0.049999999999999996,
2091
- output: 0.39999999999999997,
2092
- cacheRead: 0.005,
2136
+ input: 0.051830799999999996,
2137
+ output: 0.207424,
2138
+ cacheRead: 0,
2093
2139
  cacheWrite: 0,
2094
2140
  },
2095
- contextWindow: 400000,
2096
- maxTokens: 128000,
2141
+ contextWindow: 32768,
2142
+ maxTokens: 4096,
2097
2143
  },
2098
- "gpt-oss-120b": {
2099
- id: "gpt-oss-120b",
2100
- name: "OpenAI: gpt-oss-120b",
2101
- provider: "openai",
2102
- reasoning: true,
2103
- input: ["text"],
2144
+ "mistralai/pixtral-12b": {
2145
+ id: "mistralai/pixtral-12b",
2146
+ name: "Mistral: Pixtral 12B",
2147
+ provider: "openrouter",
2148
+ baseUrl: "https://openrouter.ai/api/v1",
2149
+ reasoning: false,
2150
+ input: ["text", "image"],
2104
2151
  cost: {
2105
- input: 0.072,
2106
- output: 0.28,
2152
+ input: 0.09999999999999999,
2153
+ output: 0.09999999999999999,
2107
2154
  cacheRead: 0,
2108
2155
  cacheWrite: 0,
2109
2156
  },
2110
- contextWindow: 131000,
2111
- maxTokens: 131000,
2157
+ contextWindow: 32768,
2158
+ maxTokens: 4096,
2112
2159
  },
2113
- "gpt-oss-20b": {
2114
- id: "gpt-oss-20b",
2115
- name: "OpenAI: gpt-oss-20b",
2116
- provider: "openai",
2117
- reasoning: true,
2160
+ "cohere/command-r-plus-08-2024": {
2161
+ id: "cohere/command-r-plus-08-2024",
2162
+ name: "Cohere: Command R+ (08-2024)",
2163
+ provider: "openrouter",
2164
+ baseUrl: "https://openrouter.ai/api/v1",
2165
+ reasoning: false,
2118
2166
  input: ["text"],
2119
2167
  cost: {
2120
- input: 0.04,
2121
- output: 0.15,
2168
+ input: 2.5,
2169
+ output: 10,
2122
2170
  cacheRead: 0,
2123
2171
  cacheWrite: 0,
2124
2172
  },
2125
- contextWindow: 131000,
2126
- maxTokens: 131000,
2173
+ contextWindow: 128000,
2174
+ maxTokens: 4000,
2127
2175
  },
2128
- "o3-pro": {
2129
- id: "o3-pro",
2130
- name: "OpenAI: o3 Pro",
2131
- provider: "openai",
2132
- reasoning: true,
2133
- input: ["text", "image"],
2176
+ "cohere/command-r-08-2024": {
2177
+ id: "cohere/command-r-08-2024",
2178
+ name: "Cohere: Command R (08-2024)",
2179
+ provider: "openrouter",
2180
+ baseUrl: "https://openrouter.ai/api/v1",
2181
+ reasoning: false,
2182
+ input: ["text"],
2134
2183
  cost: {
2135
- input: 20,
2136
- output: 80,
2184
+ input: 0.15,
2185
+ output: 0.6,
2137
2186
  cacheRead: 0,
2138
2187
  cacheWrite: 0,
2139
2188
  },
2140
- contextWindow: 200000,
2141
- maxTokens: 100000,
2189
+ contextWindow: 128000,
2190
+ maxTokens: 4000,
2142
2191
  },
2143
- "codex-mini": {
2144
- id: "codex-mini",
2145
- name: "OpenAI: Codex Mini",
2146
- provider: "openai",
2147
- reasoning: true,
2148
- input: ["text", "image"],
2192
+ "microsoft/phi-3.5-mini-128k-instruct": {
2193
+ id: "microsoft/phi-3.5-mini-128k-instruct",
2194
+ name: "Microsoft: Phi-3.5 Mini 128K Instruct",
2195
+ provider: "openrouter",
2196
+ baseUrl: "https://openrouter.ai/api/v1",
2197
+ reasoning: false,
2198
+ input: ["text"],
2149
2199
  cost: {
2150
- input: 1.5,
2151
- output: 6,
2152
- cacheRead: 0.375,
2200
+ input: 0.09999999999999999,
2201
+ output: 0.09999999999999999,
2202
+ cacheRead: 0,
2153
2203
  cacheWrite: 0,
2154
2204
  },
2155
- contextWindow: 200000,
2156
- maxTokens: 100000,
2205
+ contextWindow: 128000,
2206
+ maxTokens: 4096,
2157
2207
  },
2158
- "o4-mini-high": {
2159
- id: "o4-mini-high",
2160
- name: "OpenAI: o4 Mini High",
2161
- provider: "openai",
2162
- reasoning: true,
2163
- input: ["text", "image"],
2208
+ "nousresearch/hermes-3-llama-3.1-70b": {
2209
+ id: "nousresearch/hermes-3-llama-3.1-70b",
2210
+ name: "Nous: Hermes 3 70B Instruct",
2211
+ provider: "openrouter",
2212
+ baseUrl: "https://openrouter.ai/api/v1",
2213
+ reasoning: false,
2214
+ input: ["text"],
2164
2215
  cost: {
2165
- input: 1.1,
2166
- output: 4.4,
2167
- cacheRead: 0.275,
2216
+ input: 0.09999999999999999,
2217
+ output: 0.28,
2218
+ cacheRead: 0,
2168
2219
  cacheWrite: 0,
2169
2220
  },
2170
- contextWindow: 200000,
2171
- maxTokens: 100000,
2221
+ contextWindow: 131072,
2222
+ maxTokens: 4096,
2172
2223
  },
2173
- "o3": {
2174
- id: "o3",
2175
- name: "OpenAI: o3",
2176
- provider: "openai",
2177
- reasoning: true,
2178
- input: ["text", "image"],
2224
+ "meta-llama/llama-3.1-8b-instruct": {
2225
+ id: "meta-llama/llama-3.1-8b-instruct",
2226
+ name: "Meta: Llama 3.1 8B Instruct",
2227
+ provider: "openrouter",
2228
+ baseUrl: "https://openrouter.ai/api/v1",
2229
+ reasoning: false,
2230
+ input: ["text"],
2179
2231
  cost: {
2180
- input: 2,
2181
- output: 8,
2182
- cacheRead: 0.5,
2232
+ input: 0.015,
2233
+ output: 0.02,
2234
+ cacheRead: 0,
2183
2235
  cacheWrite: 0,
2184
2236
  },
2185
- contextWindow: 200000,
2186
- maxTokens: 100000,
2237
+ contextWindow: 131072,
2238
+ maxTokens: 16384,
2187
2239
  },
2188
- "o4-mini": {
2189
- id: "o4-mini",
2190
- name: "OpenAI: o4 Mini",
2191
- provider: "openai",
2192
- reasoning: true,
2193
- input: ["text", "image"],
2240
+ "meta-llama/llama-3.1-70b-instruct": {
2241
+ id: "meta-llama/llama-3.1-70b-instruct",
2242
+ name: "Meta: Llama 3.1 70B Instruct",
2243
+ provider: "openrouter",
2244
+ baseUrl: "https://openrouter.ai/api/v1",
2245
+ reasoning: false,
2246
+ input: ["text"],
2194
2247
  cost: {
2195
- input: 1.1,
2196
- output: 4.4,
2197
- cacheRead: 0.275,
2248
+ input: 0.09999999999999999,
2249
+ output: 0.28,
2250
+ cacheRead: 0,
2198
2251
  cacheWrite: 0,
2199
2252
  },
2200
- contextWindow: 200000,
2201
- maxTokens: 100000,
2253
+ contextWindow: 131072,
2254
+ maxTokens: 16384,
2202
2255
  },
2203
- "gpt-4.1": {
2204
- id: "gpt-4.1",
2205
- name: "OpenAI: GPT-4.1",
2206
- provider: "openai",
2256
+ "meta-llama/llama-3.1-405b-instruct": {
2257
+ id: "meta-llama/llama-3.1-405b-instruct",
2258
+ name: "Meta: Llama 3.1 405B Instruct",
2259
+ provider: "openrouter",
2260
+ baseUrl: "https://openrouter.ai/api/v1",
2207
2261
  reasoning: false,
2208
- input: ["text", "image"],
2262
+ input: ["text"],
2209
2263
  cost: {
2210
- input: 2,
2211
- output: 8,
2212
- cacheRead: 0.5,
2264
+ input: 0.7999999999999999,
2265
+ output: 0.7999999999999999,
2266
+ cacheRead: 0,
2213
2267
  cacheWrite: 0,
2214
2268
  },
2215
- contextWindow: 1047576,
2216
- maxTokens: 32768,
2269
+ contextWindow: 32768,
2270
+ maxTokens: 16384,
2217
2271
  },
2218
- "gpt-4.1-mini": {
2219
- id: "gpt-4.1-mini",
2220
- name: "OpenAI: GPT-4.1 Mini",
2221
- provider: "openai",
2272
+ "mistralai/mistral-nemo": {
2273
+ id: "mistralai/mistral-nemo",
2274
+ name: "Mistral: Mistral Nemo",
2275
+ provider: "openrouter",
2276
+ baseUrl: "https://openrouter.ai/api/v1",
2222
2277
  reasoning: false,
2223
- input: ["text", "image"],
2278
+ input: ["text"],
2224
2279
  cost: {
2225
- input: 0.39999999999999997,
2226
- output: 1.5999999999999999,
2227
- cacheRead: 0.09999999999999999,
2280
+ input: 0.0075,
2281
+ output: 0.049999999999999996,
2282
+ cacheRead: 0,
2228
2283
  cacheWrite: 0,
2229
2284
  },
2230
- contextWindow: 1047576,
2231
- maxTokens: 32768,
2285
+ contextWindow: 32000,
2286
+ maxTokens: 4096,
2232
2287
  },
2233
- "gpt-4.1-nano": {
2234
- id: "gpt-4.1-nano",
2235
- name: "OpenAI: GPT-4.1 Nano",
2236
- provider: "openai",
2288
+ "mistralai/mistral-7b-instruct-v0.3": {
2289
+ id: "mistralai/mistral-7b-instruct-v0.3",
2290
+ name: "Mistral: Mistral 7B Instruct v0.3",
2291
+ provider: "openrouter",
2292
+ baseUrl: "https://openrouter.ai/api/v1",
2237
2293
  reasoning: false,
2238
- input: ["text", "image"],
2294
+ input: ["text"],
2239
2295
  cost: {
2240
- input: 0.09999999999999999,
2241
- output: 0.39999999999999997,
2242
- cacheRead: 0.024999999999999998,
2296
+ input: 0.028,
2297
+ output: 0.054,
2298
+ cacheRead: 0,
2243
2299
  cacheWrite: 0,
2244
2300
  },
2245
- contextWindow: 1047576,
2246
- maxTokens: 32768,
2301
+ contextWindow: 32768,
2302
+ maxTokens: 16384,
2247
2303
  },
2248
- "o3-mini-high": {
2249
- id: "o3-mini-high",
2250
- name: "OpenAI: o3 Mini High",
2251
- provider: "openai",
2304
+ "mistralai/mistral-7b-instruct:free": {
2305
+ id: "mistralai/mistral-7b-instruct:free",
2306
+ name: "Mistral: Mistral 7B Instruct (free)",
2307
+ provider: "openrouter",
2308
+ baseUrl: "https://openrouter.ai/api/v1",
2252
2309
  reasoning: false,
2253
2310
  input: ["text"],
2254
2311
  cost: {
2255
- input: 1.1,
2256
- output: 4.4,
2257
- cacheRead: 0.55,
2312
+ input: 0,
2313
+ output: 0,
2314
+ cacheRead: 0,
2258
2315
  cacheWrite: 0,
2259
2316
  },
2260
- contextWindow: 200000,
2261
- maxTokens: 100000,
2317
+ contextWindow: 32768,
2318
+ maxTokens: 16384,
2262
2319
  },
2263
- "o3-mini": {
2264
- id: "o3-mini",
2265
- name: "OpenAI: o3 Mini",
2266
- provider: "openai",
2320
+ "mistralai/mistral-7b-instruct": {
2321
+ id: "mistralai/mistral-7b-instruct",
2322
+ name: "Mistral: Mistral 7B Instruct",
2323
+ provider: "openrouter",
2324
+ baseUrl: "https://openrouter.ai/api/v1",
2267
2325
  reasoning: false,
2268
2326
  input: ["text"],
2269
2327
  cost: {
2270
- input: 1.1,
2271
- output: 4.4,
2272
- cacheRead: 0.55,
2328
+ input: 0.028,
2329
+ output: 0.054,
2330
+ cacheRead: 0,
2273
2331
  cacheWrite: 0,
2274
2332
  },
2275
- contextWindow: 200000,
2276
- maxTokens: 100000,
2333
+ contextWindow: 32768,
2334
+ maxTokens: 16384,
2277
2335
  },
2278
- "o1": {
2279
- id: "o1",
2280
- name: "OpenAI: o1",
2281
- provider: "openai",
2336
+ "microsoft/phi-3-mini-128k-instruct": {
2337
+ id: "microsoft/phi-3-mini-128k-instruct",
2338
+ name: "Microsoft: Phi-3 Mini 128K Instruct",
2339
+ provider: "openrouter",
2340
+ baseUrl: "https://openrouter.ai/api/v1",
2282
2341
  reasoning: false,
2283
- input: ["text", "image"],
2342
+ input: ["text"],
2284
2343
  cost: {
2285
- input: 15,
2286
- output: 60,
2287
- cacheRead: 7.5,
2344
+ input: 0.09999999999999999,
2345
+ output: 0.09999999999999999,
2346
+ cacheRead: 0,
2288
2347
  cacheWrite: 0,
2289
2348
  },
2290
- contextWindow: 200000,
2291
- maxTokens: 100000,
2349
+ contextWindow: 128000,
2350
+ maxTokens: 4096,
2292
2351
  },
2293
- "gpt-4o-2024-11-20": {
2294
- id: "gpt-4o-2024-11-20",
2295
- name: "OpenAI: GPT-4o (2024-11-20)",
2296
- provider: "openai",
2352
+ "microsoft/phi-3-medium-128k-instruct": {
2353
+ id: "microsoft/phi-3-medium-128k-instruct",
2354
+ name: "Microsoft: Phi-3 Medium 128K Instruct",
2355
+ provider: "openrouter",
2356
+ baseUrl: "https://openrouter.ai/api/v1",
2297
2357
  reasoning: false,
2298
- input: ["text", "image"],
2358
+ input: ["text"],
2299
2359
  cost: {
2300
- input: 2.5,
2301
- output: 10,
2302
- cacheRead: 1.25,
2360
+ input: 1,
2361
+ output: 1,
2362
+ cacheRead: 0,
2303
2363
  cacheWrite: 0,
2304
2364
  },
2305
2365
  contextWindow: 128000,
2306
- maxTokens: 16384,
2366
+ maxTokens: 4096,
2307
2367
  },
2308
- "gpt-4o-2024-08-06": {
2309
- id: "gpt-4o-2024-08-06",
2310
- name: "OpenAI: GPT-4o (2024-08-06)",
2311
- provider: "openai",
2368
+ "meta-llama/llama-3-70b-instruct": {
2369
+ id: "meta-llama/llama-3-70b-instruct",
2370
+ name: "Meta: Llama 3 70B Instruct",
2371
+ provider: "openrouter",
2372
+ baseUrl: "https://openrouter.ai/api/v1",
2312
2373
  reasoning: false,
2313
- input: ["text", "image"],
2374
+ input: ["text"],
2314
2375
  cost: {
2315
- input: 2.5,
2316
- output: 10,
2317
- cacheRead: 1.25,
2376
+ input: 0.3,
2377
+ output: 0.39999999999999997,
2378
+ cacheRead: 0,
2318
2379
  cacheWrite: 0,
2319
2380
  },
2320
- contextWindow: 128000,
2381
+ contextWindow: 8192,
2321
2382
  maxTokens: 16384,
2322
2383
  },
2323
- "gpt-4o-mini": {
2324
- id: "gpt-4o-mini",
2325
- name: "OpenAI: GPT-4o-mini",
2326
- provider: "openai",
2384
+ "meta-llama/llama-3-8b-instruct": {
2385
+ id: "meta-llama/llama-3-8b-instruct",
2386
+ name: "Meta: Llama 3 8B Instruct",
2387
+ provider: "openrouter",
2388
+ baseUrl: "https://openrouter.ai/api/v1",
2327
2389
  reasoning: false,
2328
- input: ["text", "image"],
2390
+ input: ["text"],
2329
2391
  cost: {
2330
- input: 0.15,
2331
- output: 0.6,
2332
- cacheRead: 0.075,
2392
+ input: 0.03,
2393
+ output: 0.06,
2394
+ cacheRead: 0,
2333
2395
  cacheWrite: 0,
2334
2396
  },
2335
- contextWindow: 128000,
2336
- maxTokens: 16384,
2337
- },
2338
- "gpt-4o-mini-2024-07-18": {
2339
- id: "gpt-4o-mini-2024-07-18",
2340
- name: "OpenAI: GPT-4o-mini (2024-07-18)",
2341
- provider: "openai",
2342
- reasoning: false,
2343
- input: ["text", "image"],
2344
- cost: {
2345
- input: 0.15,
2346
- output: 0.6,
2347
- cacheRead: 0.075,
2348
- cacheWrite: 0,
2349
- },
2350
- contextWindow: 128000,
2351
- maxTokens: 16384,
2352
- },
2353
- "gpt-4o": {
2354
- id: "gpt-4o",
2355
- name: "OpenAI: GPT-4o",
2356
- provider: "openai",
2357
- reasoning: false,
2358
- input: ["text", "image"],
2359
- cost: {
2360
- input: 2.5,
2361
- output: 10,
2362
- cacheRead: 1.25,
2363
- cacheWrite: 0,
2364
- },
2365
- contextWindow: 128000,
2397
+ contextWindow: 8192,
2366
2398
  maxTokens: 16384,
2367
2399
  },
2368
- "gpt-4o:extended": {
2369
- id: "gpt-4o:extended",
2370
- name: "OpenAI: GPT-4o (extended)",
2371
- provider: "openai",
2400
+ "mistralai/mixtral-8x22b-instruct": {
2401
+ id: "mistralai/mixtral-8x22b-instruct",
2402
+ name: "Mistral: Mixtral 8x22B Instruct",
2403
+ provider: "openrouter",
2404
+ baseUrl: "https://openrouter.ai/api/v1",
2372
2405
  reasoning: false,
2373
- input: ["text", "image"],
2406
+ input: ["text"],
2374
2407
  cost: {
2375
- input: 6,
2376
- output: 18,
2408
+ input: 0.8999999999999999,
2409
+ output: 0.8999999999999999,
2377
2410
  cacheRead: 0,
2378
2411
  cacheWrite: 0,
2379
2412
  },
2380
- contextWindow: 128000,
2381
- maxTokens: 64000,
2413
+ contextWindow: 65536,
2414
+ maxTokens: 4096,
2382
2415
  },
2383
- "gpt-4o-2024-05-13": {
2384
- id: "gpt-4o-2024-05-13",
2385
- name: "OpenAI: GPT-4o (2024-05-13)",
2386
- provider: "openai",
2416
+ "cohere/command-r-plus": {
2417
+ id: "cohere/command-r-plus",
2418
+ name: "Cohere: Command R+",
2419
+ provider: "openrouter",
2420
+ baseUrl: "https://openrouter.ai/api/v1",
2387
2421
  reasoning: false,
2388
- input: ["text", "image"],
2422
+ input: ["text"],
2389
2423
  cost: {
2390
- input: 5,
2424
+ input: 3,
2391
2425
  output: 15,
2392
2426
  cacheRead: 0,
2393
2427
  cacheWrite: 0,
2394
2428
  },
2395
2429
  contextWindow: 128000,
2396
- maxTokens: 4096,
2430
+ maxTokens: 4000,
2397
2431
  },
2398
- "gpt-4-turbo": {
2399
- id: "gpt-4-turbo",
2400
- name: "OpenAI: GPT-4 Turbo",
2401
- provider: "openai",
2432
+ "cohere/command-r-plus-04-2024": {
2433
+ id: "cohere/command-r-plus-04-2024",
2434
+ name: "Cohere: Command R+ (04-2024)",
2435
+ provider: "openrouter",
2436
+ baseUrl: "https://openrouter.ai/api/v1",
2402
2437
  reasoning: false,
2403
- input: ["text", "image"],
2438
+ input: ["text"],
2404
2439
  cost: {
2405
- input: 10,
2406
- output: 30,
2440
+ input: 3,
2441
+ output: 15,
2407
2442
  cacheRead: 0,
2408
2443
  cacheWrite: 0,
2409
2444
  },
2410
2445
  contextWindow: 128000,
2411
- maxTokens: 4096,
2446
+ maxTokens: 4000,
2412
2447
  },
2413
- "gpt-3.5-turbo-0613": {
2414
- id: "gpt-3.5-turbo-0613",
2415
- name: "OpenAI: GPT-3.5 Turbo (older v0613)",
2416
- provider: "openai",
2448
+ "cohere/command-r": {
2449
+ id: "cohere/command-r",
2450
+ name: "Cohere: Command R",
2451
+ provider: "openrouter",
2452
+ baseUrl: "https://openrouter.ai/api/v1",
2417
2453
  reasoning: false,
2418
2454
  input: ["text"],
2419
2455
  cost: {
2420
- input: 1,
2421
- output: 2,
2456
+ input: 0.5,
2457
+ output: 1.5,
2422
2458
  cacheRead: 0,
2423
2459
  cacheWrite: 0,
2424
2460
  },
2425
- contextWindow: 4095,
2426
- maxTokens: 4096,
2461
+ contextWindow: 128000,
2462
+ maxTokens: 4000,
2427
2463
  },
2428
- "gpt-4-turbo-preview": {
2429
- id: "gpt-4-turbo-preview",
2430
- name: "OpenAI: GPT-4 Turbo Preview",
2431
- provider: "openai",
2464
+ "cohere/command-r-03-2024": {
2465
+ id: "cohere/command-r-03-2024",
2466
+ name: "Cohere: Command R (03-2024)",
2467
+ provider: "openrouter",
2468
+ baseUrl: "https://openrouter.ai/api/v1",
2432
2469
  reasoning: false,
2433
2470
  input: ["text"],
2434
2471
  cost: {
2435
- input: 10,
2436
- output: 30,
2472
+ input: 0.5,
2473
+ output: 1.5,
2437
2474
  cacheRead: 0,
2438
2475
  cacheWrite: 0,
2439
2476
  },
2440
2477
  contextWindow: 128000,
2441
- maxTokens: 4096,
2478
+ maxTokens: 4000,
2442
2479
  },
2443
- "gpt-4-1106-preview": {
2444
- id: "gpt-4-1106-preview",
2445
- name: "OpenAI: GPT-4 Turbo (older v1106)",
2446
- provider: "openai",
2480
+ "mistralai/mistral-large": {
2481
+ id: "mistralai/mistral-large",
2482
+ name: "Mistral Large",
2483
+ provider: "openrouter",
2484
+ baseUrl: "https://openrouter.ai/api/v1",
2447
2485
  reasoning: false,
2448
2486
  input: ["text"],
2449
2487
  cost: {
2450
- input: 10,
2451
- output: 30,
2488
+ input: 2,
2489
+ output: 6,
2452
2490
  cacheRead: 0,
2453
2491
  cacheWrite: 0,
2454
2492
  },
2455
2493
  contextWindow: 128000,
2456
2494
  maxTokens: 4096,
2457
2495
  },
2458
- "gpt-3.5-turbo-16k": {
2459
- id: "gpt-3.5-turbo-16k",
2460
- name: "OpenAI: GPT-3.5 Turbo 16k",
2461
- provider: "openai",
2496
+ "mistralai/mistral-tiny": {
2497
+ id: "mistralai/mistral-tiny",
2498
+ name: "Mistral Tiny",
2499
+ provider: "openrouter",
2500
+ baseUrl: "https://openrouter.ai/api/v1",
2462
2501
  reasoning: false,
2463
2502
  input: ["text"],
2464
2503
  cost: {
2465
- input: 3,
2466
- output: 4,
2504
+ input: 0.25,
2505
+ output: 0.25,
2467
2506
  cacheRead: 0,
2468
2507
  cacheWrite: 0,
2469
2508
  },
2470
- contextWindow: 16385,
2509
+ contextWindow: 32768,
2471
2510
  maxTokens: 4096,
2472
2511
  },
2473
- "gpt-4": {
2474
- id: "gpt-4",
2475
- name: "OpenAI: GPT-4",
2476
- provider: "openai",
2512
+ "mistralai/mistral-small": {
2513
+ id: "mistralai/mistral-small",
2514
+ name: "Mistral Small",
2515
+ provider: "openrouter",
2516
+ baseUrl: "https://openrouter.ai/api/v1",
2477
2517
  reasoning: false,
2478
2518
  input: ["text"],
2479
2519
  cost: {
2480
- input: 30,
2481
- output: 60,
2520
+ input: 0.19999999999999998,
2521
+ output: 0.6,
2482
2522
  cacheRead: 0,
2483
2523
  cacheWrite: 0,
2484
2524
  },
2485
- contextWindow: 8191,
2525
+ contextWindow: 32768,
2486
2526
  maxTokens: 4096,
2487
2527
  },
2488
- "gpt-4-0314": {
2489
- id: "gpt-4-0314",
2490
- name: "OpenAI: GPT-4 (older v0314)",
2491
- provider: "openai",
2528
+ "mistralai/mixtral-8x7b-instruct": {
2529
+ id: "mistralai/mixtral-8x7b-instruct",
2530
+ name: "Mistral: Mixtral 8x7B Instruct",
2531
+ provider: "openrouter",
2532
+ baseUrl: "https://openrouter.ai/api/v1",
2492
2533
  reasoning: false,
2493
2534
  input: ["text"],
2494
2535
  cost: {
2495
- input: 30,
2496
- output: 60,
2536
+ input: 0.08,
2537
+ output: 0.24,
2497
2538
  cacheRead: 0,
2498
2539
  cacheWrite: 0,
2499
2540
  },
2500
- contextWindow: 8191,
2501
- maxTokens: 4096,
2541
+ contextWindow: 32768,
2542
+ maxTokens: 16384,
2502
2543
  },
2503
- "gpt-3.5-turbo": {
2504
- id: "gpt-3.5-turbo",
2505
- name: "OpenAI: GPT-3.5 Turbo",
2506
- provider: "openai",
2544
+ "mistralai/mistral-7b-instruct-v0.1": {
2545
+ id: "mistralai/mistral-7b-instruct-v0.1",
2546
+ name: "Mistral: Mistral 7B Instruct v0.1",
2547
+ provider: "openrouter",
2548
+ baseUrl: "https://openrouter.ai/api/v1",
2507
2549
  reasoning: false,
2508
2550
  input: ["text"],
2509
2551
  cost: {
2510
- input: 0.5,
2511
- output: 1.5,
2552
+ input: 0.11,
2553
+ output: 0.19,
2512
2554
  cacheRead: 0,
2513
2555
  cacheWrite: 0,
2514
2556
  },
2515
- contextWindow: 16385,
2557
+ contextWindow: 2824,
2516
2558
  maxTokens: 4096,
2517
2559
  },
2518
2560
  }
2519
2561
  },
- anthropic: {
+ xai: {
  models: {
- "claude-opus-4-1": {
- id: "claude-opus-4-1",
- name: "Anthropic: Claude Opus 4.1",
- provider: "anthropic",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 15,
- output: 75,
- cacheRead: 1.5,
- cacheWrite: 18.75,
- },
- contextWindow: 200000,
- maxTokens: 32000,
- },
- "claude-opus-4-0": {
- id: "claude-opus-4-0",
- name: "Anthropic: Claude Opus 4",
- provider: "anthropic",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 15,
- output: 75,
- cacheRead: 1.5,
- cacheWrite: 18.75,
- },
- contextWindow: 200000,
- maxTokens: 32000,
- },
- "claude-sonnet-4-0": {
- id: "claude-sonnet-4-0",
- name: "Anthropic: Claude Sonnet 4",
- provider: "anthropic",
+ "grok-code-fast-1": {
+ id: "grok-code-fast-1",
+ name: "xAI: Grok Code Fast 1",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 3,
- output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
+ input: 0.19999999999999998,
+ output: 1.5,
+ cacheRead: 0.02,
+ cacheWrite: 0,
  },
- contextWindow: 1000000,
- maxTokens: 64000,
+ contextWindow: 256000,
+ maxTokens: 10000,
  },
- "claude-3-7-sonnet-latest": {
- id: "claude-3-7-sonnet-latest",
- name: "Anthropic: Claude 3.7 Sonnet",
- provider: "anthropic",
+ "grok-4": {
+ id: "grok-4",
+ name: "xAI: Grok 4",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
  input: ["text", "image"],
  cost: {
  input: 3,
  output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
+ cacheRead: 0.75,
+ cacheWrite: 0,
  },
- contextWindow: 200000,
- maxTokens: 64000,
+ contextWindow: 256000,
+ maxTokens: 4096,
  },
- "claude-3-7-sonnet-latest:thinking": {
- id: "claude-3-7-sonnet-latest:thinking",
- name: "Anthropic: Claude 3.7 Sonnet (thinking)",
- provider: "anthropic",
+ "grok-3-mini": {
+ id: "grok-3-mini",
+ name: "xAI: Grok 3 Mini",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 3,
- output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
- },
- contextWindow: 200000,
- maxTokens: 64000,
- },
- "claude-3-5-haiku-20241022": {
- id: "claude-3-5-haiku-20241022",
- name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.7999999999999999,
- output: 4,
- cacheRead: 0.08,
- cacheWrite: 1,
- },
- contextWindow: 200000,
- maxTokens: 8192,
- },
- "claude-3-5-haiku-latest": {
- id: "claude-3-5-haiku-latest",
- name: "Anthropic: Claude 3.5 Haiku",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.7999999999999999,
- output: 4,
- cacheRead: 0.08,
- cacheWrite: 1,
- },
- contextWindow: 200000,
- maxTokens: 8192,
- },
- "claude-3-5-sonnet-latest": {
- id: "claude-3-5-sonnet-latest",
- name: "Anthropic: Claude 3.5 Sonnet",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 3,
- output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
+ input: 0.3,
+ output: 0.5,
+ cacheRead: 0.075,
+ cacheWrite: 0,
  },
- contextWindow: 200000,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "claude-3-5-sonnet-20240620": {
- id: "claude-3-5-sonnet-20240620",
- name: "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
- provider: "anthropic",
+ "grok-3": {
+ id: "grok-3",
+ name: "xAI: Grok 3",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
  input: 3,
  output: 15,
- cacheRead: 0.3,
- cacheWrite: 3.75,
- },
- contextWindow: 200000,
- maxTokens: 8192,
- },
- "claude-3-haiku-20240307": {
- id: "claude-3-haiku-20240307",
- name: "Anthropic: Claude 3 Haiku",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.25,
- output: 1.25,
- cacheRead: 0.03,
- cacheWrite: 0.3,
- },
- contextWindow: 200000,
- maxTokens: 4096,
- },
- "claude-3-opus-20240229": {
- id: "claude-3-opus-20240229",
- name: "Anthropic: Claude 3 Opus",
- provider: "anthropic",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 15,
- output: 75,
- cacheRead: 1.5,
- cacheWrite: 18.75,
+ cacheRead: 0.75,
+ cacheWrite: 0,
  },
- contextWindow: 200000,
+ contextWindow: 131072,
  maxTokens: 4096,
  },
- }
- },
- google: {
- models: {
- "gemini-2.5-flash-lite": {
- id: "gemini-2.5-flash-lite",
- name: "Google: Gemini 2.5 Flash Lite",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 0.09999999999999999,
- output: 0.39999999999999997,
- cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-flash-lite-preview-06-17": {
- id: "gemini-2.5-flash-lite-preview-06-17",
- name: "Google: Gemini 2.5 Flash Lite Preview 06-17",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 0.09999999999999999,
- output: 0.39999999999999997,
- cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-flash": {
- id: "gemini-2.5-flash",
- name: "Google: Gemini 2.5 Flash",
- provider: "google",
+ "grok-3-mini-beta": {
+ id: "grok-3-mini-beta",
+ name: "xAI: Grok 3 Mini Beta",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: true,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
  input: 0.3,
- output: 2.5,
+ output: 0.5,
  cacheRead: 0.075,
- cacheWrite: 0.3833,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-pro": {
- id: "gemini-2.5-pro",
- name: "Google: Gemini 2.5 Pro",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 1.25,
- output: 10,
- cacheRead: 0.31,
- cacheWrite: 1.625,
- },
- contextWindow: 1048576,
- maxTokens: 65536,
- },
- "gemini-2.5-pro-preview": {
- id: "gemini-2.5-pro-preview",
- name: "Google: Gemini 2.5 Pro Preview 06-05",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 1.25,
- output: 10,
- cacheRead: 0.31,
- cacheWrite: 1.625,
- },
- contextWindow: 1048576,
- maxTokens: 65536,
- },
- "gemini-2.5-pro-preview-05-06": {
- id: "gemini-2.5-pro-preview-05-06",
- name: "Google: Gemini 2.5 Pro Preview 05-06",
- provider: "google",
- reasoning: true,
- input: ["text", "image"],
- cost: {
- input: 1.25,
- output: 10,
- cacheRead: 0.31,
- cacheWrite: 1.625,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.5-pro-exp-03-25": {
- id: "gemini-2.5-pro-exp-03-25",
- name: "Google: Gemini 2.5 Pro Experimental",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
- cacheWrite: 0,
- },
- contextWindow: 1048576,
- maxTokens: 65535,
- },
- "gemini-2.0-flash-lite-001": {
- id: "gemini-2.0-flash-lite-001",
- name: "Google: Gemini 2.0 Flash Lite",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.075,
- output: 0.3,
- cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 1048576,
- maxTokens: 8192,
- },
- "gemini-2.0-flash-001": {
- id: "gemini-2.0-flash-001",
- name: "Google: Gemini 2.0 Flash",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.09999999999999999,
- output: 0.39999999999999997,
- cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
- },
- contextWindow: 1048576,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "gemini-2.0-flash-exp:free": {
- id: "gemini-2.0-flash-exp:free",
- name: "Google: Gemini 2.0 Flash Experimental (free)",
- provider: "google",
+ "grok-3-beta": {
+ id: "grok-3-beta",
+ name: "xAI: Grok 3 Beta",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 0,
- output: 0,
- cacheRead: 0,
+ input: 3,
+ output: 15,
+ cacheRead: 0.75,
  cacheWrite: 0,
  },
- contextWindow: 1048576,
- maxTokens: 8192,
- },
- "gemini-flash-1.5-8b": {
- id: "gemini-flash-1.5-8b",
- name: "Google: Gemini 1.5 Flash 8B",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.0375,
- output: 0.15,
- cacheRead: 0.01,
- cacheWrite: 0.0583,
- },
- contextWindow: 1000000,
- maxTokens: 8192,
- },
- "gemini-flash-1.5": {
- id: "gemini-flash-1.5",
- name: "Google: Gemini 1.5 Flash ",
- provider: "google",
- reasoning: false,
- input: ["text", "image"],
- cost: {
- input: 0.075,
- output: 0.3,
- cacheRead: 0.01875,
- cacheWrite: 0.1583,
- },
- contextWindow: 1000000,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
- "gemini-pro-1.5": {
- id: "gemini-pro-1.5",
- name: "Google: Gemini 1.5 Pro",
- provider: "google",
+ "grok-2-1212": {
+ id: "grok-2-1212",
+ name: "xAI: Grok 2 1212",
+ provider: "xai",
+ baseUrl: "https://api.x.ai/v1",
  reasoning: false,
- input: ["text", "image"],
+ input: ["text"],
  cost: {
- input: 1.25,
- output: 5,
+ input: 2,
+ output: 10,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 2000000,
- maxTokens: 8192,
+ contextWindow: 131072,
+ maxTokens: 4096,
  },
  }
  },