tokencostauto 0.1.387-py3-none-any.whl → 0.1.423-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tokencostauto/model_prices.json +4131 -16
- {tokencostauto-0.1.387.dist-info → tokencostauto-0.1.423.dist-info}/METADATA +1 -1
- tokencostauto-0.1.423.dist-info/RECORD +9 -0
- tokencostauto-0.1.387.dist-info/RECORD +0 -9
- {tokencostauto-0.1.387.dist-info → tokencostauto-0.1.423.dist-info}/WHEEL +0 -0
- {tokencostauto-0.1.387.dist-info → tokencostauto-0.1.423.dist-info}/licenses/LICENSE +0 -0
- {tokencostauto-0.1.387.dist-info → tokencostauto-0.1.423.dist-info}/top_level.txt +0 -0
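Almost all of the delta is in tokencostauto/model_prices.json: new or updated model entries keyed by model name, each carrying per-token costs, context limits, and capability flags. As a rough illustration of how one of the entries added below might be consumed, here is a minimal Python sketch; the entry is copied verbatim from this diff, while the estimate_cost helper is hypothetical and not part of tokencostauto's public API.

import json

# One entry added in this diff (Bedrock pricing for google.gemma-3-12b-it).
ENTRY = json.loads("""
{
    "input_cost_per_token": 9e-08,
    "litellm_provider": "bedrock_converse",
    "max_input_tokens": 128000,
    "max_output_tokens": 8192,
    "max_tokens": 8192,
    "mode": "chat",
    "output_cost_per_token": 2.9e-07,
    "supports_system_messages": true,
    "supports_vision": true
}
""")

def estimate_cost(entry: dict, prompt_tokens: int, completion_tokens: int) -> float:
    # Hypothetical helper: USD cost of one call from the per-token prices.
    return (prompt_tokens * entry["input_cost_per_token"]
            + completion_tokens * entry["output_cost_per_token"])

# e.g. 1,000 prompt tokens and 200 completion tokens
print(f"${estimate_cost(ENTRY, 1000, 200):.6f}")  # $0.000148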
tokencostauto/model_prices.json
CHANGED
@@ -8693,6 +8693,7 @@
 "output_cost_per_token": 3.5e-06,
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "deepinfra/meta-llama/Meta-Llama-3.1-405B-Instruct": {
@@ -8758,7 +8759,6 @@
 "output_cost_per_token": 0.0
 },
 "voyage/rerank-2": {
-"input_cost_per_query": 5e-08,
 "input_cost_per_token": 5e-08,
 "litellm_provider": "voyage",
 "max_input_tokens": 16000,
@@ -8769,7 +8769,6 @@
 "output_cost_per_token": 0.0
 },
 "voyage/rerank-2-lite": {
-"input_cost_per_query": 2e-08,
 "input_cost_per_token": 2e-08,
 "litellm_provider": "voyage",
 "max_input_tokens": 8000,
@@ -9783,8 +9782,8 @@
 "input_cost_per_token": 3e-06,
 "litellm_provider": "anthropic",
 "max_input_tokens": 200000,
-"max_output_tokens":
-"max_tokens":
+"max_output_tokens": 64000,
+"max_tokens": 64000,
 "mode": "chat",
 "output_cost_per_token": 1.5e-05,
 "search_context_cost_per_query": {
@@ -9811,8 +9810,8 @@
 "input_cost_per_token": 3e-06,
 "litellm_provider": "anthropic",
 "max_input_tokens": 200000,
-"max_output_tokens":
-"max_tokens":
+"max_output_tokens": 64000,
+"max_tokens": 64000,
 "mode": "chat",
 "output_cost_per_token": 1.5e-05,
 "search_context_cost_per_query": {
@@ -11367,15 +11366,20 @@
 "supports_vision": true
 },
 "gpt-image-1": {
+"input_cost_per_image": 0.042,
 "input_cost_per_pixel": 4.0054321e-08,
+"input_cost_per_token": 5e-06,
+"input_cost_per_image_token": 1e-05,
 "litellm_provider": "openai",
 "mode": "image_generation",
 "output_cost_per_pixel": 0.0,
+"output_cost_per_token": 4e-05,
 "supported_endpoints": [
 "/v1/images/generations"
 ]
 },
 "low/1024-x-1024/gpt-image-1": {
+"input_cost_per_image": 0.011,
 "input_cost_per_pixel": 1.0490417e-08,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11385,6 +11389,7 @@
 ]
 },
 "medium/1024-x-1024/gpt-image-1": {
+"input_cost_per_image": 0.042,
 "input_cost_per_pixel": 4.0054321e-08,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11394,6 +11399,7 @@
 ]
 },
 "high/1024-x-1024/gpt-image-1": {
+"input_cost_per_image": 0.167,
 "input_cost_per_pixel": 1.59263611e-07,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11403,6 +11409,7 @@
 ]
 },
 "low/1024-x-1536/gpt-image-1": {
+"input_cost_per_image": 0.016,
 "input_cost_per_pixel": 1.0172526e-08,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11412,6 +11419,7 @@
 ]
 },
 "medium/1024-x-1536/gpt-image-1": {
+"input_cost_per_image": 0.063,
 "input_cost_per_pixel": 4.0054321e-08,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11421,6 +11429,7 @@
 ]
 },
 "high/1024-x-1536/gpt-image-1": {
+"input_cost_per_image": 0.25,
 "input_cost_per_pixel": 1.58945719e-07,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11430,6 +11439,7 @@
 ]
 },
 "low/1536-x-1024/gpt-image-1": {
+"input_cost_per_image": 0.016,
 "input_cost_per_pixel": 1.0172526e-08,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11439,6 +11449,7 @@
 ]
 },
 "medium/1536-x-1024/gpt-image-1": {
+"input_cost_per_image": 0.063,
 "input_cost_per_pixel": 4.0054321e-08,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -11448,6 +11459,7 @@
 ]
 },
 "high/1536-x-1024/gpt-image-1": {
+"input_cost_per_image": 0.25,
 "input_cost_per_pixel": 1.58945719e-07,
 "litellm_provider": "openai",
 "mode": "image_generation",
@@ -13040,6 +13052,7 @@
 "output_cost_per_token": 8.5e-07,
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
@@ -13049,6 +13062,7 @@
 "output_cost_per_token": 5.9e-07,
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo": {
@@ -13056,6 +13070,7 @@
 "mode": "chat",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo": {
@@ -13063,6 +13078,7 @@
 "mode": "chat",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo": {
@@ -13070,6 +13086,7 @@
 "mode": "chat",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/deepseek-ai/DeepSeek-V3": {
@@ -13082,6 +13099,7 @@
 "output_cost_per_token": 1.25e-06,
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/mistralai/Mistral-Small-24B-Instruct-2501": {
@@ -13516,7 +13534,8 @@
 "output_cost_per_token": 3.4e-07,
 "supports_function_calling": true,
 "supports_response_schema": true,
-"supports_tool_choice": true
+"supports_tool_choice": true,
+"supports_vision": true
 },
 "groq/meta-llama/llama-4-maverick-17b-128e-instruct": {
 "input_cost_per_token": 2e-07,
@@ -13528,7 +13547,8 @@
 "output_cost_per_token": 6e-07,
 "supports_function_calling": true,
 "supports_response_schema": true,
-"supports_tool_choice": true
+"supports_tool_choice": true,
+"supports_vision": true
 },
 "groq/mistral-saba-24b": {
 "input_cost_per_token": 7.9e-07,
@@ -16533,6 +16553,7 @@
 "output_cost_per_token": 7e-06,
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/moonshotai/Kimi-K2-Instruct": {
@@ -16543,6 +16564,7 @@
 "source": "https://www.together.ai/models/kimi-k2-instruct",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "azure_ai/grok-3": {
@@ -17992,6 +18014,7 @@
 "output_cost_per_token": 2.19e-06,
 "source": "https://fireworks.ai/models/fireworks/glm-4p5",
 "supports_function_calling": true,
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -18005,6 +18028,7 @@
 "output_cost_per_token": 8.8e-07,
 "source": "https://artificialanalysis.ai/models/glm-4-5-air",
 "supports_function_calling": true,
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -18018,6 +18042,7 @@
 "output_cost_per_token": 6e-07,
 "source": "https://fireworks.ai/pricing",
 "supports_function_calling": true,
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -18031,6 +18056,7 @@
 "output_cost_per_token": 2e-07,
 "source": "https://fireworks.ai/pricing",
 "supports_function_calling": true,
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -19132,6 +19158,7 @@
 "source": "https://www.together.ai/models/qwen3-235b-a22b-instruct-2507-fp8",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": {
@@ -19143,6 +19170,7 @@
 "source": "https://www.together.ai/models/qwen3-coder-480b-a35b-instruct",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/Qwen/Qwen3-235B-A22B-Thinking-2507": {
@@ -19154,6 +19182,7 @@
 "source": "https://www.together.ai/models/qwen3-235b-a22b-thinking-2507",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/Qwen/Qwen3-235B-A22B-fp8-tput": {
@@ -19176,6 +19205,7 @@
 "source": "https://www.together.ai/models/deepseek-r1-0528-throughput",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/openai/gpt-oss-120b": {
@@ -19187,6 +19217,7 @@
 "source": "https://www.together.ai/models/gpt-oss-120b",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/OpenAI/gpt-oss-20B": {
@@ -19209,6 +19240,7 @@
 "source": "https://www.together.ai/models/glm-4-5-air",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "fireworks_ai/accounts/fireworks/models/deepseek-v3-0324": {
@@ -20397,6 +20429,7 @@
 "max_videos_per_prompt": 10,
 "mode": "image_generation",
 "output_cost_per_image": 0.039,
+"output_cost_per_image_token": 3e-05,
 "output_cost_per_reasoning_token": 3e-05,
 "output_cost_per_token": 3e-05,
 "rpm": 100000,
@@ -20445,6 +20478,7 @@
 "max_videos_per_prompt": 10,
 "mode": "image_generation",
 "output_cost_per_image": 0.039,
+"output_cost_per_image_token": 3e-05,
 "output_cost_per_reasoning_token": 3e-05,
 "output_cost_per_token": 3e-05,
 "rpm": 100000,
@@ -20527,6 +20561,7 @@
 "mode": "chat",
 "output_cost_per_token": 1.68e-06,
 "source": "https://fireworks.ai/pricing",
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -21856,6 +21891,7 @@
 "source": "https://www.together.ai/models/gpt-oss-20b",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "dashscope/qwen3-max-preview": {
@@ -24215,7 +24251,7 @@
 "input_cost_per_image_token": 2.5e-06,
 "input_cost_per_token": 2e-06,
 "litellm_provider": "openai",
-"mode": "
+"mode": "image_generation",
 "output_cost_per_image_token": 8e-06,
 "supported_endpoints": [
 "/v1/images/generations",
@@ -24466,6 +24502,7 @@
 "source": "https://www.together.ai/models/qwen3-next-80b-a3b-instruct",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/Qwen/Qwen3-Next-80B-A3B-Thinking": {
@@ -24477,6 +24514,7 @@
 "source": "https://www.together.ai/models/qwen3-next-80b-a3b-thinking",
 "supports_function_calling": true,
 "supports_parallel_function_calling": true,
+"supports_response_schema": true,
 "supports_tool_choice": true
 },
 "together_ai/baai/bge-base-en-v1.5": {
@@ -24666,6 +24704,7 @@
 "max_videos_per_prompt": 10,
 "mode": "image_generation",
 "output_cost_per_image": 0.039,
+"output_cost_per_image_token": 3e-05,
 "output_cost_per_reasoning_token": 2.5e-06,
 "output_cost_per_token": 2.5e-06,
 "rpm": 100000,
@@ -24715,6 +24754,7 @@
 "max_videos_per_prompt": 10,
 "mode": "image_generation",
 "output_cost_per_image": 0.039,
+"output_cost_per_image_token": 3e-05,
 "output_cost_per_reasoning_token": 2.5e-06,
 "output_cost_per_token": 2.5e-06,
 "rpm": 100000,
@@ -24900,15 +24940,15 @@
 "supports_vision": false
 },
 "global.anthropic.claude-haiku-4-5-20251001-v1:0": {
-"cache_creation_input_token_cost": 1.
-"cache_read_input_token_cost":
-"input_cost_per_token":
+"cache_creation_input_token_cost": 1.25e-06,
+"cache_read_input_token_cost": 1e-07,
+"input_cost_per_token": 1e-06,
 "litellm_provider": "bedrock_converse",
 "max_input_tokens": 200000,
 "max_output_tokens": 64000,
 "max_tokens": 64000,
 "mode": "chat",
-"output_cost_per_token":
+"output_cost_per_token": 5e-06,
 "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
 "supports_assistant_prefill": true,
 "supports_computer_use": true,
@@ -26603,6 +26643,7 @@
 "mode": "chat",
 "output_cost_per_token": 1.68e-06,
 "source": "https://fireworks.ai/pricing",
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -27588,6 +27629,7 @@
 "max_videos_per_prompt": 10,
 "mode": "image_generation",
 "output_cost_per_image": 0.039,
+"output_cost_per_image_token": 3e-05,
 "output_cost_per_reasoning_token": 2.5e-06,
 "output_cost_per_token": 2.5e-06,
 "rpm": 100000,
@@ -28180,6 +28222,7 @@
 "mode": "chat",
 "source": "https://fireworks.ai/pricing",
 "supports_function_calling": true,
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -28664,6 +28707,7 @@
 "output_cost_per_token": 1.2e-06,
 "source": "https://fireworks.ai/models/fireworks/deepseek-v3p2",
 "supports_function_calling": true,
+"supports_reasoning": true,
 "supports_response_schema": true,
 "supports_tool_choice": true
 },
@@ -28766,6 +28810,7 @@
 "source": "https://docs.z.ai/guides/overview/pricing"
 },
 "amazon.nova-2-lite-v1:0": {
+"cache_read_input_token_cost": 7.5e-08,
 "input_cost_per_token": 3e-07,
 "litellm_provider": "bedrock_converse",
 "max_input_tokens": 1000000,
@@ -28782,7 +28827,8 @@
 "supports_vision": true
 },
 "apac.amazon.nova-2-lite-v1:0": {
-"
+"cache_read_input_token_cost": 8.25e-08,
+"input_cost_per_token": 3.3e-07,
 "litellm_provider": "bedrock_converse",
 "max_input_tokens": 1000000,
 "max_output_tokens": 64000,
@@ -28798,7 +28844,8 @@
 "supports_vision": true
 },
 "eu.amazon.nova-2-lite-v1:0": {
-"
+"cache_read_input_token_cost": 8.25e-08,
+"input_cost_per_token": 3.3e-07,
 "litellm_provider": "bedrock_converse",
 "max_input_tokens": 1000000,
 "max_output_tokens": 64000,
@@ -28814,7 +28861,8 @@
 "supports_vision": true
 },
 "us.amazon.nova-2-lite-v1:0": {
-"
+"cache_read_input_token_cost": 8.25e-08,
+"input_cost_per_token": 3.3e-07,
 "litellm_provider": "bedrock_converse",
 "max_input_tokens": 1000000,
 "max_output_tokens": 64000,
@@ -29149,5 +29197,4072 @@
 "supports_prompt_caching": true,
 "supports_response_schema": true,
 "supports_vision": true
+},
+"google.gemma-3-12b-it": {
+"input_cost_per_token": 9e-08,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 2.9e-07,
+"supports_system_messages": true,
+"supports_vision": true
+},
+"google.gemma-3-27b-it": {
+"input_cost_per_token": 2.3e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 3.8e-07,
+"supports_system_messages": true,
+"supports_vision": true
+},
+"google.gemma-3-4b-it": {
+"input_cost_per_token": 4e-08,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 8e-08,
+"supports_system_messages": true,
+"supports_vision": true
+},
+"global.amazon.nova-2-lite-v1:0": {
+"cache_read_input_token_cost": 7.5e-08,
+"input_cost_per_token": 3e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 1000000,
+"max_output_tokens": 64000,
+"max_tokens": 64000,
+"mode": "chat",
+"output_cost_per_token": 2.5e-06,
+"supports_function_calling": true,
+"supports_pdf_input": true,
+"supports_prompt_caching": true,
+"supports_reasoning": true,
+"supports_response_schema": true,
+"supports_video_input": true,
+"supports_vision": true
+},
+"minimax.minimax-m2": {
+"input_cost_per_token": 3e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1.2e-06,
+"supports_system_messages": true
+},
+"mistral.magistral-small-2509": {
+"input_cost_per_token": 5e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1.5e-06,
+"supports_function_calling": true,
+"supports_reasoning": true,
+"supports_system_messages": true
+},
+"mistral.ministral-3-14b-instruct": {
+"input_cost_per_token": 2e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 2e-07,
+"supports_function_calling": true,
+"supports_system_messages": true
+},
+"mistral.ministral-3-3b-instruct": {
+"input_cost_per_token": 1e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1e-07,
+"supports_function_calling": true,
+"supports_system_messages": true
+},
+"mistral.ministral-3-8b-instruct": {
+"input_cost_per_token": 1.5e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1.5e-07,
+"supports_function_calling": true,
+"supports_system_messages": true
+},
+"mistral.mistral-large-3-675b-instruct": {
+"input_cost_per_token": 5e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1.5e-06,
+"supports_function_calling": true,
+"supports_system_messages": true
+},
+"mistral.voxtral-mini-3b-2507": {
+"input_cost_per_token": 4e-08,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 4e-08,
+"supports_audio_input": true,
+"supports_system_messages": true
+},
+"mistral.voxtral-small-24b-2507": {
+"input_cost_per_token": 1e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 3e-07,
+"supports_audio_input": true,
+"supports_system_messages": true
+},
+"moonshot.kimi-k2-thinking": {
+"input_cost_per_token": 6e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 2.5e-06,
+"supports_reasoning": true,
+"supports_system_messages": true
+},
+"nvidia.nemotron-nano-12b-v2": {
+"input_cost_per_token": 2e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 6e-07,
+"supports_system_messages": true,
+"supports_vision": true
+},
+"nvidia.nemotron-nano-9b-v2": {
+"input_cost_per_token": 6e-08,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 2.3e-07,
+"supports_system_messages": true
+},
+"openai.gpt-oss-safeguard-120b": {
+"input_cost_per_token": 1.5e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 6e-07,
+"supports_system_messages": true
+},
+"openai.gpt-oss-safeguard-20b": {
+"input_cost_per_token": 7e-08,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 2e-07,
+"supports_system_messages": true
+},
+"qwen.qwen3-next-80b-a3b": {
+"input_cost_per_token": 1.5e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1.2e-06,
+"supports_function_calling": true,
+"supports_system_messages": true
+},
+"qwen.qwen3-vl-235b-a22b": {
+"input_cost_per_token": 5.3e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 2.66e-06,
+"supports_function_calling": true,
+"supports_system_messages": true,
+"supports_vision": true
+},
+"nvidia_nim/ranking/nvidia/llama-3.2-nv-rerankqa-1b-v2": {
+"input_cost_per_query": 0.0,
+"input_cost_per_token": 0.0,
+"litellm_provider": "nvidia_nim",
+"mode": "rerank",
+"output_cost_per_token": 0.0
+},
+"us.writer.palmyra-x4-v1:0": {
+"input_cost_per_token": 2.5e-06,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1e-05,
+"supports_function_calling": true,
+"supports_pdf_input": true
+},
+"us.writer.palmyra-x5-v1:0": {
+"input_cost_per_token": 6e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 1000000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 6e-06,
+"supports_function_calling": true,
+"supports_pdf_input": true
+},
+"writer.palmyra-x4-v1:0": {
+"input_cost_per_token": 2.5e-06,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 128000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 1e-05,
+"supports_function_calling": true,
+"supports_pdf_input": true
+},
+"writer.palmyra-x5-v1:0": {
+"input_cost_per_token": 6e-07,
+"litellm_provider": "bedrock_converse",
+"max_input_tokens": 1000000,
+"max_output_tokens": 8192,
+"max_tokens": 8192,
+"mode": "chat",
+"output_cost_per_token": 6e-06,
+"supports_function_calling": true,
+"supports_pdf_input": true
+},
+"cerebras/zai-glm-4.6": {
+"input_cost_per_token": 2.25e-06,
+"litellm_provider": "cerebras",
+"max_input_tokens": 128000,
+"max_output_tokens": 128000,
+"max_tokens": 128000,
+"mode": "chat",
+"output_cost_per_token": 2.75e-06,
+"source": "https://www.cerebras.ai/pricing",
+"supports_function_calling": true,
+"supports_reasoning": true,
+"supports_tool_choice": true
+},
+"fireworks_ai/accounts/fireworks/models/qwen3-coder-480b-a35b-instruct": {
+"max_tokens": 262144,
+"max_input_tokens": 262144,
+"max_output_tokens": 262144,
+"input_cost_per_token": 4.5e-07,
+"output_cost_per_token": 1.8e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat",
+"supports_reasoning": true
+},
+"fireworks_ai/accounts/fireworks/models/flux-kontext-pro": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 4e-08,
+"output_cost_per_token": 4e-08,
+"litellm_provider": "fireworks_ai",
+"mode": "image_generation"
+},
+"fireworks_ai/accounts/fireworks/models/SSD-1B": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 1.3e-10,
+"output_cost_per_token": 1.3e-10,
+"litellm_provider": "fireworks_ai",
+"mode": "image_generation"
+},
+"fireworks_ai/accounts/fireworks/models/chronos-hermes-13b-v2": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-13b": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-13b-instruct": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-13b-python": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-34b": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-34b-instruct": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-34b-python": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-70b": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-70b-instruct": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-70b-python": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-7b": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-7b-instruct": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-llama-7b-python": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/code-qwen-1p5-7b": {
+"max_tokens": 65536,
+"max_input_tokens": 65536,
+"max_output_tokens": 65536,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/codegemma-2b": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/codegemma-7b": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/cogito-671b-v2-p1": {
+"max_tokens": 163840,
+"max_input_tokens": 163840,
+"max_output_tokens": 163840,
+"input_cost_per_token": 1.2e-06,
+"output_cost_per_token": 1.2e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-3b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-70b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-8b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/cogito-v1-preview-qwen-14b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/cogito-v1-preview-qwen-32b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/flux-kontext-max": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 8e-08,
+"output_cost_per_token": 8e-08,
+"litellm_provider": "fireworks_ai",
+"mode": "image_generation"
+},
+"fireworks_ai/accounts/fireworks/models/dbrx-instruct": {
+"max_tokens": 32768,
+"max_input_tokens": 32768,
+"max_output_tokens": 32768,
+"input_cost_per_token": 1.2e-06,
+"output_cost_per_token": 1.2e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-1b-base": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-33b-instruct": {
+"max_tokens": 16384,
+"max_input_tokens": 16384,
+"max_output_tokens": 16384,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-base": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-base-v1p5": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-instruct-v1p5": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-lite-base": {
+"max_tokens": 163840,
+"max_input_tokens": 163840,
+"max_output_tokens": 163840,
+"input_cost_per_token": 5e-07,
+"output_cost_per_token": 5e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-lite-instruct": {
+"max_tokens": 163840,
+"max_input_tokens": 163840,
+"max_output_tokens": 163840,
+"input_cost_per_token": 5e-07,
+"output_cost_per_token": 5e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-prover-v2": {
+"max_tokens": 163840,
+"max_input_tokens": 163840,
+"max_output_tokens": 163840,
+"input_cost_per_token": 1.2e-06,
+"output_cost_per_token": 1.2e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-0528-distill-qwen3-8b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-llama-70b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-llama-8b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-14b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-1p5b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-32b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-7b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-v2-lite-chat": {
+"max_tokens": 163840,
+"max_input_tokens": 163840,
+"max_output_tokens": 163840,
+"input_cost_per_token": 5e-07,
+"output_cost_per_token": 5e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/deepseek-v2p5": {
+"max_tokens": 32768,
+"max_input_tokens": 32768,
+"max_output_tokens": 32768,
+"input_cost_per_token": 1.2e-06,
+"output_cost_per_token": 1.2e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/devstral-small-2505": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/dobby-mini-unhinged-plus-llama-3-1-8b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/dobby-unhinged-llama-3-3-70b-new": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/dolphin-2-9-2-qwen2-72b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/dolphin-2p6-mixtral-8x7b": {
+"max_tokens": 32768,
+"max_input_tokens": 32768,
+"max_output_tokens": 32768,
+"input_cost_per_token": 5e-07,
+"output_cost_per_token": 5e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/ernie-4p5-21b-a3b-pt": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/ernie-4p5-300b-a47b-pt": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/fare-20b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/firefunction-v1": {
+"max_tokens": 32768,
+"max_input_tokens": 32768,
+"max_output_tokens": 32768,
+"input_cost_per_token": 5e-07,
+"output_cost_per_token": 5e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/firellava-13b": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/firesearch-ocr-v6": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/fireworks-asr-large": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 0.0,
+"output_cost_per_token": 0.0,
+"litellm_provider": "fireworks_ai",
+"mode": "audio_transcription"
+},
+"fireworks_ai/accounts/fireworks/models/fireworks-asr-v2": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 0.0,
+"output_cost_per_token": 0.0,
+"litellm_provider": "fireworks_ai",
+"mode": "audio_transcription"
+},
+"fireworks_ai/accounts/fireworks/models/flux-1-dev": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/flux-1-dev-controlnet-union": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 1e-09,
+"output_cost_per_token": 1e-09,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/flux-1-dev-fp8": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 5e-10,
+"output_cost_per_token": 5e-10,
+"litellm_provider": "fireworks_ai",
+"mode": "image_generation"
+},
+"fireworks_ai/accounts/fireworks/models/flux-1-schnell": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/flux-1-schnell-fp8": {
+"max_tokens": 4096,
+"max_input_tokens": 4096,
+"max_output_tokens": 4096,
+"input_cost_per_token": 3.5e-10,
+"output_cost_per_token": 3.5e-10,
+"litellm_provider": "fireworks_ai",
+"mode": "image_generation"
+},
+"fireworks_ai/accounts/fireworks/models/gemma-2b-it": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 1e-07,
+"output_cost_per_token": 1e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/gemma-3-27b-it": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 9e-07,
+"output_cost_per_token": 9e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/gemma-7b": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/gemma-7b-it": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/gemma2-9b-it": {
+"max_tokens": 8192,
+"max_input_tokens": 8192,
+"max_output_tokens": 8192,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/glm-4p5v": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 1.2e-06,
+"output_cost_per_token": 1.2e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat",
+"supports_reasoning": true
+},
+"fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-120b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 1.2e-06,
+"output_cost_per_token": 1.2e-06,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-20b": {
+"max_tokens": 131072,
+"max_input_tokens": 131072,
+"max_output_tokens": 131072,
+"input_cost_per_token": 5e-07,
+"output_cost_per_token": 5e-07,
+"litellm_provider": "fireworks_ai",
+"mode": "chat"
+},
+"fireworks_ai/accounts/fireworks/models/hermes-2-pro-mistral-7b": {
+"max_tokens": 32768,
+"max_input_tokens": 32768,
+"max_output_tokens": 32768,
+"input_cost_per_token": 2e-07,
+"output_cost_per_token": 2e-07,
+"litellm_provider": "fireworks_ai",
|
|
30117
|
+
"mode": "chat"
|
|
30118
|
+
},
|
|
30119
|
+
"fireworks_ai/accounts/fireworks/models/internvl3-38b": {
|
|
30120
|
+
"max_tokens": 16384,
|
|
30121
|
+
"max_input_tokens": 16384,
|
|
30122
|
+
"max_output_tokens": 16384,
|
|
30123
|
+
"input_cost_per_token": 9e-07,
|
|
30124
|
+
"output_cost_per_token": 9e-07,
|
|
30125
|
+
"litellm_provider": "fireworks_ai",
|
|
30126
|
+
"mode": "chat"
|
|
30127
|
+
},
|
|
30128
|
+
"fireworks_ai/accounts/fireworks/models/internvl3-78b": {
|
|
30129
|
+
"max_tokens": 16384,
|
|
30130
|
+
"max_input_tokens": 16384,
|
|
30131
|
+
"max_output_tokens": 16384,
|
|
30132
|
+
"input_cost_per_token": 9e-07,
|
|
30133
|
+
"output_cost_per_token": 9e-07,
|
|
30134
|
+
"litellm_provider": "fireworks_ai",
|
|
30135
|
+
"mode": "chat"
|
|
30136
|
+
},
|
|
30137
|
+
"fireworks_ai/accounts/fireworks/models/internvl3-8b": {
|
|
30138
|
+
"max_tokens": 16384,
|
|
30139
|
+
"max_input_tokens": 16384,
|
|
30140
|
+
"max_output_tokens": 16384,
|
|
30141
|
+
"input_cost_per_token": 2e-07,
|
|
30142
|
+
"output_cost_per_token": 2e-07,
|
|
30143
|
+
"litellm_provider": "fireworks_ai",
|
|
30144
|
+
"mode": "chat"
|
|
30145
|
+
},
|
|
30146
|
+
"fireworks_ai/accounts/fireworks/models/japanese-stable-diffusion-xl": {
|
|
30147
|
+
"max_tokens": 4096,
|
|
30148
|
+
"max_input_tokens": 4096,
|
|
30149
|
+
"max_output_tokens": 4096,
|
|
30150
|
+
"input_cost_per_token": 1.3e-10,
|
|
30151
|
+
"output_cost_per_token": 1.3e-10,
|
|
30152
|
+
"litellm_provider": "fireworks_ai",
|
|
30153
|
+
"mode": "image_generation"
|
|
30154
|
+
},
|
|
30155
|
+
"fireworks_ai/accounts/fireworks/models/kat-coder": {
|
|
30156
|
+
"max_tokens": 262144,
|
|
30157
|
+
"max_input_tokens": 262144,
|
|
30158
|
+
"max_output_tokens": 262144,
|
|
30159
|
+
"input_cost_per_token": 9e-07,
|
|
30160
|
+
"output_cost_per_token": 9e-07,
|
|
30161
|
+
"litellm_provider": "fireworks_ai",
|
|
30162
|
+
"mode": "chat"
|
|
30163
|
+
},
|
|
30164
|
+
"fireworks_ai/accounts/fireworks/models/kat-dev-32b": {
|
|
30165
|
+
"max_tokens": 131072,
|
|
30166
|
+
"max_input_tokens": 131072,
|
|
30167
|
+
"max_output_tokens": 131072,
|
|
30168
|
+
"input_cost_per_token": 9e-07,
|
|
30169
|
+
"output_cost_per_token": 9e-07,
|
|
30170
|
+
"litellm_provider": "fireworks_ai",
|
|
30171
|
+
"mode": "chat"
|
|
30172
|
+
},
|
|
30173
|
+
"fireworks_ai/accounts/fireworks/models/kat-dev-72b-exp": {
|
|
30174
|
+
"max_tokens": 131072,
|
|
30175
|
+
"max_input_tokens": 131072,
|
|
30176
|
+
"max_output_tokens": 131072,
|
|
30177
|
+
"input_cost_per_token": 9e-07,
|
|
30178
|
+
"output_cost_per_token": 9e-07,
|
|
30179
|
+
"litellm_provider": "fireworks_ai",
|
|
30180
|
+
"mode": "chat"
|
|
30181
|
+
},
|
|
30182
|
+
"fireworks_ai/accounts/fireworks/models/llama-guard-2-8b": {
|
|
30183
|
+
"max_tokens": 8192,
|
|
30184
|
+
"max_input_tokens": 8192,
|
|
30185
|
+
"max_output_tokens": 8192,
|
|
30186
|
+
"input_cost_per_token": 2e-07,
|
|
30187
|
+
"output_cost_per_token": 2e-07,
|
|
30188
|
+
"litellm_provider": "fireworks_ai",
|
|
30189
|
+
"mode": "chat"
|
|
30190
|
+
},
|
|
30191
|
+
"fireworks_ai/accounts/fireworks/models/llama-guard-3-1b": {
|
|
30192
|
+
"max_tokens": 131072,
|
|
30193
|
+
"max_input_tokens": 131072,
|
|
30194
|
+
"max_output_tokens": 131072,
|
|
30195
|
+
"input_cost_per_token": 1e-07,
|
|
30196
|
+
"output_cost_per_token": 1e-07,
|
|
30197
|
+
"litellm_provider": "fireworks_ai",
|
|
30198
|
+
"mode": "chat"
|
|
30199
|
+
},
|
|
30200
|
+
"fireworks_ai/accounts/fireworks/models/llama-guard-3-8b": {
|
|
30201
|
+
"max_tokens": 131072,
|
|
30202
|
+
"max_input_tokens": 131072,
|
|
30203
|
+
"max_output_tokens": 131072,
|
|
30204
|
+
"input_cost_per_token": 2e-07,
|
|
30205
|
+
"output_cost_per_token": 2e-07,
|
|
30206
|
+
"litellm_provider": "fireworks_ai",
|
|
30207
|
+
"mode": "chat"
|
|
30208
|
+
},
|
|
30209
|
+
"fireworks_ai/accounts/fireworks/models/llama-v2-13b": {
|
|
30210
|
+
"max_tokens": 4096,
|
|
30211
|
+
"max_input_tokens": 4096,
|
|
30212
|
+
"max_output_tokens": 4096,
|
|
30213
|
+
"input_cost_per_token": 2e-07,
|
|
30214
|
+
"output_cost_per_token": 2e-07,
|
|
30215
|
+
"litellm_provider": "fireworks_ai",
|
|
30216
|
+
"mode": "chat"
|
|
30217
|
+
},
|
|
30218
|
+
"fireworks_ai/accounts/fireworks/models/llama-v2-13b-chat": {
|
|
30219
|
+
"max_tokens": 4096,
|
|
30220
|
+
"max_input_tokens": 4096,
|
|
30221
|
+
"max_output_tokens": 4096,
|
|
30222
|
+
"input_cost_per_token": 2e-07,
|
|
30223
|
+
"output_cost_per_token": 2e-07,
|
|
30224
|
+
"litellm_provider": "fireworks_ai",
|
|
30225
|
+
"mode": "chat"
|
|
30226
|
+
},
|
|
30227
|
+
"fireworks_ai/accounts/fireworks/models/llama-v2-70b": {
|
|
30228
|
+
"max_tokens": 4096,
|
|
30229
|
+
"max_input_tokens": 4096,
|
|
30230
|
+
"max_output_tokens": 4096,
|
|
30231
|
+
"input_cost_per_token": 1e-07,
|
|
30232
|
+
"output_cost_per_token": 1e-07,
|
|
30233
|
+
"litellm_provider": "fireworks_ai",
|
|
30234
|
+
"mode": "chat"
|
|
30235
|
+
},
|
|
30236
|
+
"fireworks_ai/accounts/fireworks/models/llama-v2-70b-chat": {
|
|
30237
|
+
"max_tokens": 2048,
|
|
30238
|
+
"max_input_tokens": 2048,
|
|
30239
|
+
"max_output_tokens": 2048,
|
|
30240
|
+
"input_cost_per_token": 9e-07,
|
|
30241
|
+
"output_cost_per_token": 9e-07,
|
|
30242
|
+
"litellm_provider": "fireworks_ai",
|
|
30243
|
+
"mode": "chat"
|
|
30244
|
+
},
|
|
30245
|
+
"fireworks_ai/accounts/fireworks/models/llama-v2-7b": {
|
|
30246
|
+
"max_tokens": 4096,
|
|
30247
|
+
"max_input_tokens": 4096,
|
|
30248
|
+
"max_output_tokens": 4096,
|
|
30249
|
+
"input_cost_per_token": 2e-07,
|
|
30250
|
+
"output_cost_per_token": 2e-07,
|
|
30251
|
+
"litellm_provider": "fireworks_ai",
|
|
30252
|
+
"mode": "chat"
|
|
30253
|
+
},
|
|
30254
|
+
"fireworks_ai/accounts/fireworks/models/llama-v2-7b-chat": {
|
|
30255
|
+
"max_tokens": 4096,
|
|
30256
|
+
"max_input_tokens": 4096,
|
|
30257
|
+
"max_output_tokens": 4096,
|
|
30258
|
+
"input_cost_per_token": 2e-07,
|
|
30259
|
+
"output_cost_per_token": 2e-07,
|
|
30260
|
+
"litellm_provider": "fireworks_ai",
|
|
30261
|
+
"mode": "chat"
|
|
30262
|
+
},
|
|
30263
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct": {
|
|
30264
|
+
"max_tokens": 8192,
|
|
30265
|
+
"max_input_tokens": 8192,
|
|
30266
|
+
"max_output_tokens": 8192,
|
|
30267
|
+
"input_cost_per_token": 9e-07,
|
|
30268
|
+
"output_cost_per_token": 9e-07,
|
|
30269
|
+
"litellm_provider": "fireworks_ai",
|
|
30270
|
+
"mode": "chat"
|
|
30271
|
+
},
|
|
30272
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct-hf": {
|
|
30273
|
+
"max_tokens": 8192,
|
|
30274
|
+
"max_input_tokens": 8192,
|
|
30275
|
+
"max_output_tokens": 8192,
|
|
30276
|
+
"input_cost_per_token": 9e-07,
|
|
30277
|
+
"output_cost_per_token": 9e-07,
|
|
30278
|
+
"litellm_provider": "fireworks_ai",
|
|
30279
|
+
"mode": "chat"
|
|
30280
|
+
},
|
|
30281
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3-8b": {
|
|
30282
|
+
"max_tokens": 8192,
|
|
30283
|
+
"max_input_tokens": 8192,
|
|
30284
|
+
"max_output_tokens": 8192,
|
|
30285
|
+
"input_cost_per_token": 2e-07,
|
|
30286
|
+
"output_cost_per_token": 2e-07,
|
|
30287
|
+
"litellm_provider": "fireworks_ai",
|
|
30288
|
+
"mode": "chat"
|
|
30289
|
+
},
|
|
30290
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3-8b-instruct-hf": {
|
|
30291
|
+
"max_tokens": 8192,
|
|
30292
|
+
"max_input_tokens": 8192,
|
|
30293
|
+
"max_output_tokens": 8192,
|
|
30294
|
+
"input_cost_per_token": 2e-07,
|
|
30295
|
+
"output_cost_per_token": 2e-07,
|
|
30296
|
+
"litellm_provider": "fireworks_ai",
|
|
30297
|
+
"mode": "chat"
|
|
30298
|
+
},
|
|
30299
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p1-405b-instruct-long": {
|
|
30300
|
+
"max_tokens": 4096,
|
|
30301
|
+
"max_input_tokens": 4096,
|
|
30302
|
+
"max_output_tokens": 4096,
|
|
30303
|
+
"input_cost_per_token": 1e-07,
|
|
30304
|
+
"output_cost_per_token": 1e-07,
|
|
30305
|
+
"litellm_provider": "fireworks_ai",
|
|
30306
|
+
"mode": "chat"
|
|
30307
|
+
},
|
|
30308
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct": {
|
|
30309
|
+
"max_tokens": 131072,
|
|
30310
|
+
"max_input_tokens": 131072,
|
|
30311
|
+
"max_output_tokens": 131072,
|
|
30312
|
+
"input_cost_per_token": 9e-07,
|
|
30313
|
+
"output_cost_per_token": 9e-07,
|
|
30314
|
+
"litellm_provider": "fireworks_ai",
|
|
30315
|
+
"mode": "chat"
|
|
30316
|
+
},
|
|
30317
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct-1b": {
|
|
30318
|
+
"max_tokens": 4096,
|
|
30319
|
+
"max_input_tokens": 4096,
|
|
30320
|
+
"max_output_tokens": 4096,
|
|
30321
|
+
"input_cost_per_token": 1e-07,
|
|
30322
|
+
"output_cost_per_token": 1e-07,
|
|
30323
|
+
"litellm_provider": "fireworks_ai",
|
|
30324
|
+
"mode": "chat"
|
|
30325
|
+
},
|
|
30326
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p1-nemotron-70b-instruct": {
|
|
30327
|
+
"max_tokens": 131072,
|
|
30328
|
+
"max_input_tokens": 131072,
|
|
30329
|
+
"max_output_tokens": 131072,
|
|
30330
|
+
"input_cost_per_token": 9e-07,
|
|
30331
|
+
"output_cost_per_token": 9e-07,
|
|
30332
|
+
"litellm_provider": "fireworks_ai",
|
|
30333
|
+
"mode": "chat"
|
|
30334
|
+
},
|
|
30335
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p2-1b": {
|
|
30336
|
+
"max_tokens": 131072,
|
|
30337
|
+
"max_input_tokens": 131072,
|
|
30338
|
+
"max_output_tokens": 131072,
|
|
30339
|
+
"input_cost_per_token": 1e-07,
|
|
30340
|
+
"output_cost_per_token": 1e-07,
|
|
30341
|
+
"litellm_provider": "fireworks_ai",
|
|
30342
|
+
"mode": "chat"
|
|
30343
|
+
},
|
|
30344
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p2-3b": {
|
|
30345
|
+
"max_tokens": 131072,
|
|
30346
|
+
"max_input_tokens": 131072,
|
|
30347
|
+
"max_output_tokens": 131072,
|
|
30348
|
+
"input_cost_per_token": 1e-07,
|
|
30349
|
+
"output_cost_per_token": 1e-07,
|
|
30350
|
+
"litellm_provider": "fireworks_ai",
|
|
30351
|
+
"mode": "chat"
|
|
30352
|
+
},
|
|
30353
|
+
"fireworks_ai/accounts/fireworks/models/llama-v3p3-70b-instruct": {
|
|
30354
|
+
"max_tokens": 131072,
|
|
30355
|
+
"max_input_tokens": 131072,
|
|
30356
|
+
"max_output_tokens": 131072,
|
|
30357
|
+
"input_cost_per_token": 9e-07,
|
|
30358
|
+
"output_cost_per_token": 9e-07,
|
|
30359
|
+
"litellm_provider": "fireworks_ai",
|
|
30360
|
+
"mode": "chat"
|
|
30361
|
+
},
|
|
30362
|
+
"fireworks_ai/accounts/fireworks/models/llamaguard-7b": {
|
|
30363
|
+
"max_tokens": 4096,
|
|
30364
|
+
"max_input_tokens": 4096,
|
|
30365
|
+
"max_output_tokens": 4096,
|
|
30366
|
+
"input_cost_per_token": 2e-07,
|
|
30367
|
+
"output_cost_per_token": 2e-07,
|
|
30368
|
+
"litellm_provider": "fireworks_ai",
|
|
30369
|
+
"mode": "chat"
|
|
30370
|
+
},
|
|
30371
|
+
"fireworks_ai/accounts/fireworks/models/llava-yi-34b": {
|
|
30372
|
+
"max_tokens": 4096,
|
|
30373
|
+
"max_input_tokens": 4096,
|
|
30374
|
+
"max_output_tokens": 4096,
|
|
30375
|
+
"input_cost_per_token": 9e-07,
|
|
30376
|
+
"output_cost_per_token": 9e-07,
|
|
30377
|
+
"litellm_provider": "fireworks_ai",
|
|
30378
|
+
"mode": "chat"
|
|
30379
|
+
},
|
|
30380
|
+
"fireworks_ai/accounts/fireworks/models/minimax-m1-80k": {
|
|
30381
|
+
"max_tokens": 4096,
|
|
30382
|
+
"max_input_tokens": 4096,
|
|
30383
|
+
"max_output_tokens": 4096,
|
|
30384
|
+
"input_cost_per_token": 1e-07,
|
|
30385
|
+
"output_cost_per_token": 1e-07,
|
|
30386
|
+
"litellm_provider": "fireworks_ai",
|
|
30387
|
+
"mode": "chat"
|
|
30388
|
+
},
|
|
30389
|
+
"fireworks_ai/accounts/fireworks/models/minimax-m2": {
|
|
30390
|
+
"max_tokens": 4096,
|
|
30391
|
+
"max_input_tokens": 4096,
|
|
30392
|
+
"max_output_tokens": 4096,
|
|
30393
|
+
"input_cost_per_token": 3e-07,
|
|
30394
|
+
"output_cost_per_token": 1.2e-06,
|
|
30395
|
+
"litellm_provider": "fireworks_ai",
|
|
30396
|
+
"mode": "chat"
|
|
30397
|
+
},
|
|
30398
|
+
"fireworks_ai/accounts/fireworks/models/ministral-3-14b-instruct-2512": {
|
|
30399
|
+
"max_tokens": 256000,
|
|
30400
|
+
"max_input_tokens": 256000,
|
|
30401
|
+
"max_output_tokens": 256000,
|
|
30402
|
+
"input_cost_per_token": 2e-07,
|
|
30403
|
+
"output_cost_per_token": 2e-07,
|
|
30404
|
+
"litellm_provider": "fireworks_ai",
|
|
30405
|
+
"mode": "chat"
|
|
30406
|
+
},
|
|
30407
|
+
"fireworks_ai/accounts/fireworks/models/ministral-3-3b-instruct-2512": {
|
|
30408
|
+
"max_tokens": 256000,
|
|
30409
|
+
"max_input_tokens": 256000,
|
|
30410
|
+
"max_output_tokens": 256000,
|
|
30411
|
+
"input_cost_per_token": 1e-07,
|
|
30412
|
+
"output_cost_per_token": 1e-07,
|
|
30413
|
+
"litellm_provider": "fireworks_ai",
|
|
30414
|
+
"mode": "chat"
|
|
30415
|
+
},
|
|
30416
|
+
"fireworks_ai/accounts/fireworks/models/ministral-3-8b-instruct-2512": {
|
|
30417
|
+
"max_tokens": 256000,
|
|
30418
|
+
"max_input_tokens": 256000,
|
|
30419
|
+
"max_output_tokens": 256000,
|
|
30420
|
+
"input_cost_per_token": 2e-07,
|
|
30421
|
+
"output_cost_per_token": 2e-07,
|
|
30422
|
+
"litellm_provider": "fireworks_ai",
|
|
30423
|
+
"mode": "chat"
|
|
30424
|
+
},
|
|
30425
|
+
"fireworks_ai/accounts/fireworks/models/mistral-7b": {
|
|
30426
|
+
"max_tokens": 32768,
|
|
30427
|
+
"max_input_tokens": 32768,
|
|
30428
|
+
"max_output_tokens": 32768,
|
|
30429
|
+
"input_cost_per_token": 2e-07,
|
|
30430
|
+
"output_cost_per_token": 2e-07,
|
|
30431
|
+
"litellm_provider": "fireworks_ai",
|
|
30432
|
+
"mode": "chat"
|
|
30433
|
+
},
|
|
30434
|
+
"fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-4k": {
|
|
30435
|
+
"max_tokens": 32768,
|
|
30436
|
+
"max_input_tokens": 32768,
|
|
30437
|
+
"max_output_tokens": 32768,
|
|
30438
|
+
"input_cost_per_token": 2e-07,
|
|
30439
|
+
"output_cost_per_token": 2e-07,
|
|
30440
|
+
"litellm_provider": "fireworks_ai",
|
|
30441
|
+
"mode": "chat"
|
|
30442
|
+
},
|
|
30443
|
+
"fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-v0p2": {
|
|
30444
|
+
"max_tokens": 32768,
|
|
30445
|
+
"max_input_tokens": 32768,
|
|
30446
|
+
"max_output_tokens": 32768,
|
|
30447
|
+
"input_cost_per_token": 2e-07,
|
|
30448
|
+
"output_cost_per_token": 2e-07,
|
|
30449
|
+
"litellm_provider": "fireworks_ai",
|
|
30450
|
+
"mode": "chat"
|
|
30451
|
+
},
|
|
30452
|
+
"fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-v3": {
|
|
30453
|
+
"max_tokens": 32768,
|
|
30454
|
+
"max_input_tokens": 32768,
|
|
30455
|
+
"max_output_tokens": 32768,
|
|
30456
|
+
"input_cost_per_token": 2e-07,
|
|
30457
|
+
"output_cost_per_token": 2e-07,
|
|
30458
|
+
"litellm_provider": "fireworks_ai",
|
|
30459
|
+
"mode": "chat"
|
|
30460
|
+
},
|
|
30461
|
+
"fireworks_ai/accounts/fireworks/models/mistral-7b-v0p2": {
|
|
30462
|
+
"max_tokens": 32768,
|
|
30463
|
+
"max_input_tokens": 32768,
|
|
30464
|
+
"max_output_tokens": 32768,
|
|
30465
|
+
"input_cost_per_token": 2e-07,
|
|
30466
|
+
"output_cost_per_token": 2e-07,
|
|
30467
|
+
"litellm_provider": "fireworks_ai",
|
|
30468
|
+
"mode": "chat"
|
|
30469
|
+
},
|
|
30470
|
+
"fireworks_ai/accounts/fireworks/models/mistral-large-3-fp8": {
|
|
30471
|
+
"max_tokens": 256000,
|
|
30472
|
+
"max_input_tokens": 256000,
|
|
30473
|
+
"max_output_tokens": 256000,
|
|
30474
|
+
"input_cost_per_token": 1.2e-06,
|
|
30475
|
+
"output_cost_per_token": 1.2e-06,
|
|
30476
|
+
"litellm_provider": "fireworks_ai",
|
|
30477
|
+
"mode": "chat"
|
|
30478
|
+
},
|
|
30479
|
+
"fireworks_ai/accounts/fireworks/models/mistral-nemo-base-2407": {
|
|
30480
|
+
"max_tokens": 128000,
|
|
30481
|
+
"max_input_tokens": 128000,
|
|
30482
|
+
"max_output_tokens": 128000,
|
|
30483
|
+
"input_cost_per_token": 2e-07,
|
|
30484
|
+
"output_cost_per_token": 2e-07,
|
|
30485
|
+
"litellm_provider": "fireworks_ai",
|
|
30486
|
+
"mode": "chat"
|
|
30487
|
+
},
|
|
30488
|
+
"fireworks_ai/accounts/fireworks/models/mistral-nemo-instruct-2407": {
|
|
30489
|
+
"max_tokens": 128000,
|
|
30490
|
+
"max_input_tokens": 128000,
|
|
30491
|
+
"max_output_tokens": 128000,
|
|
30492
|
+
"input_cost_per_token": 2e-07,
|
|
30493
|
+
"output_cost_per_token": 2e-07,
|
|
30494
|
+
"litellm_provider": "fireworks_ai",
|
|
30495
|
+
"mode": "chat"
|
|
30496
|
+
},
|
|
30497
|
+
"fireworks_ai/accounts/fireworks/models/mistral-small-24b-instruct-2501": {
|
|
30498
|
+
"max_tokens": 32768,
|
|
30499
|
+
"max_input_tokens": 32768,
|
|
30500
|
+
"max_output_tokens": 32768,
|
|
30501
|
+
"input_cost_per_token": 9e-07,
|
|
30502
|
+
"output_cost_per_token": 9e-07,
|
|
30503
|
+
"litellm_provider": "fireworks_ai",
|
|
30504
|
+
"mode": "chat"
|
|
30505
|
+
},
|
|
30506
|
+
"fireworks_ai/accounts/fireworks/models/mixtral-8x22b": {
|
|
30507
|
+
"max_tokens": 65536,
|
|
30508
|
+
"max_input_tokens": 65536,
|
|
30509
|
+
"max_output_tokens": 65536,
|
|
30510
|
+
"input_cost_per_token": 1.2e-06,
|
|
30511
|
+
"output_cost_per_token": 1.2e-06,
|
|
30512
|
+
"litellm_provider": "fireworks_ai",
|
|
30513
|
+
"mode": "chat"
|
|
30514
|
+
},
|
|
30515
|
+
"fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct": {
|
|
30516
|
+
"max_tokens": 65536,
|
|
30517
|
+
"max_input_tokens": 65536,
|
|
30518
|
+
"max_output_tokens": 65536,
|
|
30519
|
+
"input_cost_per_token": 1.2e-06,
|
|
30520
|
+
"output_cost_per_token": 1.2e-06,
|
|
30521
|
+
"litellm_provider": "fireworks_ai",
|
|
30522
|
+
"mode": "chat"
|
|
30523
|
+
},
|
|
30524
|
+
"fireworks_ai/accounts/fireworks/models/mixtral-8x7b": {
|
|
30525
|
+
"max_tokens": 32768,
|
|
30526
|
+
"max_input_tokens": 32768,
|
|
30527
|
+
"max_output_tokens": 32768,
|
|
30528
|
+
"input_cost_per_token": 5e-07,
|
|
30529
|
+
"output_cost_per_token": 5e-07,
|
|
30530
|
+
"litellm_provider": "fireworks_ai",
|
|
30531
|
+
"mode": "chat"
|
|
30532
|
+
},
|
|
30533
|
+
"fireworks_ai/accounts/fireworks/models/mixtral-8x7b-instruct": {
|
|
30534
|
+
"max_tokens": 32768,
|
|
30535
|
+
"max_input_tokens": 32768,
|
|
30536
|
+
"max_output_tokens": 32768,
|
|
30537
|
+
"input_cost_per_token": 5e-07,
|
|
30538
|
+
"output_cost_per_token": 5e-07,
|
|
30539
|
+
"litellm_provider": "fireworks_ai",
|
|
30540
|
+
"mode": "chat"
|
|
30541
|
+
},
|
|
30542
|
+
"fireworks_ai/accounts/fireworks/models/mixtral-8x7b-instruct-hf": {
|
|
30543
|
+
"max_tokens": 32768,
|
|
30544
|
+
"max_input_tokens": 32768,
|
|
30545
|
+
"max_output_tokens": 32768,
|
|
30546
|
+
"input_cost_per_token": 5e-07,
|
|
30547
|
+
"output_cost_per_token": 5e-07,
|
|
30548
|
+
"litellm_provider": "fireworks_ai",
|
|
30549
|
+
"mode": "chat"
|
|
30550
|
+
},
|
|
30551
|
+
"fireworks_ai/accounts/fireworks/models/mythomax-l2-13b": {
|
|
30552
|
+
"max_tokens": 4096,
|
|
30553
|
+
"max_input_tokens": 4096,
|
|
30554
|
+
"max_output_tokens": 4096,
|
|
30555
|
+
"input_cost_per_token": 2e-07,
|
|
30556
|
+
"output_cost_per_token": 2e-07,
|
|
30557
|
+
"litellm_provider": "fireworks_ai",
|
|
30558
|
+
"mode": "chat"
|
|
30559
|
+
},
|
|
30560
|
+
"fireworks_ai/accounts/fireworks/models/nemotron-nano-v2-12b-vl": {
|
|
30561
|
+
"max_tokens": 4096,
|
|
30562
|
+
"max_input_tokens": 4096,
|
|
30563
|
+
"max_output_tokens": 4096,
|
|
30564
|
+
"input_cost_per_token": 1e-07,
|
|
30565
|
+
"output_cost_per_token": 1e-07,
|
|
30566
|
+
"litellm_provider": "fireworks_ai",
|
|
30567
|
+
"mode": "chat"
|
|
30568
|
+
},
|
|
30569
|
+
"fireworks_ai/accounts/fireworks/models/nous-capybara-7b-v1p9": {
|
|
30570
|
+
"max_tokens": 32768,
|
|
30571
|
+
"max_input_tokens": 32768,
|
|
30572
|
+
"max_output_tokens": 32768,
|
|
30573
|
+
"input_cost_per_token": 2e-07,
|
|
30574
|
+
"output_cost_per_token": 2e-07,
|
|
30575
|
+
"litellm_provider": "fireworks_ai",
|
|
30576
|
+
"mode": "chat"
|
|
30577
|
+
},
|
|
30578
|
+
"fireworks_ai/accounts/fireworks/models/nous-hermes-2-mixtral-8x7b-dpo": {
|
|
30579
|
+
"max_tokens": 32768,
|
|
30580
|
+
"max_input_tokens": 32768,
|
|
30581
|
+
"max_output_tokens": 32768,
|
|
30582
|
+
"input_cost_per_token": 5e-07,
|
|
30583
|
+
"output_cost_per_token": 5e-07,
|
|
30584
|
+
"litellm_provider": "fireworks_ai",
|
|
30585
|
+
"mode": "chat"
|
|
30586
|
+
},
|
|
30587
|
+
"fireworks_ai/accounts/fireworks/models/nous-hermes-2-yi-34b": {
|
|
30588
|
+
"max_tokens": 4096,
|
|
30589
|
+
"max_input_tokens": 4096,
|
|
30590
|
+
"max_output_tokens": 4096,
|
|
30591
|
+
"input_cost_per_token": 9e-07,
|
|
30592
|
+
"output_cost_per_token": 9e-07,
|
|
30593
|
+
"litellm_provider": "fireworks_ai",
|
|
30594
|
+
"mode": "chat"
|
|
30595
|
+
},
|
|
30596
|
+
"fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-13b": {
|
|
30597
|
+
"max_tokens": 4096,
|
|
30598
|
+
"max_input_tokens": 4096,
|
|
30599
|
+
"max_output_tokens": 4096,
|
|
30600
|
+
"input_cost_per_token": 2e-07,
|
|
30601
|
+
"output_cost_per_token": 2e-07,
|
|
30602
|
+
"litellm_provider": "fireworks_ai",
|
|
30603
|
+
"mode": "chat"
|
|
30604
|
+
},
|
|
30605
|
+
"fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-70b": {
|
|
30606
|
+
"max_tokens": 4096,
|
|
30607
|
+
"max_input_tokens": 4096,
|
|
30608
|
+
"max_output_tokens": 4096,
|
|
30609
|
+
"input_cost_per_token": 9e-07,
|
|
30610
|
+
"output_cost_per_token": 9e-07,
|
|
30611
|
+
"litellm_provider": "fireworks_ai",
|
|
30612
|
+
"mode": "chat"
|
|
30613
|
+
},
|
|
30614
|
+
"fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-7b": {
|
|
30615
|
+
"max_tokens": 4096,
|
|
30616
|
+
"max_input_tokens": 4096,
|
|
30617
|
+
"max_output_tokens": 4096,
|
|
30618
|
+
"input_cost_per_token": 2e-07,
|
|
30619
|
+
"output_cost_per_token": 2e-07,
|
|
30620
|
+
"litellm_provider": "fireworks_ai",
|
|
30621
|
+
"mode": "chat"
|
|
30622
|
+
},
|
|
30623
|
+
"fireworks_ai/accounts/fireworks/models/nvidia-nemotron-nano-12b-v2": {
|
|
30624
|
+
"max_tokens": 131072,
|
|
30625
|
+
"max_input_tokens": 131072,
|
|
30626
|
+
"max_output_tokens": 131072,
|
|
30627
|
+
"input_cost_per_token": 2e-07,
|
|
30628
|
+
"output_cost_per_token": 2e-07,
|
|
30629
|
+
"litellm_provider": "fireworks_ai",
|
|
30630
|
+
"mode": "chat"
|
|
30631
|
+
},
|
|
30632
|
+
"fireworks_ai/accounts/fireworks/models/nvidia-nemotron-nano-9b-v2": {
|
|
30633
|
+
"max_tokens": 131072,
|
|
30634
|
+
"max_input_tokens": 131072,
|
|
30635
|
+
"max_output_tokens": 131072,
|
|
30636
|
+
"input_cost_per_token": 2e-07,
|
|
30637
|
+
"output_cost_per_token": 2e-07,
|
|
30638
|
+
"litellm_provider": "fireworks_ai",
|
|
30639
|
+
"mode": "chat"
|
|
30640
|
+
},
|
|
30641
|
+
"fireworks_ai/accounts/fireworks/models/openchat-3p5-0106-7b": {
|
|
30642
|
+
"max_tokens": 8192,
|
|
30643
|
+
"max_input_tokens": 8192,
|
|
30644
|
+
"max_output_tokens": 8192,
|
|
30645
|
+
"input_cost_per_token": 2e-07,
|
|
30646
|
+
"output_cost_per_token": 2e-07,
|
|
30647
|
+
"litellm_provider": "fireworks_ai",
|
|
30648
|
+
"mode": "chat"
|
|
30649
|
+
},
|
|
30650
|
+
"fireworks_ai/accounts/fireworks/models/openhermes-2-mistral-7b": {
|
|
30651
|
+
"max_tokens": 32768,
|
|
30652
|
+
"max_input_tokens": 32768,
|
|
30653
|
+
"max_output_tokens": 32768,
|
|
30654
|
+
"input_cost_per_token": 2e-07,
|
|
30655
|
+
"output_cost_per_token": 2e-07,
|
|
30656
|
+
"litellm_provider": "fireworks_ai",
|
|
30657
|
+
"mode": "chat"
|
|
30658
|
+
},
|
|
30659
|
+
"fireworks_ai/accounts/fireworks/models/openhermes-2p5-mistral-7b": {
|
|
30660
|
+
"max_tokens": 32768,
|
|
30661
|
+
"max_input_tokens": 32768,
|
|
30662
|
+
"max_output_tokens": 32768,
|
|
30663
|
+
"input_cost_per_token": 2e-07,
|
|
30664
|
+
"output_cost_per_token": 2e-07,
|
|
30665
|
+
"litellm_provider": "fireworks_ai",
|
|
30666
|
+
"mode": "chat"
|
|
30667
|
+
},
|
|
30668
|
+
"fireworks_ai/accounts/fireworks/models/openorca-7b": {
|
|
30669
|
+
"max_tokens": 32768,
|
|
30670
|
+
"max_input_tokens": 32768,
|
|
30671
|
+
"max_output_tokens": 32768,
|
|
30672
|
+
"input_cost_per_token": 2e-07,
|
|
30673
|
+
"output_cost_per_token": 2e-07,
|
|
30674
|
+
"litellm_provider": "fireworks_ai",
|
|
30675
|
+
"mode": "chat"
|
|
30676
|
+
},
|
|
30677
|
+
"fireworks_ai/accounts/fireworks/models/phi-2-3b": {
|
|
30678
|
+
"max_tokens": 2048,
|
|
30679
|
+
"max_input_tokens": 2048,
|
|
30680
|
+
"max_output_tokens": 2048,
|
|
30681
|
+
"input_cost_per_token": 1e-07,
|
|
30682
|
+
"output_cost_per_token": 1e-07,
|
|
30683
|
+
"litellm_provider": "fireworks_ai",
|
|
30684
|
+
"mode": "chat"
|
|
30685
|
+
},
|
|
30686
|
+
"fireworks_ai/accounts/fireworks/models/phi-3-mini-128k-instruct": {
|
|
30687
|
+
"max_tokens": 131072,
|
|
30688
|
+
"max_input_tokens": 131072,
|
|
30689
|
+
"max_output_tokens": 131072,
|
|
30690
|
+
"input_cost_per_token": 1e-07,
|
|
30691
|
+
"output_cost_per_token": 1e-07,
|
|
30692
|
+
"litellm_provider": "fireworks_ai",
|
|
30693
|
+
"mode": "chat"
|
|
30694
|
+
},
|
|
30695
|
+
"fireworks_ai/accounts/fireworks/models/phi-3-vision-128k-instruct": {
|
|
30696
|
+
"max_tokens": 32064,
|
|
30697
|
+
"max_input_tokens": 32064,
|
|
30698
|
+
"max_output_tokens": 32064,
|
|
30699
|
+
"input_cost_per_token": 2e-07,
|
|
30700
|
+
"output_cost_per_token": 2e-07,
|
|
30701
|
+
"litellm_provider": "fireworks_ai",
|
|
30702
|
+
"mode": "chat"
|
|
30703
|
+
},
|
|
30704
|
+
"fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-python-v1": {
|
|
30705
|
+
"max_tokens": 16384,
|
|
30706
|
+
"max_input_tokens": 16384,
|
|
30707
|
+
"max_output_tokens": 16384,
|
|
30708
|
+
"input_cost_per_token": 9e-07,
|
|
30709
|
+
"output_cost_per_token": 9e-07,
|
|
30710
|
+
"litellm_provider": "fireworks_ai",
|
|
30711
|
+
"mode": "chat"
|
|
30712
|
+
},
|
|
30713
|
+
"fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-v1": {
|
|
30714
|
+
"max_tokens": 16384,
|
|
30715
|
+
"max_input_tokens": 16384,
|
|
30716
|
+
"max_output_tokens": 16384,
|
|
30717
|
+
"input_cost_per_token": 9e-07,
|
|
30718
|
+
"output_cost_per_token": 9e-07,
|
|
30719
|
+
"litellm_provider": "fireworks_ai",
|
|
30720
|
+
"mode": "chat"
|
|
30721
|
+
},
|
|
30722
|
+
"fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-v2": {
|
|
30723
|
+
"max_tokens": 16384,
|
|
30724
|
+
"max_input_tokens": 16384,
|
|
30725
|
+
"max_output_tokens": 16384,
|
|
30726
|
+
"input_cost_per_token": 9e-07,
|
|
30727
|
+
"output_cost_per_token": 9e-07,
|
|
30728
|
+
"litellm_provider": "fireworks_ai",
|
|
30729
|
+
"mode": "chat"
|
|
30730
|
+
},
|
|
30731
|
+
"fireworks_ai/accounts/fireworks/models/playground-v2-1024px-aesthetic": {
|
|
30732
|
+
"max_tokens": 4096,
|
|
30733
|
+
"max_input_tokens": 4096,
|
|
30734
|
+
"max_output_tokens": 4096,
|
|
30735
|
+
"input_cost_per_token": 1.3e-10,
|
|
30736
|
+
"output_cost_per_token": 1.3e-10,
|
|
30737
|
+
"litellm_provider": "fireworks_ai",
|
|
30738
|
+
"mode": "image_generation"
|
|
30739
|
+
},
|
|
30740
|
+
"fireworks_ai/accounts/fireworks/models/playground-v2-5-1024px-aesthetic": {
|
|
30741
|
+
"max_tokens": 4096,
|
|
30742
|
+
"max_input_tokens": 4096,
|
|
30743
|
+
"max_output_tokens": 4096,
|
|
30744
|
+
"input_cost_per_token": 1.3e-10,
|
|
30745
|
+
"output_cost_per_token": 1.3e-10,
|
|
30746
|
+
"litellm_provider": "fireworks_ai",
|
|
30747
|
+
"mode": "image_generation"
|
|
30748
|
+
},
|
|
30749
|
+
"fireworks_ai/accounts/fireworks/models/pythia-12b": {
|
|
30750
|
+
"max_tokens": 2048,
|
|
30751
|
+
"max_input_tokens": 2048,
|
|
30752
|
+
"max_output_tokens": 2048,
|
|
30753
|
+
"input_cost_per_token": 2e-07,
|
|
30754
|
+
"output_cost_per_token": 2e-07,
|
|
30755
|
+
"litellm_provider": "fireworks_ai",
|
|
30756
|
+
"mode": "chat"
|
|
30757
|
+
},
|
|
30758
|
+
"fireworks_ai/accounts/fireworks/models/qwen-qwq-32b-preview": {
|
|
30759
|
+
"max_tokens": 32768,
|
|
30760
|
+
"max_input_tokens": 32768,
|
|
30761
|
+
"max_output_tokens": 32768,
|
|
30762
|
+
"input_cost_per_token": 9e-07,
|
|
30763
|
+
"output_cost_per_token": 9e-07,
|
|
30764
|
+
"litellm_provider": "fireworks_ai",
|
|
30765
|
+
"mode": "chat"
|
|
30766
|
+
},
|
|
30767
|
+
"fireworks_ai/accounts/fireworks/models/qwen-v2p5-14b-instruct": {
|
|
30768
|
+
"max_tokens": 32768,
|
|
30769
|
+
"max_input_tokens": 32768,
|
|
30770
|
+
"max_output_tokens": 32768,
|
|
30771
|
+
"input_cost_per_token": 2e-07,
|
|
30772
|
+
"output_cost_per_token": 2e-07,
|
|
30773
|
+
"litellm_provider": "fireworks_ai",
|
|
30774
|
+
"mode": "chat"
|
|
30775
|
+
},
|
|
30776
|
+
"fireworks_ai/accounts/fireworks/models/qwen-v2p5-7b": {
|
|
30777
|
+
"max_tokens": 131072,
|
|
30778
|
+
"max_input_tokens": 131072,
|
|
30779
|
+
"max_output_tokens": 131072,
|
|
30780
|
+
"input_cost_per_token": 2e-07,
|
|
30781
|
+
"output_cost_per_token": 2e-07,
|
|
30782
|
+
"litellm_provider": "fireworks_ai",
|
|
30783
|
+
"mode": "chat"
|
|
30784
|
+
},
|
|
30785
|
+
"fireworks_ai/accounts/fireworks/models/qwen1p5-72b-chat": {
|
|
30786
|
+
"max_tokens": 32768,
|
|
30787
|
+
"max_input_tokens": 32768,
|
|
30788
|
+
"max_output_tokens": 32768,
|
|
30789
|
+
"input_cost_per_token": 9e-07,
|
|
30790
|
+
"output_cost_per_token": 9e-07,
|
|
30791
|
+
"litellm_provider": "fireworks_ai",
|
|
30792
|
+
"mode": "chat"
|
|
30793
|
+
},
|
|
30794
|
+
"fireworks_ai/accounts/fireworks/models/qwen2-7b-instruct": {
|
|
30795
|
+
"max_tokens": 32768,
|
|
30796
|
+
"max_input_tokens": 32768,
|
|
30797
|
+
"max_output_tokens": 32768,
|
|
30798
|
+
"input_cost_per_token": 2e-07,
|
|
30799
|
+
"output_cost_per_token": 2e-07,
|
|
30800
|
+
"litellm_provider": "fireworks_ai",
|
|
30801
|
+
"mode": "chat"
|
|
30802
|
+
},
|
|
30803
|
+
"fireworks_ai/accounts/fireworks/models/qwen2-vl-2b-instruct": {
|
|
30804
|
+
"max_tokens": 32768,
|
|
30805
|
+
"max_input_tokens": 32768,
|
|
30806
|
+
"max_output_tokens": 32768,
|
|
30807
|
+
"input_cost_per_token": 1e-07,
|
|
30808
|
+
"output_cost_per_token": 1e-07,
|
|
30809
|
+
"litellm_provider": "fireworks_ai",
|
|
30810
|
+
"mode": "chat"
|
|
30811
|
+
},
|
|
30812
|
+
"fireworks_ai/accounts/fireworks/models/qwen2-vl-72b-instruct": {
|
|
30813
|
+
"max_tokens": 32768,
|
|
30814
|
+
"max_input_tokens": 32768,
|
|
30815
|
+
"max_output_tokens": 32768,
|
|
30816
|
+
"input_cost_per_token": 9e-07,
|
|
30817
|
+
"output_cost_per_token": 9e-07,
|
|
30818
|
+
"litellm_provider": "fireworks_ai",
|
|
30819
|
+
"mode": "chat"
|
|
30820
|
+
},
|
|
30821
|
+
"fireworks_ai/accounts/fireworks/models/qwen2-vl-7b-instruct": {
|
|
30822
|
+
"max_tokens": 32768,
|
|
30823
|
+
"max_input_tokens": 32768,
|
|
30824
|
+
"max_output_tokens": 32768,
|
|
30825
|
+
"input_cost_per_token": 2e-07,
|
|
30826
|
+
"output_cost_per_token": 2e-07,
|
|
30827
|
+
"litellm_provider": "fireworks_ai",
|
|
30828
|
+
"mode": "chat"
|
|
30829
|
+
},
|
|
30830
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-0p5b-instruct": {
|
|
30831
|
+
"max_tokens": 32768,
|
|
30832
|
+
"max_input_tokens": 32768,
|
|
30833
|
+
"max_output_tokens": 32768,
|
|
30834
|
+
"input_cost_per_token": 1e-07,
|
|
30835
|
+
"output_cost_per_token": 1e-07,
|
|
30836
|
+
"litellm_provider": "fireworks_ai",
|
|
30837
|
+
"mode": "chat"
|
|
30838
|
+
},
|
|
30839
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-14b": {
|
|
30840
|
+
"max_tokens": 131072,
|
|
30841
|
+
"max_input_tokens": 131072,
|
|
30842
|
+
"max_output_tokens": 131072,
|
|
30843
|
+
"input_cost_per_token": 2e-07,
|
|
30844
|
+
"output_cost_per_token": 2e-07,
|
|
30845
|
+
"litellm_provider": "fireworks_ai",
|
|
30846
|
+
"mode": "chat"
|
|
30847
|
+
},
|
|
30848
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-1p5b-instruct": {
|
|
30849
|
+
"max_tokens": 32768,
|
|
30850
|
+
"max_input_tokens": 32768,
|
|
30851
|
+
"max_output_tokens": 32768,
|
|
30852
|
+
"input_cost_per_token": 1e-07,
|
|
30853
|
+
"output_cost_per_token": 1e-07,
|
|
30854
|
+
"litellm_provider": "fireworks_ai",
|
|
30855
|
+
"mode": "chat"
|
|
30856
|
+
},
|
|
30857
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-32b": {
|
|
30858
|
+
"max_tokens": 131072,
|
|
30859
|
+
"max_input_tokens": 131072,
|
|
30860
|
+
"max_output_tokens": 131072,
|
|
30861
|
+
"input_cost_per_token": 9e-07,
|
|
30862
|
+
"output_cost_per_token": 9e-07,
|
|
30863
|
+
"litellm_provider": "fireworks_ai",
|
|
30864
|
+
"mode": "chat"
|
|
30865
|
+
},
|
|
30866
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-32b-instruct": {
|
|
30867
|
+
"max_tokens": 32768,
|
|
30868
|
+
"max_input_tokens": 32768,
|
|
30869
|
+
"max_output_tokens": 32768,
|
|
30870
|
+
"input_cost_per_token": 9e-07,
|
|
30871
|
+
"output_cost_per_token": 9e-07,
|
|
30872
|
+
"litellm_provider": "fireworks_ai",
|
|
30873
|
+
"mode": "chat"
|
|
30874
|
+
},
|
|
30875
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-72b": {
|
|
30876
|
+
"max_tokens": 131072,
|
|
30877
|
+
"max_input_tokens": 131072,
|
|
30878
|
+
"max_output_tokens": 131072,
|
|
30879
|
+
"input_cost_per_token": 9e-07,
|
|
30880
|
+
"output_cost_per_token": 9e-07,
|
|
30881
|
+
"litellm_provider": "fireworks_ai",
|
|
30882
|
+
"mode": "chat"
|
|
30883
|
+
},
|
|
30884
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-72b-instruct": {
|
|
30885
|
+
"max_tokens": 32768,
|
|
30886
|
+
"max_input_tokens": 32768,
|
|
30887
|
+
"max_output_tokens": 32768,
|
|
30888
|
+
"input_cost_per_token": 9e-07,
|
|
30889
|
+
"output_cost_per_token": 9e-07,
|
|
30890
|
+
"litellm_provider": "fireworks_ai",
|
|
30891
|
+
"mode": "chat"
|
|
30892
|
+
},
|
|
30893
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-7b-instruct": {
|
|
30894
|
+
"max_tokens": 32768,
|
|
30895
|
+
"max_input_tokens": 32768,
|
|
30896
|
+
"max_output_tokens": 32768,
|
|
30897
|
+
"input_cost_per_token": 2e-07,
|
|
30898
|
+
"output_cost_per_token": 2e-07,
|
|
30899
|
+
"litellm_provider": "fireworks_ai",
|
|
30900
|
+
"mode": "chat"
|
|
30901
|
+
},
|
|
30902
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-0p5b": {
|
|
30903
|
+
"max_tokens": 32768,
|
|
30904
|
+
"max_input_tokens": 32768,
|
|
30905
|
+
"max_output_tokens": 32768,
|
|
30906
|
+
"input_cost_per_token": 1e-07,
|
|
30907
|
+
"output_cost_per_token": 1e-07,
|
|
30908
|
+
"litellm_provider": "fireworks_ai",
|
|
30909
|
+
"mode": "chat"
|
|
30910
|
+
},
|
|
30911
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-0p5b-instruct": {
|
|
30912
|
+
"max_tokens": 32768,
|
|
30913
|
+
"max_input_tokens": 32768,
|
|
30914
|
+
"max_output_tokens": 32768,
|
|
30915
|
+
"input_cost_per_token": 1e-07,
|
|
30916
|
+
"output_cost_per_token": 1e-07,
|
|
30917
|
+
"litellm_provider": "fireworks_ai",
|
|
30918
|
+
"mode": "chat"
|
|
30919
|
+
},
|
|
30920
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-14b": {
|
|
30921
|
+
"max_tokens": 32768,
|
|
30922
|
+
"max_input_tokens": 32768,
|
|
30923
|
+
"max_output_tokens": 32768,
|
|
30924
|
+
"input_cost_per_token": 2e-07,
|
|
30925
|
+
"output_cost_per_token": 2e-07,
|
|
30926
|
+
"litellm_provider": "fireworks_ai",
|
|
30927
|
+
"mode": "chat"
|
|
30928
|
+
},
|
|
30929
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-14b-instruct": {
|
|
30930
|
+
"max_tokens": 32768,
|
|
30931
|
+
"max_input_tokens": 32768,
|
|
30932
|
+
"max_output_tokens": 32768,
|
|
30933
|
+
"input_cost_per_token": 2e-07,
|
|
30934
|
+
"output_cost_per_token": 2e-07,
|
|
30935
|
+
"litellm_provider": "fireworks_ai",
|
|
30936
|
+
"mode": "chat"
|
|
30937
|
+
},
|
|
30938
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-1p5b": {
|
|
30939
|
+
"max_tokens": 32768,
|
|
30940
|
+
"max_input_tokens": 32768,
|
|
30941
|
+
"max_output_tokens": 32768,
|
|
30942
|
+
"input_cost_per_token": 1e-07,
|
|
30943
|
+
"output_cost_per_token": 1e-07,
|
|
30944
|
+
"litellm_provider": "fireworks_ai",
|
|
30945
|
+
"mode": "chat"
|
|
30946
|
+
},
|
|
30947
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-1p5b-instruct": {
|
|
30948
|
+
"max_tokens": 32768,
|
|
30949
|
+
"max_input_tokens": 32768,
|
|
30950
|
+
"max_output_tokens": 32768,
|
|
30951
|
+
"input_cost_per_token": 1e-07,
|
|
30952
|
+
"output_cost_per_token": 1e-07,
|
|
30953
|
+
"litellm_provider": "fireworks_ai",
|
|
30954
|
+
"mode": "chat"
|
|
30955
|
+
},
|
|
30956
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b": {
|
|
30957
|
+
"max_tokens": 32768,
|
|
30958
|
+
"max_input_tokens": 32768,
|
|
30959
|
+
"max_output_tokens": 32768,
|
|
30960
|
+
"input_cost_per_token": 9e-07,
|
|
30961
|
+
"output_cost_per_token": 9e-07,
|
|
30962
|
+
"litellm_provider": "fireworks_ai",
|
|
30963
|
+
"mode": "chat"
|
|
30964
|
+
},
|
|
30965
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-128k": {
|
|
30966
|
+
"max_tokens": 131072,
|
|
30967
|
+
"max_input_tokens": 131072,
|
|
30968
|
+
"max_output_tokens": 131072,
|
|
30969
|
+
"input_cost_per_token": 9e-07,
|
|
30970
|
+
"output_cost_per_token": 9e-07,
|
|
30971
|
+
"litellm_provider": "fireworks_ai",
|
|
30972
|
+
"mode": "chat"
|
|
30973
|
+
},
|
|
30974
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-32k-rope": {
|
|
30975
|
+
"max_tokens": 32768,
|
|
30976
|
+
"max_input_tokens": 32768,
|
|
30977
|
+
"max_output_tokens": 32768,
|
|
30978
|
+
"input_cost_per_token": 9e-07,
|
|
30979
|
+
"output_cost_per_token": 9e-07,
|
|
30980
|
+
"litellm_provider": "fireworks_ai",
|
|
30981
|
+
"mode": "chat"
|
|
30982
|
+
},
|
|
30983
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-64k": {
|
|
30984
|
+
"max_tokens": 65536,
|
|
30985
|
+
"max_input_tokens": 65536,
|
|
30986
|
+
"max_output_tokens": 65536,
|
|
30987
|
+
"input_cost_per_token": 9e-07,
|
|
30988
|
+
"output_cost_per_token": 9e-07,
|
|
30989
|
+
"litellm_provider": "fireworks_ai",
|
|
30990
|
+
"mode": "chat"
|
|
30991
|
+
},
|
|
30992
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-3b": {
|
|
30993
|
+
"max_tokens": 32768,
|
|
30994
|
+
"max_input_tokens": 32768,
|
|
30995
|
+
"max_output_tokens": 32768,
|
|
30996
|
+
"input_cost_per_token": 1e-07,
|
|
30997
|
+
"output_cost_per_token": 1e-07,
|
|
30998
|
+
"litellm_provider": "fireworks_ai",
|
|
30999
|
+
"mode": "chat"
|
|
31000
|
+
},
|
|
31001
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-3b-instruct": {
|
|
31002
|
+
"max_tokens": 32768,
|
|
31003
|
+
"max_input_tokens": 32768,
|
|
31004
|
+
"max_output_tokens": 32768,
|
|
31005
|
+
"input_cost_per_token": 1e-07,
|
|
31006
|
+
"output_cost_per_token": 1e-07,
|
|
31007
|
+
"litellm_provider": "fireworks_ai",
|
|
31008
|
+
"mode": "chat"
|
|
31009
|
+
},
|
|
31010
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-7b": {
|
|
31011
|
+
"max_tokens": 32768,
|
|
31012
|
+
"max_input_tokens": 32768,
|
|
31013
|
+
"max_output_tokens": 32768,
|
|
31014
|
+
"input_cost_per_token": 2e-07,
|
|
31015
|
+
"output_cost_per_token": 2e-07,
|
|
31016
|
+
"litellm_provider": "fireworks_ai",
|
|
31017
|
+
"mode": "chat"
|
|
31018
|
+
},
|
|
31019
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-7b-instruct": {
|
|
31020
|
+
"max_tokens": 32768,
|
|
31021
|
+
"max_input_tokens": 32768,
|
|
31022
|
+
"max_output_tokens": 32768,
|
|
31023
|
+
"input_cost_per_token": 2e-07,
|
|
31024
|
+
"output_cost_per_token": 2e-07,
|
|
31025
|
+
"litellm_provider": "fireworks_ai",
|
|
31026
|
+
"mode": "chat"
|
|
31027
|
+
},
|
|
31028
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-math-72b-instruct": {
|
|
31029
|
+
"max_tokens": 4096,
|
|
31030
|
+
"max_input_tokens": 4096,
|
|
31031
|
+
"max_output_tokens": 4096,
|
|
31032
|
+
"input_cost_per_token": 9e-07,
|
|
31033
|
+
"output_cost_per_token": 9e-07,
|
|
31034
|
+
"litellm_provider": "fireworks_ai",
|
|
31035
|
+
"mode": "chat"
|
|
31036
|
+
},
|
|
31037
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-vl-32b-instruct": {
|
|
31038
|
+
"max_tokens": 128000,
|
|
31039
|
+
"max_input_tokens": 128000,
|
|
31040
|
+
"max_output_tokens": 128000,
|
|
31041
|
+
"input_cost_per_token": 9e-07,
|
|
31042
|
+
"output_cost_per_token": 9e-07,
|
|
31043
|
+
"litellm_provider": "fireworks_ai",
|
|
31044
|
+
"mode": "chat"
|
|
31045
|
+
},
|
|
31046
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-vl-3b-instruct": {
|
|
31047
|
+
"max_tokens": 128000,
|
|
31048
|
+
"max_input_tokens": 128000,
|
|
31049
|
+
"max_output_tokens": 128000,
|
|
31050
|
+
"input_cost_per_token": 2e-07,
|
|
31051
|
+
"output_cost_per_token": 2e-07,
|
|
31052
|
+
"litellm_provider": "fireworks_ai",
|
|
31053
|
+
"mode": "chat"
|
|
31054
|
+
},
|
|
31055
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-vl-72b-instruct": {
|
|
31056
|
+
"max_tokens": 128000,
|
|
31057
|
+
"max_input_tokens": 128000,
|
|
31058
|
+
"max_output_tokens": 128000,
|
|
31059
|
+
"input_cost_per_token": 9e-07,
|
|
31060
|
+
"output_cost_per_token": 9e-07,
|
|
31061
|
+
"litellm_provider": "fireworks_ai",
|
|
31062
|
+
"mode": "chat"
|
|
31063
|
+
},
|
|
31064
|
+
"fireworks_ai/accounts/fireworks/models/qwen2p5-vl-7b-instruct": {
|
|
31065
|
+
"max_tokens": 128000,
|
|
31066
|
+
"max_input_tokens": 128000,
|
|
31067
|
+
"max_output_tokens": 128000,
|
|
31068
|
+
"input_cost_per_token": 2e-07,
|
|
31069
|
+
"output_cost_per_token": 2e-07,
|
|
31070
|
+
"litellm_provider": "fireworks_ai",
|
|
31071
|
+
"mode": "chat"
|
|
31072
|
+
},
|
|
31073
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-0p6b": {
|
|
31074
|
+
"max_tokens": 40960,
|
|
31075
|
+
"max_input_tokens": 40960,
|
|
31076
|
+
"max_output_tokens": 40960,
|
|
31077
|
+
"input_cost_per_token": 1e-07,
|
|
31078
|
+
"output_cost_per_token": 1e-07,
|
|
31079
|
+
"litellm_provider": "fireworks_ai",
|
|
31080
|
+
"mode": "chat"
|
|
31081
|
+
},
|
|
31082
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-14b": {
|
|
31083
|
+
"max_tokens": 40960,
|
|
31084
|
+
"max_input_tokens": 40960,
|
|
31085
|
+
"max_output_tokens": 40960,
|
|
31086
|
+
"input_cost_per_token": 2e-07,
|
|
31087
|
+
"output_cost_per_token": 2e-07,
|
|
31088
|
+
"litellm_provider": "fireworks_ai",
|
|
31089
|
+
"mode": "chat"
|
|
31090
|
+
},
|
|
31091
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-1p7b": {
|
|
31092
|
+
"max_tokens": 131072,
|
|
31093
|
+
"max_input_tokens": 131072,
|
|
31094
|
+
"max_output_tokens": 131072,
|
|
31095
|
+
"input_cost_per_token": 1e-07,
|
|
31096
|
+
"output_cost_per_token": 1e-07,
|
|
31097
|
+
"litellm_provider": "fireworks_ai",
|
|
31098
|
+
"mode": "chat"
|
|
31099
|
+
},
|
|
31100
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft": {
|
|
31101
|
+
"max_tokens": 262144,
|
|
31102
|
+
"max_input_tokens": 262144,
|
|
31103
|
+
"max_output_tokens": 262144,
|
|
31104
|
+
"input_cost_per_token": 1e-07,
|
|
31105
|
+
"output_cost_per_token": 1e-07,
|
|
31106
|
+
"litellm_provider": "fireworks_ai",
|
|
31107
|
+
"mode": "chat"
|
|
31108
|
+
},
|
|
31109
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft-131072": {
|
|
31110
|
+
"max_tokens": 131072,
|
|
31111
|
+
"max_input_tokens": 131072,
|
|
31112
|
+
"max_output_tokens": 131072,
|
|
31113
|
+
"input_cost_per_token": 1e-07,
|
|
31114
|
+
"output_cost_per_token": 1e-07,
|
|
31115
|
+
"litellm_provider": "fireworks_ai",
|
|
31116
|
+
"mode": "chat"
|
|
31117
|
+
},
|
|
31118
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft-40960": {
|
|
31119
|
+
"max_tokens": 40960,
|
|
31120
|
+
"max_input_tokens": 40960,
|
|
31121
|
+
"max_output_tokens": 40960,
|
|
31122
|
+
"input_cost_per_token": 1e-07,
|
|
31123
|
+
"output_cost_per_token": 1e-07,
|
|
31124
|
+
"litellm_provider": "fireworks_ai",
|
|
31125
|
+
"mode": "chat"
|
|
31126
|
+
},
|
|
31127
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b": {
|
|
31128
|
+
"max_tokens": 131072,
|
|
31129
|
+
"max_input_tokens": 131072,
|
|
31130
|
+
"max_output_tokens": 131072,
|
|
31131
|
+
"input_cost_per_token": 2.2e-07,
|
|
31132
|
+
"output_cost_per_token": 8.8e-07,
|
|
31133
|
+
"litellm_provider": "fireworks_ai",
|
|
31134
|
+
"mode": "chat"
|
|
31135
|
+
},
|
|
31136
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b-instruct-2507": {
|
|
31137
|
+
"max_tokens": 262144,
|
|
31138
|
+
"max_input_tokens": 262144,
|
|
31139
|
+
"max_output_tokens": 262144,
|
|
31140
|
+
"input_cost_per_token": 2.2e-07,
|
|
31141
|
+
"output_cost_per_token": 8.8e-07,
|
|
31142
|
+
"litellm_provider": "fireworks_ai",
|
|
31143
|
+
"mode": "chat"
|
|
31144
|
+
},
|
|
31145
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b-thinking-2507": {
|
|
31146
|
+
"max_tokens": 262144,
|
|
31147
|
+
"max_input_tokens": 262144,
|
|
31148
|
+
"max_output_tokens": 262144,
|
|
31149
|
+
"input_cost_per_token": 2.2e-07,
|
|
31150
|
+
"output_cost_per_token": 8.8e-07,
|
|
31151
|
+
"litellm_provider": "fireworks_ai",
|
|
31152
|
+
"mode": "chat"
|
|
31153
|
+
},
|
|
31154
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b": {
|
|
31155
|
+
"max_tokens": 131072,
|
|
31156
|
+
"max_input_tokens": 131072,
|
|
31157
|
+
"max_output_tokens": 131072,
|
|
31158
|
+
"input_cost_per_token": 1.5e-07,
|
|
31159
|
+
"output_cost_per_token": 6e-07,
|
|
31160
|
+
"litellm_provider": "fireworks_ai",
|
|
31161
|
+
"mode": "chat"
|
|
31162
|
+
},
|
|
31163
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b-instruct-2507": {
|
|
31164
|
+
"max_tokens": 262144,
|
|
31165
|
+
"max_input_tokens": 262144,
|
|
31166
|
+
"max_output_tokens": 262144,
|
|
31167
|
+
"input_cost_per_token": 5e-07,
|
|
31168
|
+
"output_cost_per_token": 5e-07,
|
|
31169
|
+
"litellm_provider": "fireworks_ai",
|
|
31170
|
+
"mode": "chat"
|
|
31171
|
+
},
|
|
31172
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b-thinking-2507": {
|
|
31173
|
+
"max_tokens": 262144,
|
|
31174
|
+
"max_input_tokens": 262144,
|
|
31175
|
+
"max_output_tokens": 262144,
|
|
31176
|
+
"input_cost_per_token": 9e-07,
|
|
31177
|
+
"output_cost_per_token": 9e-07,
|
|
31178
|
+
"litellm_provider": "fireworks_ai",
|
|
31179
|
+
"mode": "chat"
|
|
31180
|
+
},
|
|
31181
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-32b": {
|
|
31182
|
+
"max_tokens": 131072,
|
|
31183
|
+
"max_input_tokens": 131072,
|
|
31184
|
+
"max_output_tokens": 131072,
|
|
31185
|
+
"input_cost_per_token": 9e-07,
|
|
31186
|
+
"output_cost_per_token": 9e-07,
|
|
31187
|
+
"litellm_provider": "fireworks_ai",
|
|
31188
|
+
"mode": "chat",
|
|
31189
|
+
"supports_reasoning": true
|
|
31190
|
+
},
|
|
31191
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-4b": {
|
|
31192
|
+
"max_tokens": 40960,
|
|
31193
|
+
"max_input_tokens": 40960,
|
|
31194
|
+
"max_output_tokens": 40960,
|
|
31195
|
+
"input_cost_per_token": 2e-07,
|
|
31196
|
+
"output_cost_per_token": 2e-07,
|
|
31197
|
+
"litellm_provider": "fireworks_ai",
|
|
31198
|
+
"mode": "chat"
|
|
31199
|
+
},
|
|
31200
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-4b-instruct-2507": {
|
|
31201
|
+
"max_tokens": 262144,
|
|
31202
|
+
"max_input_tokens": 262144,
|
|
31203
|
+
"max_output_tokens": 262144,
|
|
31204
|
+
"input_cost_per_token": 2e-07,
|
|
31205
|
+
"output_cost_per_token": 2e-07,
|
|
31206
|
+
"litellm_provider": "fireworks_ai",
|
|
31207
|
+
"mode": "chat"
|
|
31208
|
+
},
|
|
31209
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-8b": {
|
|
31210
|
+
"max_tokens": 40960,
|
|
31211
|
+
"max_input_tokens": 40960,
|
|
31212
|
+
"max_output_tokens": 40960,
|
|
31213
|
+
"input_cost_per_token": 2e-07,
|
|
31214
|
+
"output_cost_per_token": 2e-07,
|
|
31215
|
+
"litellm_provider": "fireworks_ai",
|
|
31216
|
+
"mode": "chat",
|
|
31217
|
+
"supports_reasoning": true
|
|
31218
|
+
},
|
|
31219
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-coder-30b-a3b-instruct": {
|
|
31220
|
+
"max_tokens": 262144,
|
|
31221
|
+
"max_input_tokens": 262144,
|
|
31222
|
+
"max_output_tokens": 262144,
|
|
31223
|
+
"input_cost_per_token": 1.5e-07,
|
|
31224
|
+
"output_cost_per_token": 6e-07,
|
|
31225
|
+
"litellm_provider": "fireworks_ai",
|
|
31226
|
+
"mode": "chat"
|
|
31227
|
+
},
|
|
31228
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-coder-480b-instruct-bf16": {
|
|
31229
|
+
"max_tokens": 4096,
|
|
31230
|
+
"max_input_tokens": 4096,
|
|
31231
|
+
"max_output_tokens": 4096,
|
|
31232
|
+
"input_cost_per_token": 9e-07,
|
|
31233
|
+
"output_cost_per_token": 9e-07,
|
|
31234
|
+
"litellm_provider": "fireworks_ai",
|
|
31235
|
+
"mode": "chat"
|
|
31236
|
+
},
|
|
31237
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-embedding-0p6b": {
|
|
31238
|
+
"max_tokens": 32768,
|
|
31239
|
+
"max_input_tokens": 32768,
|
|
31240
|
+
"max_output_tokens": 32768,
|
|
31241
|
+
"input_cost_per_token": 0.0,
|
|
31242
|
+
"output_cost_per_token": 0.0,
|
|
31243
|
+
"litellm_provider": "fireworks_ai",
|
|
31244
|
+
"mode": "embedding"
|
|
31245
|
+
},
|
|
31246
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-embedding-4b": {
|
|
31247
|
+
"max_tokens": 40960,
|
|
31248
|
+
"max_input_tokens": 40960,
|
|
31249
|
+
"max_output_tokens": 40960,
|
|
31250
|
+
"input_cost_per_token": 0.0,
|
|
31251
|
+
"output_cost_per_token": 0.0,
|
|
31252
|
+
"litellm_provider": "fireworks_ai",
|
|
31253
|
+
"mode": "embedding"
|
|
31254
|
+
},
|
|
31255
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-embedding-8b": {
|
|
31256
|
+
"max_tokens": 40960,
|
|
31257
|
+
"max_input_tokens": 40960,
|
|
31258
|
+
"max_output_tokens": 40960,
|
|
31259
|
+
"input_cost_per_token": 0.0,
|
|
31260
|
+
"output_cost_per_token": 0.0,
|
|
31261
|
+
"litellm_provider": "fireworks_ai",
|
|
31262
|
+
"mode": "embedding"
|
|
31263
|
+
},
|
|
31264
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-next-80b-a3b-instruct": {
|
|
31265
|
+
"max_tokens": 4096,
|
|
31266
|
+
"max_input_tokens": 4096,
|
|
31267
|
+
"max_output_tokens": 4096,
|
|
31268
|
+
"input_cost_per_token": 9e-07,
|
|
31269
|
+
"output_cost_per_token": 9e-07,
|
|
31270
|
+
"litellm_provider": "fireworks_ai",
|
|
31271
|
+
"mode": "chat"
|
|
31272
|
+
},
|
|
31273
|
+
"fireworks_ai/accounts/fireworks/models/qwen3-next-80b-a3b-thinking": {
|
|
31274
|
+
"max_tokens": 4096,
|
|
31275
|
+
"max_input_tokens": 4096,
|
|
31276
|
+
"max_output_tokens": 4096,
|
|
31277
|
+
"input_cost_per_token": 9e-07,
|
|
31278
|
+
"output_cost_per_token": 9e-07,
|
|
31279
|
+
"litellm_provider": "fireworks_ai",
|
|
31280
|
+
"mode": "chat"
|
|
31281
|
+
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-reranker-0p6b": {
+ "max_tokens": 40960,
+ "max_input_tokens": 40960,
+ "max_output_tokens": 40960,
+ "input_cost_per_token": 0.0,
+ "output_cost_per_token": 0.0,
+ "litellm_provider": "fireworks_ai",
+ "mode": "rerank"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-reranker-4b": {
+ "max_tokens": 40960,
+ "max_input_tokens": 40960,
+ "max_output_tokens": 40960,
+ "input_cost_per_token": 0.0,
+ "output_cost_per_token": 0.0,
+ "litellm_provider": "fireworks_ai",
+ "mode": "rerank"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-reranker-8b": {
+ "max_tokens": 40960,
+ "max_input_tokens": 40960,
+ "max_output_tokens": 40960,
+ "input_cost_per_token": 0.0,
+ "output_cost_per_token": 0.0,
+ "litellm_provider": "fireworks_ai",
+ "mode": "rerank"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-vl-235b-a22b-instruct": {
+ "max_tokens": 262144,
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "input_cost_per_token": 2.2e-07,
+ "output_cost_per_token": 8.8e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-vl-235b-a22b-thinking": {
+ "max_tokens": 262144,
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "input_cost_per_token": 2.2e-07,
+ "output_cost_per_token": 8.8e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-vl-30b-a3b-instruct": {
+ "max_tokens": 262144,
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "input_cost_per_token": 1.5e-07,
+ "output_cost_per_token": 6e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-vl-30b-a3b-thinking": {
+ "max_tokens": 262144,
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "input_cost_per_token": 1.5e-07,
+ "output_cost_per_token": 6e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-vl-32b-instruct": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 9e-07,
+ "output_cost_per_token": 9e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwen3-vl-8b-instruct": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/qwq-32b": {
+ "max_tokens": 131072,
+ "max_input_tokens": 131072,
+ "max_output_tokens": 131072,
+ "input_cost_per_token": 9e-07,
+ "output_cost_per_token": 9e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/rolm-ocr": {
+ "max_tokens": 128000,
+ "max_input_tokens": 128000,
+ "max_output_tokens": 128000,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/snorkel-mistral-7b-pairrm-dpo": {
+ "max_tokens": 32768,
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/stable-diffusion-xl-1024-v1-0": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 1.3e-10,
+ "output_cost_per_token": 1.3e-10,
+ "litellm_provider": "fireworks_ai",
+ "mode": "image_generation"
+ },
+ "fireworks_ai/accounts/fireworks/models/stablecode-3b": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 1e-07,
+ "output_cost_per_token": 1e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/starcoder-16b": {
+ "max_tokens": 8192,
+ "max_input_tokens": 8192,
+ "max_output_tokens": 8192,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/starcoder-7b": {
+ "max_tokens": 8192,
+ "max_input_tokens": 8192,
+ "max_output_tokens": 8192,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/starcoder2-15b": {
+ "max_tokens": 16384,
+ "max_input_tokens": 16384,
+ "max_output_tokens": 16384,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/starcoder2-3b": {
+ "max_tokens": 16384,
+ "max_input_tokens": 16384,
+ "max_output_tokens": 16384,
+ "input_cost_per_token": 1e-07,
+ "output_cost_per_token": 1e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/starcoder2-7b": {
+ "max_tokens": 16384,
+ "max_input_tokens": 16384,
+ "max_output_tokens": 16384,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/toppy-m-7b": {
+ "max_tokens": 32768,
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/whisper-v3": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 0.0,
+ "output_cost_per_token": 0.0,
+ "litellm_provider": "fireworks_ai",
+ "mode": "audio_transcription"
+ },
+ "fireworks_ai/accounts/fireworks/models/whisper-v3-turbo": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 0.0,
+ "output_cost_per_token": 0.0,
+ "litellm_provider": "fireworks_ai",
+ "mode": "audio_transcription"
+ },
+ "fireworks_ai/accounts/fireworks/models/yi-34b": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 9e-07,
+ "output_cost_per_token": 9e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/yi-34b-200k-capybara": {
+ "max_tokens": 200000,
+ "max_input_tokens": 200000,
+ "max_output_tokens": 200000,
+ "input_cost_per_token": 9e-07,
+ "output_cost_per_token": 9e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/yi-34b-chat": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 9e-07,
+ "output_cost_per_token": 9e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/yi-6b": {
+ "max_tokens": 4096,
+ "max_input_tokens": 4096,
+ "max_output_tokens": 4096,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "fireworks_ai/accounts/fireworks/models/zephyr-7b-beta": {
+ "max_tokens": 32768,
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "input_cost_per_token": 2e-07,
+ "output_cost_per_token": 2e-07,
+ "litellm_provider": "fireworks_ai",
+ "mode": "chat"
+ },
+ "gemini/gemini-2.5-computer-use-preview-10-2025": {
+ "input_cost_per_token": 1.25e-06,
+ "input_cost_per_token_above_200k_tokens": 2.5e-06,
+ "litellm_provider": "gemini",
+ "max_images_per_prompt": 3000,
+ "max_input_tokens": 128000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 1e-05,
+ "output_cost_per_token_above_200k_tokens": 1.5e-05,
+ "rpm": 2000,
+ "source": "https://ai.google.dev/gemini-api/docs/computer-use",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/completions"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "tpm": 800000
+ },
+ "vertex_ai/deepseek-ai/deepseek-v3.2-maas": {
+ "input_cost_per_token": 5.6e-07,
+ "input_cost_per_token_batches": 2.8e-07,
+ "litellm_provider": "vertex_ai-deepseek_models",
+ "max_input_tokens": 163840,
+ "max_output_tokens": 32768,
+ "max_tokens": 163840,
+ "mode": "chat",
+ "output_cost_per_token": 1.68e-06,
+ "output_cost_per_token_batches": 8.4e-07,
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
+ "supported_regions": [
+ "us-west2"
+ ],
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_tool_choice": true
+ },
+ "voyage/rerank-2.5": {
+ "input_cost_per_token": 5e-08,
+ "litellm_provider": "voyage",
+ "max_input_tokens": 32000,
+ "max_output_tokens": 32000,
+ "max_query_tokens": 32000,
+ "max_tokens": 32000,
+ "mode": "rerank",
+ "output_cost_per_token": 0.0
+ },
+ "voyage/rerank-2.5-lite": {
+ "input_cost_per_token": 2e-08,
+ "litellm_provider": "voyage",
+ "max_input_tokens": 32000,
+ "max_output_tokens": 32000,
+ "max_query_tokens": 32000,
+ "max_tokens": 32000,
+ "mode": "rerank",
+ "output_cost_per_token": 0.0
+ },
+ "azure_ai/claude-haiku-4-5": {
+ "input_cost_per_token": 1e-06,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 5e-06,
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure_ai/claude-opus-4-1": {
+ "input_cost_per_token": 1.5e-05,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 32000,
+ "max_tokens": 32000,
+ "mode": "chat",
+ "output_cost_per_token": 7.5e-05,
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure_ai/claude-sonnet-4-5": {
+ "input_cost_per_token": 3e-06,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 1.5e-05,
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "gpt-5.2": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_service_tier": true,
+ "supports_vision": true
+ },
+ "gpt-5.2-2025-12-11": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_service_tier": true,
+ "supports_vision": true
+ },
+ "gpt-5.2-chat-latest": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "gpt-5.2-pro": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "gpt-5.2-pro-2025-12-11": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "mistral/codestral-2508": {
+ "input_cost_per_token": 3e-07,
+ "litellm_provider": "mistral",
+ "max_input_tokens": 256000,
+ "max_output_tokens": 256000,
+ "max_tokens": 256000,
+ "mode": "chat",
+ "output_cost_per_token": 9e-07,
+ "source": "https://mistral.ai/news/codestral-25-08",
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "mistral/labs-devstral-small-2512": {
+ "input_cost_per_token": 1e-07,
+ "litellm_provider": "mistral",
+ "max_input_tokens": 256000,
+ "max_output_tokens": 256000,
+ "max_tokens": 256000,
+ "mode": "chat",
+ "output_cost_per_token": 3e-07,
+ "source": "https://docs.mistral.ai/models/devstral-small-2-25-12",
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "mistral/devstral-2512": {
+ "input_cost_per_token": 4e-07,
+ "litellm_provider": "mistral",
+ "max_input_tokens": 256000,
+ "max_output_tokens": 256000,
+ "max_tokens": 256000,
+ "mode": "chat",
+ "output_cost_per_token": 2e-06,
+ "source": "https://mistral.ai/news/devstral-2-vibe-cli",
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "azure/gpt-5.2": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "input_cost_per_token": 1.75e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.2-2025-12-11": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_service_tier": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.2-chat-2025-12-11": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.2-pro": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "azure/gpt-5.2-pro-2025-12-11": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "eu.anthropic.claude-opus-4-5-20251101-v1:0": {
+ "cache_creation_input_token_cost": 6.25e-06,
+ "cache_read_input_token_cost": 5e-07,
+ "input_cost_per_token": 5e-06,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.5e-05,
+ "search_context_cost_per_query": {
+ "search_context_size_high": 0.01,
+ "search_context_size_low": 0.01,
+ "search_context_size_medium": 0.01
+ },
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "tool_use_system_prompt_tokens": 159
+ },
+ "azure_ai/cohere-rerank-v4.0-pro": {
+ "input_cost_per_query": 0.0025,
+ "input_cost_per_token": 0.0,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "max_query_tokens": 4096,
+ "max_tokens": 32768,
+ "mode": "rerank",
+ "output_cost_per_token": 0.0
+ },
+ "azure_ai/cohere-rerank-v4.0-fast": {
+ "input_cost_per_query": 0.002,
+ "input_cost_per_token": 0.0,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 32768,
+ "max_output_tokens": 32768,
+ "max_query_tokens": 4096,
+ "max_tokens": 32768,
+ "mode": "rerank",
+ "output_cost_per_token": 0.0
+ },
+ "stability/sd3": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.065,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/sd3-large": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.065,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/sd3-large-turbo": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.04,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/sd3-medium": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.035,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/sd3.5-large": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.065,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/sd3.5-large-turbo": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.04,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/sd3.5-medium": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.035,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/stable-image-ultra": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.08,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "stability/stable-image-core": {
+ "litellm_provider": "stability",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.03,
+ "supported_endpoints": [
+ "/v1/images/generations"
+ ]
+ },
+ "openrouter/mistralai/devstral-2512:free": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 0,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "max_tokens": 262144,
+ "mode": "chat",
+ "output_cost_per_token": 0,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_tool_choice": true,
+ "supports_vision": false
+ },
+ "openrouter/mistralai/devstral-2512": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 1.5e-07,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 262144,
+ "max_output_tokens": 65536,
+ "max_tokens": 262144,
+ "mode": "chat",
+ "output_cost_per_token": 6e-07,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_tool_choice": true,
+ "supports_vision": false
+ },
+ "openrouter/mistralai/ministral-3b-2512": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 1e-07,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 131072,
+ "max_output_tokens": 131072,
+ "max_tokens": 131072,
+ "mode": "chat",
+ "output_cost_per_token": 1e-07,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "openrouter/mistralai/ministral-8b-2512": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 1.5e-07,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "max_tokens": 262144,
+ "mode": "chat",
+ "output_cost_per_token": 1.5e-07,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "openrouter/mistralai/ministral-14b-2512": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 2e-07,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "max_tokens": 262144,
+ "mode": "chat",
+ "output_cost_per_token": 2e-07,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "openrouter/mistralai/mistral-large-2512": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 5e-07,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 262144,
+ "max_output_tokens": 262144,
+ "max_tokens": 262144,
+ "mode": "chat",
+ "output_cost_per_token": 1.5e-06,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "openrouter/openai/gpt-5.2": {
+ "input_cost_per_image": 0,
+ "cache_read_input_token_cost": 1.75e-07,
+ "input_cost_per_token": 1.75e-06,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 400000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "openrouter/openai/gpt-5.2-chat": {
+ "input_cost_per_image": 0,
+ "cache_read_input_token_cost": 1.75e-07,
+ "input_cost_per_token": 1.75e-06,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "openrouter/openai/gpt-5.2-pro": {
+ "input_cost_per_image": 0,
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 400000,
+ "mode": "chat",
+ "output_cost_per_token": 0.000168,
+ "supports_function_calling": true,
+ "supports_prompt_caching": false,
+ "supports_reasoning": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure_ai/deepseek-v3.2": {
+ "input_cost_per_token": 5.8e-07,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 163840,
+ "max_output_tokens": 163840,
+ "max_tokens": 8192,
+ "mode": "chat",
+ "output_cost_per_token": 1.68e-06,
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_tool_choice": true
+ },
+ "azure_ai/deepseek-v3.2-speciale": {
+ "input_cost_per_token": 5.8e-07,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 163840,
+ "max_output_tokens": 163840,
+ "max_tokens": 8192,
+ "mode": "chat",
+ "output_cost_per_token": 1.68e-06,
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_tool_choice": true
+ },
+ "github_copilot/claude-haiku-4.5": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16000,
+ "max_tokens": 16000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/claude-opus-4.5": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16000,
+ "max_tokens": 16000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/claude-opus-41": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 80000,
+ "max_output_tokens": 16000,
+ "max_tokens": 16000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions"
+ ],
+ "supports_vision": true
+ },
+ "github_copilot/claude-sonnet-4": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16000,
+ "max_tokens": 16000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/claude-sonnet-4.5": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16000,
+ "max_tokens": 16000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/gemini-2.5-pro": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/gemini-3-pro-preview": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-3.5-turbo": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 16384,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true
+ },
+ "github_copilot/gpt-3.5-turbo-0613": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 16384,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true
+ },
+ "github_copilot/gpt-4": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 32768,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true
+ },
+ "github_copilot/gpt-4-0613": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 32768,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true
+ },
+ "github_copilot/gpt-4-o-preview": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true
+ },
+ "github_copilot/gpt-4.1": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-4.1-2025-04-14": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-41-copilot": {
+ "litellm_provider": "github_copilot",
+ "mode": "completion"
+ },
+ "github_copilot/gpt-4o": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-4o-2024-05-13": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-4o-2024-08-06": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true
+ },
+ "github_copilot/gpt-4o-2024-11-20": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-4o-mini": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true
+ },
+ "github_copilot/gpt-4o-mini-2024-07-18": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 64000,
+ "max_output_tokens": 4096,
+ "max_tokens": 4096,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true
+ },
+ "github_copilot/gpt-5": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-5-mini": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-5.1": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-5.1-codex-max": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/gpt-5.2": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_response_schema": true,
+ "supports_vision": true
+ },
+ "github_copilot/text-embedding-3-small": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 8191,
+ "max_tokens": 8191,
+ "mode": "embedding"
+ },
+ "github_copilot/text-embedding-3-small-inference": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 8191,
+ "max_tokens": 8191,
+ "mode": "embedding"
+ },
+ "github_copilot/text-embedding-ada-002": {
+ "litellm_provider": "github_copilot",
+ "max_input_tokens": 8191,
+ "max_tokens": 8191,
+ "mode": "embedding"
+ },
+ "fireworks_ai/accounts/fireworks/models/": {
+ "max_tokens": 40960,
+ "max_input_tokens": 40960,
+ "max_output_tokens": 40960,
+ "input_cost_per_token": 1e-07,
+ "output_cost_per_token": 0.0,
+ "litellm_provider": "fireworks_ai",
+ "mode": "embedding"
+ },
+ "gpt-4o-transcribe-diarize": {
+ "input_cost_per_audio_token": 6e-06,
+ "input_cost_per_token": 2.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 16000,
+ "max_output_tokens": 2000,
+ "mode": "audio_transcription",
+ "output_cost_per_token": 1e-05,
+ "supported_endpoints": [
+ "/v1/audio/transcriptions"
+ ]
+ },
+ "gemini/gemini-3-flash-preview": {
+ "cache_read_input_token_cost": 5e-08,
+ "input_cost_per_audio_token": 1e-06,
+ "input_cost_per_token": 5e-07,
+ "litellm_provider": "gemini",
+ "max_audio_length_hours": 8.4,
+ "max_audio_per_prompt": 1,
+ "max_images_per_prompt": 3000,
+ "max_input_tokens": 1048576,
+ "max_output_tokens": 65535,
+ "max_pdf_size_mb": 30,
+ "max_tokens": 65535,
+ "max_video_length": 1,
+ "max_videos_per_prompt": 10,
+ "mode": "chat",
+ "output_cost_per_reasoning_token": 3e-06,
+ "output_cost_per_token": 3e-06,
+ "rpm": 2000,
+ "source": "https://ai.google.dev/pricing/gemini-3",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/completions",
+ "/v1/batch"
+ ],
+ "supported_modalities": [
+ "text",
+ "image",
+ "audio",
+ "video"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_audio_output": false,
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_url_context": true,
+ "supports_vision": true,
+ "supports_web_search": true,
+ "tpm": 800000
+ },
+ "gemini-3-flash-preview": {
+ "cache_read_input_token_cost": 5e-08,
+ "input_cost_per_audio_token": 1e-06,
+ "input_cost_per_token": 5e-07,
+ "litellm_provider": "vertex_ai-language-models",
+ "max_audio_length_hours": 8.4,
+ "max_audio_per_prompt": 1,
+ "max_images_per_prompt": 3000,
+ "max_input_tokens": 1048576,
+ "max_output_tokens": 65535,
+ "max_pdf_size_mb": 30,
+ "max_tokens": 65535,
+ "max_video_length": 1,
+ "max_videos_per_prompt": 10,
+ "mode": "chat",
+ "output_cost_per_reasoning_token": 3e-06,
+ "output_cost_per_token": 3e-06,
+ "source": "https://ai.google.dev/pricing/gemini-3",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/completions",
+ "/v1/batch"
+ ],
+ "supported_modalities": [
+ "text",
+ "image",
+ "audio",
+ "video"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_audio_output": false,
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_url_context": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
|
|
32714
|
+
"gpt-image-1.5": {
|
|
32715
|
+
"cache_read_input_image_token_cost": 2e-06,
|
|
32716
|
+
"cache_read_input_token_cost": 1.25e-06,
|
|
32717
|
+
"input_cost_per_token": 5e-06,
|
|
32718
|
+
"litellm_provider": "openai",
|
|
32719
|
+
"mode": "image_generation",
|
|
32720
|
+
"output_cost_per_token": 1e-05,
|
|
32721
|
+
"input_cost_per_image_token": 8e-06,
|
|
32722
|
+
"output_cost_per_image_token": 3.2e-05,
|
|
32723
|
+
"supported_endpoints": [
|
|
32724
|
+
"/v1/images/generations"
|
|
32725
|
+
],
|
|
32726
|
+
"supports_vision": true,
|
|
32727
|
+
"supports_pdf_input": true
|
|
32728
|
+
},
|
|
32729
|
+
"gpt-image-1.5-2025-12-16": {
|
|
32730
|
+
"cache_read_input_image_token_cost": 2e-06,
|
|
32731
|
+
"cache_read_input_token_cost": 1.25e-06,
|
|
32732
|
+
"input_cost_per_token": 5e-06,
|
|
32733
|
+
"litellm_provider": "openai",
|
|
32734
|
+
"mode": "image_generation",
|
|
32735
|
+
"output_cost_per_token": 1e-05,
|
|
32736
|
+
"input_cost_per_image_token": 8e-06,
|
|
32737
|
+
"output_cost_per_image_token": 3.2e-05,
|
|
32738
|
+
"supported_endpoints": [
|
|
32739
|
+
"/v1/images/generations"
|
|
32740
|
+
],
|
|
32741
|
+
"supports_vision": true,
|
|
32742
|
+
"supports_pdf_input": true
|
|
32743
|
+
},
|
32744 +   "vertex_ai/gemini-3-flash-preview": {
32745 +     "cache_read_input_token_cost": 5e-08,
32746 +     "input_cost_per_token": 5e-07,
32747 +     "input_cost_per_audio_token": 1e-06,
32748 +     "litellm_provider": "vertex_ai",
32749 +     "max_audio_length_hours": 8.4,
32750 +     "max_audio_per_prompt": 1,
32751 +     "max_images_per_prompt": 3000,
32752 +     "max_input_tokens": 1048576,
32753 +     "max_output_tokens": 65535,
32754 +     "max_pdf_size_mb": 30,
32755 +     "max_tokens": 65535,
32756 +     "max_video_length": 1,
32757 +     "max_videos_per_prompt": 10,
32758 +     "mode": "chat",
32759 +     "output_cost_per_token": 3e-06,
32760 +     "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
32761 +     "supported_endpoints": [
32762 +       "/v1/chat/completions",
32763 +       "/v1/completions",
32764 +       "/v1/batch"
32765 +     ],
32766 +     "supported_modalities": [
32767 +       "text",
32768 +       "image",
32769 +       "audio",
32770 +       "video"
32771 +     ],
32772 +     "supported_output_modalities": [
32773 +       "text"
32774 +     ],
32775 +     "supports_audio_input": true,
32776 +     "supports_function_calling": true,
32777 +     "supports_pdf_input": true,
32778 +     "supports_prompt_caching": true,
32779 +     "supports_reasoning": true,
32780 +     "supports_response_schema": true,
32781 +     "supports_system_messages": true,
32782 +     "supports_tool_choice": true,
32783 +     "supports_video_input": true,
32784 +     "supports_vision": true,
32785 +     "supports_web_search": true
32786 +   },
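
The vertex_ai/gemini-3-flash-preview entry above splits input pricing into text tokens, audio tokens, and a cache-read rate, plus an output rate. A minimal sketch of applying those rates to a usage record; the usage numbers are hypothetical and this is plain arithmetic, not the tokencostauto API:

```python
# Rates copied from the vertex_ai/gemini-3-flash-preview entry above.
RATES = {
    "input_cost_per_token": 5e-07,
    "input_cost_per_audio_token": 1e-06,
    "cache_read_input_token_cost": 5e-08,
    "output_cost_per_token": 3e-06,
}

def chat_cost(text_in: int, audio_in: int, cached_in: int, out: int) -> float:
    """USD estimate for one chat call; cached_in tokens bill at the cache-read rate."""
    billable_text = max(text_in - cached_in, 0)
    return (billable_text * RATES["input_cost_per_token"]
            + cached_in * RATES["cache_read_input_token_cost"]
            + audio_in * RATES["input_cost_per_audio_token"]
            + out * RATES["output_cost_per_token"])

# 10k text tokens (8k served from cache), 2k audio tokens, 1k output tokens
print(f"${chat_cost(10_000, 2_000, 8_000, 1_000):.6f}")
```
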
32787 +   "linkup/search": {
32788 +     "input_cost_per_query": 0.00587,
32789 +     "litellm_provider": "linkup",
32790 +     "mode": "search"
32791 +   },
32792 +   "linkup/search-deep": {
32793 +     "input_cost_per_query": 0.05867,
32794 +     "litellm_provider": "linkup",
32795 +     "mode": "search"
32796 +   },
32797 +   "stability/inpaint": {
32798 +     "litellm_provider": "stability",
32799 +     "mode": "image_edit",
32800 +     "output_cost_per_image": 0.005,
32801 +     "supported_endpoints": [
32802 +       "/v1/images/edits"
32803 +     ]
32804 +   },
32805 +   "stability/outpaint": {
32806 +     "litellm_provider": "stability",
32807 +     "mode": "image_edit",
32808 +     "output_cost_per_image": 0.004,
32809 +     "supported_endpoints": [
32810 +       "/v1/images/edits"
32811 +     ]
32812 +   },
32813 +   "stability/erase": {
32814 +     "litellm_provider": "stability",
32815 +     "mode": "image_edit",
32816 +     "output_cost_per_image": 0.005,
32817 +     "supported_endpoints": [
32818 +       "/v1/images/edits"
32819 +     ]
32820 +   },
32821 +   "stability/search-and-replace": {
32822 +     "litellm_provider": "stability",
32823 +     "mode": "image_edit",
32824 +     "output_cost_per_image": 0.005,
32825 +     "supported_endpoints": [
32826 +       "/v1/images/edits"
32827 +     ]
32828 +   },
32829 +   "stability/search-and-recolor": {
32830 +     "litellm_provider": "stability",
32831 +     "mode": "image_edit",
32832 +     "output_cost_per_image": 0.005,
32833 +     "supported_endpoints": [
32834 +       "/v1/images/edits"
32835 +     ]
32836 +   },
32837 +   "stability/remove-background": {
32838 +     "litellm_provider": "stability",
32839 +     "mode": "image_edit",
32840 +     "output_cost_per_image": 0.005,
32841 +     "supported_endpoints": [
32842 +       "/v1/images/edits"
32843 +     ]
32844 +   },
32845 +   "stability/replace-background-and-relight": {
32846 +     "litellm_provider": "stability",
32847 +     "mode": "image_edit",
32848 +     "output_cost_per_image": 0.008,
32849 +     "supported_endpoints": [
32850 +       "/v1/images/edits"
32851 +     ]
32852 +   },
32853 +   "stability/sketch": {
32854 +     "litellm_provider": "stability",
32855 +     "mode": "image_edit",
32856 +     "output_cost_per_image": 0.005,
32857 +     "supported_endpoints": [
32858 +       "/v1/images/edits"
32859 +     ]
32860 +   },
32861 +   "stability/structure": {
32862 +     "litellm_provider": "stability",
32863 +     "mode": "image_edit",
32864 +     "output_cost_per_image": 0.005,
32865 +     "supported_endpoints": [
32866 +       "/v1/images/edits"
32867 +     ]
32868 +   },
32869 +   "stability/style": {
32870 +     "litellm_provider": "stability",
32871 +     "mode": "image_edit",
32872 +     "output_cost_per_image": 0.005,
32873 +     "supported_endpoints": [
32874 +       "/v1/images/edits"
32875 +     ]
32876 +   },
32877 +   "stability/style-transfer": {
32878 +     "litellm_provider": "stability",
32879 +     "mode": "image_edit",
32880 +     "output_cost_per_image": 0.008,
32881 +     "supported_endpoints": [
32882 +       "/v1/images/edits"
32883 +     ]
32884 +   },
32885 +   "stability/fast": {
32886 +     "litellm_provider": "stability",
32887 +     "mode": "image_edit",
32888 +     "output_cost_per_image": 0.002,
32889 +     "supported_endpoints": [
32890 +       "/v1/images/edits"
32891 +     ]
32892 +   },
32893 +   "stability/conservative": {
32894 +     "litellm_provider": "stability",
32895 +     "mode": "image_edit",
32896 +     "output_cost_per_image": 0.04,
32897 +     "supported_endpoints": [
32898 +       "/v1/images/edits"
32899 +     ]
32900 +   },
32901 +   "stability/creative": {
32902 +     "litellm_provider": "stability",
32903 +     "mode": "image_edit",
32904 +     "output_cost_per_image": 0.06,
32905 +     "supported_endpoints": [
32906 +       "/v1/images/edits"
32907 +     ]
32908 +   },
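
The linkup entries are flat per-query prices, and the stability/* entries above are flat per-image prices keyed by edit operation, so costing them reduces to a table lookup. A small sketch using a few of the values from the diff (the lookup helper is illustrative, not part of tokencostauto):

```python
# Flat per-image prices copied from a subset of the stability/* entries above.
STABILITY_IMAGE_EDIT_PRICES = {
    "stability/inpaint": 0.005,
    "stability/outpaint": 0.004,
    "stability/fast": 0.002,
    "stability/conservative": 0.04,
    "stability/creative": 0.06,
}

def edit_cost(model: str, images: int = 1) -> float:
    """USD cost for `images` edited outputs at the flat per-image rate."""
    return STABILITY_IMAGE_EDIT_PRICES[model] * images

print(f"${edit_cost('stability/creative', images=3):.2f}")  # three creative edits
```
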
32909 +   "stability.stable-conservative-upscale-v1:0": {
32910 +     "litellm_provider": "bedrock",
32911 +     "max_input_tokens": 77,
32912 +     "mode": "image_edit",
32913 +     "output_cost_per_image": 0.4
32914 +   },
32915 +   "stability.stable-creative-upscale-v1:0": {
32916 +     "litellm_provider": "bedrock",
32917 +     "max_input_tokens": 77,
32918 +     "mode": "image_edit",
32919 +     "output_cost_per_image": 0.6
32920 +   },
32921 +   "stability.stable-fast-upscale-v1:0": {
32922 +     "litellm_provider": "bedrock",
32923 +     "max_input_tokens": 77,
32924 +     "mode": "image_edit",
32925 +     "output_cost_per_image": 0.03
32926 +   },
32927 +   "stability.stable-outpaint-v1:0": {
32928 +     "litellm_provider": "bedrock",
32929 +     "max_input_tokens": 77,
32930 +     "mode": "image_edit",
32931 +     "output_cost_per_image": 0.06
32932 +   },
32933 +   "stability.stable-image-control-sketch-v1:0": {
32934 +     "litellm_provider": "bedrock",
32935 +     "max_input_tokens": 77,
32936 +     "mode": "image_edit",
32937 +     "output_cost_per_image": 0.07
32938 +   },
32939 +   "stability.stable-image-control-structure-v1:0": {
32940 +     "litellm_provider": "bedrock",
32941 +     "max_input_tokens": 77,
32942 +     "mode": "image_edit",
32943 +     "output_cost_per_image": 0.07
32944 +   },
32945 +   "stability.stable-image-erase-object-v1:0": {
32946 +     "litellm_provider": "bedrock",
32947 +     "max_input_tokens": 77,
32948 +     "mode": "image_edit",
32949 +     "output_cost_per_image": 0.07
32950 +   },
32951 +   "stability.stable-image-inpaint-v1:0": {
32952 +     "litellm_provider": "bedrock",
32953 +     "max_input_tokens": 77,
32954 +     "mode": "image_edit",
32955 +     "output_cost_per_image": 0.07
32956 +   },
32957 +   "stability.stable-image-remove-background-v1:0": {
32958 +     "litellm_provider": "bedrock",
32959 +     "max_input_tokens": 77,
32960 +     "mode": "image_edit",
32961 +     "output_cost_per_image": 0.07
32962 +   },
32963 +   "stability.stable-image-search-recolor-v1:0": {
32964 +     "litellm_provider": "bedrock",
32965 +     "max_input_tokens": 77,
32966 +     "mode": "image_edit",
32967 +     "output_cost_per_image": 0.07
32968 +   },
32969 +   "stability.stable-image-search-replace-v1:0": {
32970 +     "litellm_provider": "bedrock",
32971 +     "max_input_tokens": 77,
32972 +     "mode": "image_edit",
32973 +     "output_cost_per_image": 0.07
32974 +   },
32975 +   "stability.stable-image-style-guide-v1:0": {
32976 +     "litellm_provider": "bedrock",
32977 +     "max_input_tokens": 77,
32978 +     "mode": "image_edit",
32979 +     "output_cost_per_image": 0.07
32980 +   },
32981 +   "stability.stable-style-transfer-v1:0": {
32982 +     "litellm_provider": "bedrock",
32983 +     "max_input_tokens": 77,
32984 +     "mode": "image_edit",
32985 +     "output_cost_per_image": 0.08
32986 +   },
32987 +   "vertex_ai/deepseek-ai/deepseek-ocr-maas": {
32988 +     "litellm_provider": "vertex_ai",
32989 +     "mode": "ocr",
32990 +     "input_cost_per_token": 3e-07,
32991 +     "output_cost_per_token": 1.2e-06,
32992 +     "ocr_cost_per_page": 0.0003,
32993 +     "source": "https://cloud.google.com/vertex-ai/pricing"
32994 +   },
32995 +   "gemini/veo-3.1-fast-generate-001": {
32996 +     "litellm_provider": "gemini",
32997 +     "max_input_tokens": 1024,
32998 +     "max_tokens": 1024,
32999 +     "mode": "video_generation",
33000 +     "output_cost_per_second": 0.15,
33001 +     "source": "https://ai.google.dev/gemini-api/docs/video",
33002 +     "supported_modalities": [
33003 +       "text"
33004 +     ],
33005 +     "supported_output_modalities": [
33006 +       "video"
33007 +     ]
33008 +   },
33009 +   "gemini/veo-3.1-generate-001": {
33010 +     "litellm_provider": "gemini",
33011 +     "max_input_tokens": 1024,
33012 +     "max_tokens": 1024,
33013 +     "mode": "video_generation",
33014 +     "output_cost_per_second": 0.4,
33015 +     "source": "https://ai.google.dev/gemini-api/docs/video",
33016 +     "supported_modalities": [
33017 +       "text"
33018 +     ],
33019 +     "supported_output_modalities": [
33020 +       "video"
33021 +     ]
33022 +   },
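
The veo-3.1 entries above bill video generation per second of output rather than per token. A minimal sketch of that arithmetic; the clip lengths are hypothetical inputs and the helper is illustrative only:

```python
# output_cost_per_second values copied from the gemini/veo-3.1 entries above.
VIDEO_RATES = {
    "gemini/veo-3.1-generate-001": 0.40,
    "gemini/veo-3.1-fast-generate-001": 0.15,
}

def video_cost(model: str, seconds: float) -> float:
    """USD cost of a generated clip of the given duration."""
    return VIDEO_RATES[model] * seconds

print(f"${video_cost('gemini/veo-3.1-fast-generate-001', 8):.2f}")  # an 8-second clip
```
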
33023 +   "aws_polly/standard": {
33024 +     "input_cost_per_character": 4e-06,
33025 +     "litellm_provider": "aws_polly",
33026 +     "mode": "audio_speech",
33027 +     "supported_endpoints": [
33028 +       "/v1/audio/speech"
33029 +     ],
33030 +     "source": "https://aws.amazon.com/polly/pricing/"
33031 +   },
33032 +   "aws_polly/neural": {
33033 +     "input_cost_per_character": 1.6e-05,
33034 +     "litellm_provider": "aws_polly",
33035 +     "mode": "audio_speech",
33036 +     "supported_endpoints": [
33037 +       "/v1/audio/speech"
33038 +     ],
33039 +     "source": "https://aws.amazon.com/polly/pricing/"
33040 +   },
33041 +   "aws_polly/long-form": {
33042 +     "input_cost_per_character": 0.0001,
33043 +     "litellm_provider": "aws_polly",
33044 +     "mode": "audio_speech",
33045 +     "supported_endpoints": [
33046 +       "/v1/audio/speech"
33047 +     ],
33048 +     "source": "https://aws.amazon.com/polly/pricing/"
33049 +   },
33050 +   "aws_polly/generative": {
33051 +     "input_cost_per_character": 3e-05,
33052 +     "litellm_provider": "aws_polly",
33053 +     "mode": "audio_speech",
33054 +     "supported_endpoints": [
33055 +       "/v1/audio/speech"
33056 +     ],
33057 +     "source": "https://aws.amazon.com/polly/pricing/"
33058 +   },
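
The four aws_polly/* entries above price text-to-speech per input character, one rate per voice tier. A short sketch comparing the tiers on the same text; the sample text is arbitrary and the helper is illustrative, not the tokencostauto API:

```python
# input_cost_per_character values copied from the aws_polly/* entries above.
POLLY_RATES = {
    "aws_polly/standard": 4e-06,
    "aws_polly/neural": 1.6e-05,
    "aws_polly/generative": 3e-05,
    "aws_polly/long-form": 1e-04,
}

def tts_cost(model: str, text: str) -> float:
    """USD cost to synthesize `text` at the per-character rate."""
    return POLLY_RATES[model] * len(text)

sample = "Hello from the pricing table. " * 100  # arbitrary sample text
for model in POLLY_RATES:
    print(f"{model}: ${tts_cost(model, sample):.4f}")
```
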
33059 +   "vertex_ai/veo-3.1-generate-001": {
33060 +     "litellm_provider": "vertex_ai-video-models",
33061 +     "max_input_tokens": 1024,
33062 +     "max_tokens": 1024,
33063 +     "mode": "video_generation",
33064 +     "output_cost_per_second": 0.4,
33065 +     "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/veo",
33066 +     "supported_modalities": [
33067 +       "text"
33068 +     ],
33069 +     "supported_output_modalities": [
33070 +       "video"
33071 +     ]
33072 +   },
33073 +   "vertex_ai/veo-3.1-fast-generate-001": {
33074 +     "litellm_provider": "vertex_ai-video-models",
33075 +     "max_input_tokens": 1024,
33076 +     "max_tokens": 1024,
33077 +     "mode": "video_generation",
33078 +     "output_cost_per_second": 0.15,
33079 +     "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/veo",
33080 +     "supported_modalities": [
33081 +       "text"
33082 +     ],
33083 +     "supported_output_modalities": [
33084 +       "video"
33085 +     ]
33086 +   },
33087 +   "azure_ai/gpt-oss-120b": {
33088 +     "input_cost_per_token": 1.5e-07,
33089 +     "output_cost_per_token": 6e-07,
33090 +     "litellm_provider": "azure_ai",
33091 +     "max_input_tokens": 131072,
33092 +     "max_output_tokens": 131072,
33093 +     "max_tokens": 131072,
33094 +     "mode": "chat",
33095 +     "source": "https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/",
33096 +     "supports_function_calling": true,
33097 +     "supports_parallel_function_calling": true,
33098 +     "supports_response_schema": true,
33099 +     "supports_tool_choice": true
33100 +   },
33101 +   "azure/gpt-image-1.5": {
33102 +     "cache_read_input_image_token_cost": 2e-06,
33103 +     "cache_read_input_token_cost": 1.25e-06,
33104 +     "input_cost_per_token": 5e-06,
33105 +     "input_cost_per_image_token": 8e-06,
33106 +     "litellm_provider": "azure",
33107 +     "mode": "image_generation",
33108 +     "output_cost_per_image_token": 3.2e-05,
33109 +     "supported_endpoints": [
33110 +       "/v1/images/generations",
33111 +       "/v1/images/edits"
33112 +     ]
33113 +   },
33114 +   "azure/gpt-image-1.5-2025-12-16": {
33115 +     "cache_read_input_image_token_cost": 2e-06,
33116 +     "cache_read_input_token_cost": 1.25e-06,
33117 +     "input_cost_per_token": 5e-06,
33118 +     "input_cost_per_image_token": 8e-06,
33119 +     "litellm_provider": "azure",
33120 +     "mode": "image_generation",
33121 +     "output_cost_per_image_token": 3.2e-05,
33122 +     "supported_endpoints": [
33123 +       "/v1/images/generations",
33124 +       "/v1/images/edits"
33125 +     ]
33126 +   },
33127 +   "groq/meta-llama/llama-guard-4-12b": {
33128 +     "input_cost_per_token": 2e-07,
33129 +     "litellm_provider": "groq",
33130 +     "max_input_tokens": 8192,
33131 +     "max_output_tokens": 8192,
33132 +     "max_tokens": 8192,
33133 +     "mode": "chat",
33134 +     "output_cost_per_token": 2e-07
33135 +   },
33136 +   "minimax/speech-02-hd": {
33137 +     "input_cost_per_character": 0.0001,
33138 +     "litellm_provider": "minimax",
33139 +     "mode": "audio_speech",
33140 +     "supported_endpoints": [
33141 +       "/v1/audio/speech"
33142 +     ]
33143 +   },
33144 +   "minimax/speech-02-turbo": {
33145 +     "input_cost_per_character": 6e-05,
33146 +     "litellm_provider": "minimax",
33147 +     "mode": "audio_speech",
33148 +     "supported_endpoints": [
33149 +       "/v1/audio/speech"
33150 +     ]
33151 +   },
33152 +   "minimax/speech-2.6-hd": {
33153 +     "input_cost_per_character": 0.0001,
33154 +     "litellm_provider": "minimax",
33155 +     "mode": "audio_speech",
33156 +     "supported_endpoints": [
33157 +       "/v1/audio/speech"
33158 +     ]
33159 +   },
33160 +   "minimax/speech-2.6-turbo": {
33161 +     "input_cost_per_character": 6e-05,
33162 +     "litellm_provider": "minimax",
33163 +     "mode": "audio_speech",
33164 +     "supported_endpoints": [
33165 +       "/v1/audio/speech"
33166 +     ]
33167 +   },
33168 +   "minimax/MiniMax-M2.1": {
33169 +     "input_cost_per_token": 3e-07,
33170 +     "output_cost_per_token": 1.2e-06,
33171 +     "cache_read_input_token_cost": 3e-08,
33172 +     "cache_creation_input_token_cost": 3.75e-07,
33173 +     "litellm_provider": "minimax",
33174 +     "mode": "chat",
33175 +     "supports_function_calling": true,
33176 +     "supports_tool_choice": true,
33177 +     "supports_prompt_caching": true,
33178 +     "supports_system_messages": true,
33179 +     "max_input_tokens": 1000000,
33180 +     "max_output_tokens": 8192
33181 +   },
33182 +   "minimax/MiniMax-M2.1-lightning": {
33183 +     "input_cost_per_token": 3e-07,
33184 +     "output_cost_per_token": 2.4e-06,
33185 +     "cache_read_input_token_cost": 3e-08,
33186 +     "cache_creation_input_token_cost": 3.75e-07,
33187 +     "litellm_provider": "minimax",
33188 +     "mode": "chat",
33189 +     "supports_function_calling": true,
33190 +     "supports_tool_choice": true,
33191 +     "supports_prompt_caching": true,
33192 +     "supports_system_messages": true,
33193 +     "max_input_tokens": 1000000,
33194 +     "max_output_tokens": 8192
33195 +   },
33196 +   "minimax/MiniMax-M2": {
33197 +     "input_cost_per_token": 3e-07,
33198 +     "output_cost_per_token": 1.2e-06,
33199 +     "cache_read_input_token_cost": 3e-08,
33200 +     "cache_creation_input_token_cost": 3.75e-07,
33201 +     "litellm_provider": "minimax",
33202 +     "mode": "chat",
33203 +     "supports_function_calling": true,
33204 +     "supports_tool_choice": true,
33205 +     "supports_prompt_caching": true,
33206 +     "supports_system_messages": true,
33207 +     "max_input_tokens": 200000,
33208 +     "max_output_tokens": 8192
33209 +   },
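
The minimax/MiniMax-M2* chat entries above add cache-read and cache-creation rates alongside the normal input and output rates. A minimal sketch of how a cached conversation turn could be priced under those four fields; the split of tokens into cache reads, cache writes, and fresh input is a hypothetical example, not how tokencostauto itself computes it:

```python
# Rates copied from the minimax/MiniMax-M2.1 entry above.
M2_1 = {
    "input_cost_per_token": 3e-07,
    "output_cost_per_token": 1.2e-06,
    "cache_read_input_token_cost": 3e-08,
    "cache_creation_input_token_cost": 3.75e-07,
}

def cached_chat_cost(fresh_in: int, cache_read: int, cache_write: int, out: int) -> float:
    """USD estimate: fresh input, cache hits, newly cached tokens, and output."""
    return (fresh_in * M2_1["input_cost_per_token"]
            + cache_read * M2_1["cache_read_input_token_cost"]
            + cache_write * M2_1["cache_creation_input_token_cost"]
            + out * M2_1["output_cost_per_token"])

# 50k-token context: 40k read from cache, 10k newly cached, 2k output tokens
print(f"${cached_chat_cost(0, 40_000, 10_000, 2_000):.4f}")
```
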
33210 +   "azure/gpt-5.2-chat": {
33211 +     "cache_read_input_token_cost": 1.75e-07,
33212 +     "cache_read_input_token_cost_priority": 3.5e-07,
33213 +     "input_cost_per_token": 1.75e-06,
33214 +     "input_cost_per_token_priority": 3.5e-06,
33215 +     "litellm_provider": "azure",
33216 +     "max_input_tokens": 128000,
33217 +     "max_output_tokens": 16384,
33218 +     "max_tokens": 16384,
33219 +     "mode": "chat",
33220 +     "output_cost_per_token": 1.4e-05,
33221 +     "output_cost_per_token_priority": 2.8e-05,
33222 +     "supported_endpoints": [
33223 +       "/v1/chat/completions",
33224 +       "/v1/responses"
33225 +     ],
33226 +     "supported_modalities": [
33227 +       "text",
33228 +       "image"
33229 +     ],
33230 +     "supported_output_modalities": [
33231 +       "text"
33232 +     ],
33233 +     "supports_function_calling": true,
33234 +     "supports_native_streaming": true,
33235 +     "supports_parallel_function_calling": true,
33236 +     "supports_pdf_input": true,
33237 +     "supports_prompt_caching": true,
33238 +     "supports_reasoning": true,
33239 +     "supports_response_schema": true,
33240 +     "supports_system_messages": true,
33241 +     "supports_tool_choice": true,
33242 +     "supports_vision": true
33243 +   },
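
The azure/gpt-5.2-chat entry above carries parallel standard and "_priority" rates for input, cached input, and output tokens. A short sketch comparing the two tiers on the same usage; the token counts and the tier-selection helper are illustrative assumptions only:

```python
# Standard vs. priority rates copied from the azure/gpt-5.2-chat entry above.
GPT52_CHAT = {
    "standard": {"in": 1.75e-06, "cached": 1.75e-07, "out": 1.4e-05},
    "priority": {"in": 3.5e-06, "cached": 3.5e-07, "out": 2.8e-05},
}

def request_cost(tier: str, in_tokens: int, cached: int, out_tokens: int) -> float:
    """USD estimate for one request, billing cached tokens at the cache-read rate."""
    r = GPT52_CHAT[tier]
    return (in_tokens - cached) * r["in"] + cached * r["cached"] + out_tokens * r["out"]

for tier in ("standard", "priority"):
    print(tier, f"${request_cost(tier, 20_000, 5_000, 1_000):.4f}")
```
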
33244 +   "aiml/google/imagen-4.0-ultra-generate-001": {
33245 +     "litellm_provider": "aiml",
33246 +     "metadata": {
33247 +       "notes": "Imagen 4.0 Ultra Generate API - Photorealistic image generation with precise text rendering"
33248 +     },
33249 +     "mode": "image_generation",
33250 +     "output_cost_per_image": 0.063,
33251 +     "source": "https://docs.aimlapi.com/api-references/image-models/google/imagen-4-ultra-generate",
33252 +     "supported_endpoints": [
33253 +       "/v1/images/generations"
33254 +     ]
33255 +   },
33256 +   "aiml/google/nano-banana-pro": {
33257 +     "litellm_provider": "aiml",
33258 +     "metadata": {
33259 +       "notes": "Gemini 3 Pro Image (Nano Banana Pro) - Advanced text-to-image generation with reasoning and 4K resolution support"
33260 +     },
33261 +     "mode": "image_generation",
33262 +     "output_cost_per_image": 0.1575,
33263 +     "source": "https://docs.aimlapi.com/api-references/image-models/google/gemini-3-pro-image-preview",
33264 +     "supported_endpoints": [
33265 +       "/v1/images/generations"
33266 +     ]
29152 33267   }
29153 33268 }