tokencostauto 0.1.400__py3-none-any.whl → 0.1.404__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tokencostauto/model_prices.json +489 -5
- {tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/METADATA +1 -1
- tokencostauto-0.1.404.dist-info/RECORD +9 -0
- tokencostauto-0.1.400.dist-info/RECORD +0 -9
- {tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/WHEEL +0 -0
- {tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/licenses/LICENSE +0 -0
- {tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/top_level.txt +0 -0
tokencostauto/model_prices.json
CHANGED
@@ -9781,8 +9781,8 @@
     "input_cost_per_token": 3e-06,
     "litellm_provider": "anthropic",
     "max_input_tokens": 200000,
-    "max_output_tokens":
-    "max_tokens":
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
     "mode": "chat",
     "output_cost_per_token": 1.5e-05,
     "search_context_cost_per_query": {
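Read as USD per token, the entry above (an Anthropic chat model, per litellm_provider) works out to $3 per million input tokens and $15 per million output tokens, with completions now capped at 64,000 tokens. A minimal sketch of that arithmetic in plain Python, independent of tokencostauto's own helpers; the token counts are invented for illustration:

```python
# Pricing fields copied from the entry above (USD per token).
input_cost_per_token = 3e-06
output_cost_per_token = 1.5e-05
max_output_tokens = 64000

# Hypothetical usage figures, for illustration only.
prompt_tokens = 12_000
completion_tokens = 2_500

assert completion_tokens <= max_output_tokens
cost_usd = (prompt_tokens * input_cost_per_token
            + completion_tokens * output_cost_per_token)
print(f"${cost_usd:.4f}")  # 0.036 + 0.0375 -> $0.0735
```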
@@ -9809,8 +9809,8 @@
     "input_cost_per_token": 3e-06,
     "litellm_provider": "anthropic",
     "max_input_tokens": 200000,
-    "max_output_tokens":
-    "max_tokens":
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
     "mode": "chat",
     "output_cost_per_token": 1.5e-05,
     "search_context_cost_per_query": {
@@ -11365,15 +11365,20 @@
     "supports_vision": true
   },
   "gpt-image-1": {
+    "input_cost_per_image": 0.042,
     "input_cost_per_pixel": 4.0054321e-08,
+    "input_cost_per_token": 5e-06,
+    "input_cost_per_image_token": 1e-05,
     "litellm_provider": "openai",
     "mode": "image_generation",
     "output_cost_per_pixel": 0.0,
+    "output_cost_per_token": 4e-05,
     "supported_endpoints": [
       "/v1/images/generations"
     ]
   },
   "low/1024-x-1024/gpt-image-1": {
+    "input_cost_per_image": 0.011,
     "input_cost_per_pixel": 1.0490417e-08,
     "litellm_provider": "openai",
     "mode": "image_generation",
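The new input_cost_per_image values appear to be the existing input_cost_per_pixel rates multiplied out at each resolution; this and the following gpt-image-1 hunks all follow the same pattern. A quick cross-check (my own sketch, not code from the package), using the per-pixel rates and per-image figures from these hunks:

```python
# Cross-check: per-image cost ~= per-pixel cost x pixel count at the named resolution.
checks = [
    # (per-pixel rate, width, height, per-image cost added in this diff)
    (1.0490417e-08, 1024, 1024, 0.011),   # low/1024-x-1024
    (4.0054321e-08, 1024, 1024, 0.042),   # gpt-image-1 and medium/1024-x-1024
    (1.59263611e-07, 1024, 1024, 0.167),  # high/1024-x-1024
    (1.0172526e-08, 1024, 1536, 0.016),   # low/1024-x-1536 (and 1536-x-1024)
    (4.0054321e-08, 1024, 1536, 0.063),   # medium/1024-x-1536 (and 1536-x-1024)
    (1.58945719e-07, 1024, 1536, 0.25),   # high/1024-x-1536 (and 1536-x-1024)
]
for per_pixel, width, height, per_image in checks:
    derived = per_pixel * width * height
    print(f"{width}x{height}: derived={derived:.3f}  listed={per_image}")
```

The derived and listed figures agree to within rounding, which suggests the per-image fields are a convenience duplication of the per-pixel rates rather than a price change.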
@@ -11383,6 +11388,7 @@
     ]
   },
   "medium/1024-x-1024/gpt-image-1": {
+    "input_cost_per_image": 0.042,
     "input_cost_per_pixel": 4.0054321e-08,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11392,6 +11398,7 @@
     ]
   },
   "high/1024-x-1024/gpt-image-1": {
+    "input_cost_per_image": 0.167,
     "input_cost_per_pixel": 1.59263611e-07,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11401,6 +11408,7 @@
     ]
   },
   "low/1024-x-1536/gpt-image-1": {
+    "input_cost_per_image": 0.016,
     "input_cost_per_pixel": 1.0172526e-08,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11410,6 +11418,7 @@
     ]
   },
   "medium/1024-x-1536/gpt-image-1": {
+    "input_cost_per_image": 0.063,
     "input_cost_per_pixel": 4.0054321e-08,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11419,6 +11428,7 @@
     ]
   },
   "high/1024-x-1536/gpt-image-1": {
+    "input_cost_per_image": 0.25,
     "input_cost_per_pixel": 1.58945719e-07,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11428,6 +11438,7 @@
     ]
   },
   "low/1536-x-1024/gpt-image-1": {
+    "input_cost_per_image": 0.016,
     "input_cost_per_pixel": 1.0172526e-08,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11437,6 +11448,7 @@
     ]
   },
   "medium/1536-x-1024/gpt-image-1": {
+    "input_cost_per_image": 0.063,
     "input_cost_per_pixel": 4.0054321e-08,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -11446,6 +11458,7 @@
     ]
   },
   "high/1536-x-1024/gpt-image-1": {
+    "input_cost_per_image": 0.25,
     "input_cost_per_pixel": 1.58945719e-07,
     "litellm_provider": "openai",
     "mode": "image_generation",
@@ -17990,6 +18003,7 @@
     "output_cost_per_token": 2.19e-06,
     "source": "https://fireworks.ai/models/fireworks/glm-4p5",
     "supports_function_calling": true,
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -18003,6 +18017,7 @@
     "output_cost_per_token": 8.8e-07,
     "source": "https://artificialanalysis.ai/models/glm-4-5-air",
     "supports_function_calling": true,
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -18016,6 +18031,7 @@
     "output_cost_per_token": 6e-07,
     "source": "https://fireworks.ai/pricing",
     "supports_function_calling": true,
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -18029,6 +18045,7 @@
     "output_cost_per_token": 2e-07,
     "source": "https://fireworks.ai/pricing",
     "supports_function_calling": true,
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -20525,6 +20542,7 @@
     "mode": "chat",
     "output_cost_per_token": 1.68e-06,
     "source": "https://fireworks.ai/pricing",
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -26601,6 +26619,7 @@
     "mode": "chat",
     "output_cost_per_token": 1.68e-06,
     "source": "https://fireworks.ai/pricing",
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -28178,6 +28197,7 @@
     "mode": "chat",
     "source": "https://fireworks.ai/pricing",
     "supports_function_calling": true,
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -28662,6 +28682,7 @@
     "output_cost_per_token": 1.2e-06,
     "source": "https://fireworks.ai/models/fireworks/deepseek-v3p2",
     "supports_function_calling": true,
+    "supports_reasoning": true,
     "supports_response_schema": true,
     "supports_tool_choice": true
   },
@@ -30039,7 +30060,8 @@
     "input_cost_per_token": 1.2e-06,
     "output_cost_per_token": 1.2e-06,
     "litellm_provider": "fireworks_ai",
-    "mode": "chat"
+    "mode": "chat",
+    "supports_reasoning": true
   },
   "fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-120b": {
     "max_tokens": 131072,
@@ -32094,5 +32116,467 @@
     "supported_endpoints": [
       "/v1/images/generations"
     ]
+  },
+  "openrouter/mistralai/devstral-2512:free": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 0,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 262144,
+    "max_output_tokens": null,
+    "max_tokens": 262144,
+    "mode": "chat",
+    "output_cost_per_token": 0,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_tool_choice": true,
+    "supports_vision": false
+  },
+  "openrouter/mistralai/devstral-2512": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 1.5e-07,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 262144,
+    "max_output_tokens": 65536,
+    "max_tokens": 262144,
+    "mode": "chat",
+    "output_cost_per_token": 6e-07,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_tool_choice": true,
+    "supports_vision": false
+  },
+  "openrouter/mistralai/ministral-3b-2512": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 1e-07,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 131072,
+    "max_output_tokens": null,
+    "max_tokens": 131072,
+    "mode": "chat",
+    "output_cost_per_token": 1e-07,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "openrouter/mistralai/ministral-8b-2512": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 1.5e-07,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 262144,
+    "max_output_tokens": null,
+    "max_tokens": 262144,
+    "mode": "chat",
+    "output_cost_per_token": 1.5e-07,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "openrouter/mistralai/ministral-14b-2512": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 2e-07,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 262144,
+    "max_output_tokens": null,
+    "max_tokens": 262144,
+    "mode": "chat",
+    "output_cost_per_token": 2e-07,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "openrouter/mistralai/mistral-large-2512": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 5e-07,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 262144,
+    "max_output_tokens": null,
+    "max_tokens": 262144,
+    "mode": "chat",
+    "output_cost_per_token": 1.5e-06,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "openrouter/openai/gpt-5.2": {
+    "input_cost_per_image": 0,
+    "cache_read_input_token_cost": 1.75e-07,
+    "input_cost_per_token": 1.75e-06,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 400000,
+    "max_output_tokens": 128000,
+    "max_tokens": 400000,
+    "mode": "chat",
+    "output_cost_per_token": 1.4e-05,
+    "supports_function_calling": true,
+    "supports_prompt_caching": true,
+    "supports_reasoning": true,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "openrouter/openai/gpt-5.2-chat": {
+    "input_cost_per_image": 0,
+    "cache_read_input_token_cost": 1.75e-07,
+    "input_cost_per_token": 1.75e-06,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16384,
+    "max_tokens": 128000,
+    "mode": "chat",
+    "output_cost_per_token": 1.4e-05,
+    "supports_function_calling": true,
+    "supports_prompt_caching": true,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "openrouter/openai/gpt-5.2-pro": {
+    "input_cost_per_image": 0,
+    "input_cost_per_token": 2.1e-05,
+    "litellm_provider": "openrouter",
+    "max_input_tokens": 400000,
+    "max_output_tokens": 128000,
+    "max_tokens": 400000,
+    "mode": "chat",
+    "output_cost_per_token": 0.000168,
+    "supports_function_calling": true,
+    "supports_prompt_caching": false,
+    "supports_reasoning": true,
+    "supports_tool_choice": true,
+    "supports_vision": true
+  },
+  "azure_ai/deepseek-v3.2": {
+    "input_cost_per_token": 5.8e-07,
+    "litellm_provider": "azure_ai",
+    "max_input_tokens": 163840,
+    "max_output_tokens": 163840,
+    "max_tokens": 8192,
+    "mode": "chat",
+    "output_cost_per_token": 1.68e-06,
+    "supports_assistant_prefill": true,
+    "supports_function_calling": true,
+    "supports_prompt_caching": true,
+    "supports_reasoning": true,
+    "supports_tool_choice": true
+  },
+  "azure_ai/deepseek-v3.2-speciale": {
+    "input_cost_per_token": 5.8e-07,
+    "litellm_provider": "azure_ai",
+    "max_input_tokens": 163840,
+    "max_output_tokens": 163840,
+    "max_tokens": 8192,
+    "mode": "chat",
+    "output_cost_per_token": 1.68e-06,
+    "supports_assistant_prefill": true,
+    "supports_function_calling": true,
+    "supports_prompt_caching": true,
+    "supports_reasoning": true,
+    "supports_tool_choice": true
+  },
+  "github_copilot/claude-haiku-4.5": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16000,
+    "max_tokens": 16000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/claude-opus-4.5": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16000,
+    "max_tokens": 16000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/claude-opus-41": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 80000,
+    "max_output_tokens": 16000,
+    "max_tokens": 16000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions"
+    ],
+    "supports_vision": true
+  },
+  "github_copilot/claude-sonnet-4": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16000,
+    "max_tokens": 16000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/claude-sonnet-4.5": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16000,
+    "max_tokens": 16000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/gemini-2.5-pro": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/gemini-3-pro-preview": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-3.5-turbo": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 16384,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true
+  },
+  "github_copilot/gpt-3.5-turbo-0613": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 16384,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true
+  },
+  "github_copilot/gpt-4": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 32768,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true
+  },
+  "github_copilot/gpt-4-0613": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 32768,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true
+  },
+  "github_copilot/gpt-4-o-preview": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true
+  },
+  "github_copilot/gpt-4.1": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16384,
+    "max_tokens": 16384,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-4.1-2025-04-14": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16384,
+    "max_tokens": 16384,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-41-copilot": {
+    "litellm_provider": "github_copilot",
+    "mode": "completion"
+  },
+  "github_copilot/gpt-4o": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-4o-2024-05-13": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-4o-2024-08-06": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 16384,
+    "max_tokens": 16384,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true
+  },
+  "github_copilot/gpt-4o-2024-11-20": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 16384,
+    "max_tokens": 16384,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-4o-mini": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true
+  },
+  "github_copilot/gpt-4o-mini-2024-07-18": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 64000,
+    "max_output_tokens": 4096,
+    "max_tokens": 4096,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true
+  },
+  "github_copilot/gpt-5": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 128000,
+    "max_tokens": 128000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions",
+      "/responses"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-5-mini": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-5.1": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions",
+      "/responses"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-5.1-codex-max": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 128000,
+    "max_tokens": 128000,
+    "mode": "responses",
+    "supported_endpoints": [
+      "/responses"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/gpt-5.2": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 128000,
+    "max_output_tokens": 64000,
+    "max_tokens": 64000,
+    "mode": "chat",
+    "supported_endpoints": [
+      "/chat/completions",
+      "/responses"
+    ],
+    "supports_function_calling": true,
+    "supports_parallel_function_calling": true,
+    "supports_response_schema": true,
+    "supports_vision": true
+  },
+  "github_copilot/text-embedding-3-small": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 8191,
+    "max_tokens": 8191,
+    "mode": "embedding"
+  },
+  "github_copilot/text-embedding-3-small-inference": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 8191,
+    "max_tokens": 8191,
+    "mode": "embedding"
+  },
+  "github_copilot/text-embedding-ada-002": {
+    "litellm_provider": "github_copilot",
+    "max_input_tokens": 8191,
+    "max_tokens": 8191,
+    "mode": "embedding"
+  },
+  "fireworks_ai/accounts/fireworks/models/": {
+    "max_tokens": 40960,
+    "max_input_tokens": 40960,
+    "max_output_tokens": 40960,
+    "input_cost_per_token": 1e-07,
+    "output_cost_per_token": 0.0,
+    "litellm_provider": "fireworks_ai",
+    "mode": "embedding"
   }
 }
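Most of this version's 489 added lines land in this final hunk: the openrouter Mistral 2512 family and GPT-5.2 variants, two azure_ai DeepSeek v3.2 entries, a large github_copilot block, and a fireworks_ai embedding entry. The github_copilot entries carry context limits and capability flags but no cost fields, so a cost lookup for them has nothing to price. A sketch of reading the bundled file directly with the standard library (this loads the JSON shipped in the wheel, see the RECORD below, and bypasses whatever helper functions tokencostauto itself exposes, which are not shown in this diff):

```python
import json
from importlib.resources import files

# model_prices.json ships inside the installed package (see the new RECORD),
# so it can be loaded without any network call.
prices = json.loads(files("tokencostauto").joinpath("model_prices.json").read_text())

# One of the entries added in 0.1.404; the token counts are invented.
entry = prices["openrouter/openai/gpt-5.2"]
prompt_tokens, completion_tokens = 10_000, 1_000
cost_usd = (prompt_tokens * entry["input_cost_per_token"]
            + completion_tokens * entry["output_cost_per_token"])
print(f"context window {entry['max_input_tokens']}, estimated cost ${cost_usd:.4f}")

# The github_copilot/* entries above publish no *_cost_per_token keys,
# so guard the lookup when sweeping the whole table.
copilot = prices["github_copilot/gpt-5.1"]
print(copilot.get("input_cost_per_token", "no pricing published"))
```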
{tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tokencostauto
-Version: 0.1.400
+Version: 0.1.404
 Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
 Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
 Project-URL: Homepage, https://github.com/madpin/tokencostaudo
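The METADATA change is just the version bump. A quick way to confirm which build is installed locally (standard importlib.metadata usage, nothing specific to this package):

```python
# e.g. after: pip install "tokencostauto==0.1.404"
from importlib.metadata import version

assert version("tokencostauto") == "0.1.404"
```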
tokencostauto-0.1.404.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
+tokencostauto/__init__.py,sha256=-4d_ryFH62SgNXPXA8vGPFZoAKtOBjnsg37EB_RkZG8,289
+tokencostauto/constants.py,sha256=_82MlTkTrdrwzyRosQD7d3JdgNP9KAUM-cZo8DE00P0,3395
+tokencostauto/costs.py,sha256=tXsgrTypq-dCHaHtoXcg2XepezWsAvZpl9gEsv_53iE,10679
+tokencostauto/model_prices.json,sha256=yMJ3xvLn07G099uEWmMtIgM5iCspy1j9C7o-7JuuU7Y,1159721
+tokencostauto-0.1.404.dist-info/licenses/LICENSE,sha256=4PLv_CD6Ughnsvg_nM2XeTqGwVK6lQVR77kVWbPq-0U,1065
+tokencostauto-0.1.404.dist-info/METADATA,sha256=_P8DrC2jLAZz8Xg7vS66JX9lQJu287AiBMiIUjKJ3Ck,204076
+tokencostauto-0.1.404.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tokencostauto-0.1.404.dist-info/top_level.txt,sha256=szZQTUJRotfIaeZCDsOgvofIkLt2ak88RP13oI51-TU,14
+tokencostauto-0.1.404.dist-info/RECORD,,
tokencostauto-0.1.400.dist-info/RECORD DELETED
@@ -1,9 +0,0 @@
-tokencostauto/__init__.py,sha256=-4d_ryFH62SgNXPXA8vGPFZoAKtOBjnsg37EB_RkZG8,289
-tokencostauto/constants.py,sha256=_82MlTkTrdrwzyRosQD7d3JdgNP9KAUM-cZo8DE00P0,3395
-tokencostauto/costs.py,sha256=tXsgrTypq-dCHaHtoXcg2XepezWsAvZpl9gEsv_53iE,10679
-tokencostauto/model_prices.json,sha256=ATldHSLnRtIcAr-7UM4PvIm8CK6uTETDmXqXFZ838Po,1143273
-tokencostauto-0.1.400.dist-info/licenses/LICENSE,sha256=4PLv_CD6Ughnsvg_nM2XeTqGwVK6lQVR77kVWbPq-0U,1065
-tokencostauto-0.1.400.dist-info/METADATA,sha256=1VkC9GSVMid4DfySdY_-pM66yG23IOhVs3KmU2GqLHI,204076
-tokencostauto-0.1.400.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-tokencostauto-0.1.400.dist-info/top_level.txt,sha256=szZQTUJRotfIaeZCDsOgvofIkLt2ak88RP13oI51-TU,14
-tokencostauto-0.1.400.dist-info/RECORD,,
{tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/WHEEL
File without changes
{tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/licenses/LICENSE
File without changes
{tokencostauto-0.1.400.dist-info → tokencostauto-0.1.404.dist-info}/top_level.txt
File without changes