tokencostauto-0.1.444.tar.gz → tokencostauto-0.1.447.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {tokencostauto-0.1.444/tokencostauto.egg-info → tokencostauto-0.1.447}/PKG-INFO +1 -1
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/pyproject.toml +1 -1
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto/model_prices.json +297 -2
- {tokencostauto-0.1.444 → tokencostauto-0.1.447/tokencostauto.egg-info}/PKG-INFO +1 -1
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/LICENSE +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/MANIFEST.in +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/README.md +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/setup.cfg +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tests/test_costs.py +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto/__init__.py +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto/constants.py +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto/costs.py +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto.egg-info/SOURCES.txt +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto.egg-info/dependency_links.txt +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto.egg-info/requires.txt +0 -0
- {tokencostauto-0.1.444 → tokencostauto-0.1.447}/tokencostauto.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tokencostauto
-Version: 0.1.444
+Version: 0.1.447
 Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
 Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
 Project-URL: Homepage, https://github.com/madpin/tokencostaudo
@@ -18915,13 +18915,13 @@
         "supports_tool_choice": true
     },
     "openrouter/openai/gpt-oss-20b": {
-        "input_cost_per_token":
+        "input_cost_per_token": 2e-08,
         "litellm_provider": "openrouter",
         "max_input_tokens": 131072,
         "max_output_tokens": 32768,
         "max_tokens": 32768,
         "mode": "chat",
-        "output_cost_per_token":
+        "output_cost_per_token": 1e-07,
         "source": "https://openrouter.ai/openai/gpt-oss-20b",
         "supports_function_calling": true,
         "supports_parallel_function_calling": true,
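The filled-in prices above are USD per token, which is hard to read at a glance; a quick conversion sketch (not part of the package, values copied from the hunk above) expresses them per million tokens:

    # openrouter/openai/gpt-oss-20b prices from the hunk above, in USD per token
    input_cost_per_token = 2e-08
    output_cost_per_token = 1e-07

    # Convert to USD per 1M tokens for readability
    print(f"${input_cost_per_token * 1_000_000:.2f} per 1M input tokens")    # $0.02
    print(f"${output_cost_per_token * 1_000_000:.2f} per 1M output tokens")  # $0.10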
@@ -20506,6 +20506,7 @@
         "deprecation_date": "2026-01-15",
         "cache_read_input_token_cost": 7.5e-08,
         "input_cost_per_audio_token": 1e-06,
+        "input_cost_per_image_token": 3e-07,
         "input_cost_per_token": 3e-07,
         "litellm_provider": "vertex_ai-language-models",
         "max_audio_length_hours": 8.4,
@@ -35074,5 +35075,299 @@
         "max_input_tokens": 8000,
         "max_output_tokens": 8000,
         "max_tokens": 8000
+    },
+    "replicate/openai/gpt-5": {
+        "input_cost_per_token": 1.25e-06,
+        "output_cost_per_token": 1e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/openai/gpt-oss-20b": {
+        "input_cost_per_token": 9e-08,
+        "output_cost_per_token": 3.6e-07,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/anthropic/claude-4.5-haiku": {
+        "input_cost_per_token": 1e-06,
+        "output_cost_per_token": 5e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_prompt_caching": true
+    },
+    "replicate/ibm-granite/granite-3.3-8b-instruct": {
+        "input_cost_per_token": 3e-08,
+        "output_cost_per_token": 2.5e-07,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/openai/gpt-4o": {
+        "input_cost_per_token": 2.5e-06,
+        "output_cost_per_token": 1e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_audio_input": true,
+        "supports_audio_output": true
+    },
+    "replicate/openai/o4-mini": {
+        "input_cost_per_token": 1e-06,
+        "output_cost_per_token": 4e-06,
+        "output_cost_per_reasoning_token": 4e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_reasoning": true,
+        "supports_system_messages": true
+    },
+    "replicate/openai/o1-mini": {
+        "input_cost_per_token": 1.1e-06,
+        "output_cost_per_token": 4.4e-06,
+        "output_cost_per_reasoning_token": 4.4e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_reasoning": true,
+        "supports_system_messages": true
+    },
+    "replicate/openai/o1": {
+        "input_cost_per_token": 1.5e-05,
+        "output_cost_per_token": 6e-05,
+        "output_cost_per_reasoning_token": 6e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_reasoning": true,
+        "supports_system_messages": true
+    },
+    "replicate/openai/gpt-4o-mini": {
+        "input_cost_per_token": 1.5e-07,
+        "output_cost_per_token": 6e-07,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/qwen/qwen3-235b-a22b-instruct-2507": {
+        "input_cost_per_token": 2.64e-07,
+        "output_cost_per_token": 1.06e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/anthropic/claude-4-sonnet": {
+        "input_cost_per_token": 3e-06,
+        "output_cost_per_token": 1.5e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_prompt_caching": true
+    },
+    "replicate/deepseek-ai/deepseek-v3": {
+        "input_cost_per_token": 1.45e-06,
+        "output_cost_per_token": 1.45e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "max_input_tokens": 65536,
+        "max_output_tokens": 8192,
+        "max_tokens": 8192,
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/anthropic/claude-3.7-sonnet": {
+        "input_cost_per_token": 3e-06,
+        "output_cost_per_token": 1.5e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_prompt_caching": true
+    },
+    "replicate/anthropic/claude-3.5-haiku": {
+        "input_cost_per_token": 1e-06,
+        "output_cost_per_token": 5e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_prompt_caching": true
+    },
+    "replicate/anthropic/claude-3.5-sonnet": {
+        "input_cost_per_token": 3.75e-06,
+        "output_cost_per_token": 1.875e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_prompt_caching": true
+    },
+    "replicate/google/gemini-3-pro": {
+        "input_cost_per_token": 2e-06,
+        "output_cost_per_token": 1.2e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/anthropic/claude-4.5-sonnet": {
+        "input_cost_per_token": 3e-06,
+        "output_cost_per_token": 1.5e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true,
+        "supports_prompt_caching": true
+    },
+    "replicate/openai/gpt-4.1": {
+        "input_cost_per_token": 2e-06,
+        "output_cost_per_token": 8e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/openai/gpt-4.1-nano": {
+        "input_cost_per_token": 1e-07,
+        "output_cost_per_token": 4e-07,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/openai/gpt-4.1-mini": {
+        "input_cost_per_token": 4e-07,
+        "output_cost_per_token": 1.6e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/openai/gpt-5-nano": {
+        "input_cost_per_token": 5e-08,
+        "output_cost_per_token": 4e-07,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/openai/gpt-5-mini": {
+        "input_cost_per_token": 2.5e-07,
+        "output_cost_per_token": 2e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/google/gemini-2.5-flash": {
+        "input_cost_per_token": 2.5e-06,
+        "output_cost_per_token": 2.5e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_vision": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_response_schema": true
+    },
+    "replicate/openai/gpt-oss-120b": {
+        "input_cost_per_token": 1.8e-07,
+        "output_cost_per_token": 7.2e-07,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/deepseek-ai/deepseek-v3.1": {
+        "input_cost_per_token": 6.72e-07,
+        "output_cost_per_token": 2.016e-06,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "max_input_tokens": 163840,
+        "max_output_tokens": 163840,
+        "max_tokens": 163840,
+        "supports_function_calling": true,
+        "supports_reasoning": true,
+        "supports_system_messages": true
+    },
+    "replicate/xai/grok-4": {
+        "input_cost_per_token": 7.2e-06,
+        "output_cost_per_token": 3.6e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_system_messages": true
+    },
+    "replicate/deepseek-ai/deepseek-r1": {
+        "input_cost_per_token": 3.75e-06,
+        "output_cost_per_token": 1e-05,
+        "output_cost_per_reasoning_token": 1e-05,
+        "litellm_provider": "replicate",
+        "mode": "chat",
+        "max_input_tokens": 65536,
+        "max_output_tokens": 8192,
+        "max_tokens": 8192,
+        "supports_reasoning": true,
+        "supports_system_messages": true
     }
 }
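Each of the new replicate/* entries follows the same schema as the rest of model_prices.json: the model name is a top-level key mapping to per-token USD prices and capability flags. A minimal sketch of how such an entry could be used to estimate a request cost (the token counts are hypothetical, the path assumes an unpacked sdist, and this bypasses tokencostauto's own helpers):

    import json

    # model_prices.json ships inside the package (see the file list above);
    # this path assumes the unpacked sdist layout.
    with open("tokencostauto/model_prices.json") as f:
        prices = json.load(f)

    entry = prices["replicate/openai/gpt-5"]        # one of the entries added in 0.1.447
    prompt_tokens, completion_tokens = 1_200, 350   # hypothetical usage

    cost = (prompt_tokens * entry["input_cost_per_token"]
            + completion_tokens * entry["output_cost_per_token"])
    print(f"estimated cost: ${cost:.6f}")  # 1200*1.25e-06 + 350*1e-05 = $0.005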
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tokencostauto
-Version: 0.1.444
+Version: 0.1.447
 Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
 Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
 Project-URL: Homepage, https://github.com/madpin/tokencostaudo