tokencostauto 0.1.378.tar.gz → 0.1.382.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {tokencostauto-0.1.378/tokencostauto.egg-info → tokencostauto-0.1.382}/PKG-INFO +1 -1
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/pyproject.toml +1 -1
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto/model_prices.json +275 -31
- {tokencostauto-0.1.378 → tokencostauto-0.1.382/tokencostauto.egg-info}/PKG-INFO +1 -1
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/LICENSE +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/MANIFEST.in +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/README.md +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/setup.cfg +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tests/test_costs.py +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto/__init__.py +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto/constants.py +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto/costs.py +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto.egg-info/SOURCES.txt +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto.egg-info/dependency_links.txt +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto.egg-info/requires.txt +0 -0
- {tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto.egg-info/top_level.txt +0 -0
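The bulk of this release is a refresh of tokencostauto/model_prices.json, the per-model rate table that tokencostauto multiplies against token counts to report USD costs. As a rough illustration of how the fields in the hunks below are used, here is a minimal sketch; it is not tokencostauto's actual API, and the function name is illustrative. The hard-coded rates come from the moonshot/kimi-k2-0905-preview entry added in this diff.

# Minimal sketch of how per-token USD rates in model_prices.json turn into a cost.
# Illustrative only; not tokencostauto's actual API.

MODEL_PRICES = {
    "moonshot/kimi-k2-0905-preview": {
        "input_cost_per_token": 6e-07,     # USD per prompt token (from this diff)
        "output_cost_per_token": 2.5e-06,  # USD per completion token (from this diff)
    },
}

def estimate_cost_usd(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    """Multiply token counts by the per-token USD rates for `model`."""
    rates = MODEL_PRICES[model]
    return (prompt_tokens * rates["input_cost_per_token"]
            + completion_tokens * rates["output_cost_per_token"])

# 10,000 prompt tokens and 2,000 completion tokens:
# 10_000 * 6e-07 + 2_000 * 2.5e-06 = 0.006 + 0.005 = 0.011 USD
print(estimate_cost_usd("moonshot/kimi-k2-0905-preview", 10_000, 2_000))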
{tokencostauto-0.1.378/tokencostauto.egg-info → tokencostauto-0.1.382}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tokencostauto
-Version: 0.1.378
+Version: 0.1.382
 Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
 Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
 Project-URL: Homepage, https://github.com/madpin/tokencostaudo
{tokencostauto-0.1.378 → tokencostauto-0.1.382}/tokencostauto/model_prices.json
@@ -553,6 +553,7 @@
         "supports_tool_choice": true
     },
     "ft:gpt-4o-2024-08-06": {
+        "cache_read_input_token_cost": 1.875e-06,
         "input_cost_per_token": 3.75e-06,
         "input_cost_per_token_batches": 1.875e-06,
         "litellm_provider": "openai",
@@ -565,6 +566,7 @@
         "supports_function_calling": true,
         "supports_parallel_function_calling": true,
         "supports_pdf_input": true,
+        "supports_prompt_caching": true,
         "supports_response_schema": true,
         "supports_system_messages": true,
         "supports_tool_choice": true,
@@ -587,29 +589,28 @@
         "supports_prompt_caching": true,
         "supports_response_schema": true,
         "supports_system_messages": true,
-        "supports_tool_choice": true
-        "supports_vision": true
+        "supports_tool_choice": true
     },
     "ft:davinci-002": {
-        "input_cost_per_token": 2e-
+        "input_cost_per_token": 1.2e-05,
         "input_cost_per_token_batches": 1e-06,
         "litellm_provider": "text-completion-openai",
         "max_input_tokens": 16384,
         "max_output_tokens": 4096,
         "max_tokens": 16384,
         "mode": "completion",
-        "output_cost_per_token": 2e-
+        "output_cost_per_token": 1.2e-05,
         "output_cost_per_token_batches": 1e-06
     },
     "ft:babbage-002": {
-        "input_cost_per_token":
+        "input_cost_per_token": 1.6e-06,
         "input_cost_per_token_batches": 2e-07,
         "litellm_provider": "text-completion-openai",
         "max_input_tokens": 16384,
         "max_output_tokens": 4096,
         "max_tokens": 16384,
         "mode": "completion",
-        "output_cost_per_token":
+        "output_cost_per_token": 1.6e-06,
         "output_cost_per_token_batches": 2e-07
     },
     "text-embedding-3-large": {
@@ -1742,7 +1743,7 @@
         "mode": "chat",
         "output_cost_per_token": 8e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama3-8b-8192": {
@@ -1775,7 +1776,7 @@
         "mode": "chat",
         "output_cost_per_token": 8e-08,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.1-70b-versatile": {
@@ -1788,7 +1789,7 @@
         "mode": "chat",
         "output_cost_per_token": 7.9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.1-405b-reasoning": {
@@ -1800,7 +1801,7 @@
         "mode": "chat",
         "output_cost_per_token": 7.9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/mixtral-8x7b-32768": {
@@ -1813,7 +1814,7 @@
         "mode": "chat",
         "output_cost_per_token": 2.4e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/gemma-7b-it": {
@@ -1826,7 +1827,7 @@
         "mode": "chat",
         "output_cost_per_token": 7e-08,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/gemma2-9b-it": {
@@ -1838,7 +1839,7 @@
         "mode": "chat",
         "output_cost_per_token": 2e-07,
         "supports_function_calling": false,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": false
     },
     "groq/llama3-groq-70b-8192-tool-use-preview": {
@@ -1851,7 +1852,7 @@
         "mode": "chat",
         "output_cost_per_token": 8.9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama3-groq-8b-8192-tool-use-preview": {
@@ -1864,7 +1865,7 @@
         "mode": "chat",
         "output_cost_per_token": 1.9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "cerebras/llama3.1-8b": {
@@ -7747,8 +7748,7 @@
         "supports_prompt_caching": true,
         "supports_response_schema": true,
         "supports_system_messages": true,
-        "supports_tool_choice": true
-        "supports_vision": true
+        "supports_tool_choice": true
     },
     "azure/gpt-4o-2024-11-20": {
         "deprecation_date": "2026-03-01",
@@ -7793,7 +7793,7 @@
         "mode": "chat",
         "output_cost_per_token": 4e-08,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.2-3b-preview": {
@@ -7806,7 +7806,7 @@
         "mode": "chat",
         "output_cost_per_token": 6e-08,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.2-11b-text-preview": {
@@ -7819,7 +7819,7 @@
         "mode": "chat",
         "output_cost_per_token": 1.8e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.2-11b-vision-preview": {
@@ -7832,7 +7832,7 @@
         "mode": "chat",
         "output_cost_per_token": 1.8e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true,
         "supports_vision": true
     },
@@ -7846,7 +7846,7 @@
         "mode": "chat",
         "output_cost_per_token": 9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.2-90b-vision-preview": {
@@ -7859,7 +7859,7 @@
         "mode": "chat",
         "output_cost_per_token": 9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true,
         "supports_vision": true
     },
@@ -8337,7 +8337,7 @@
         "mode": "chat",
         "output_cost_per_token": 7.9e-07,
         "supports_function_calling": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "groq/llama-3.3-70b-specdec": {
@@ -9095,7 +9095,7 @@
         "output_cost_per_token": 9.9e-07,
         "supports_function_calling": true,
         "supports_reasoning": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "gemini/gemini-2.0-flash": {
@@ -16949,14 +16949,15 @@
         "supports_vision": true
     },
     "moonshot/kimi-thinking-preview": {
-        "
+        "cache_read_input_token_cost": 1.5e-07,
+        "input_cost_per_token": 6e-07,
         "litellm_provider": "moonshot",
         "max_input_tokens": 131072,
         "max_output_tokens": 131072,
         "max_tokens": 131072,
         "mode": "chat",
-        "output_cost_per_token":
-        "source": "https://platform.moonshot.ai/docs/pricing",
+        "output_cost_per_token": 2.5e-06,
+        "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
         "supports_vision": true
     },
     "moonshot/moonshot-v1-8k-vision-preview": {
@@ -17008,7 +17009,7 @@
         "output_cost_per_token": 5.9e-07,
         "supports_function_calling": true,
         "supports_reasoning": true,
-        "supports_response_schema":
+        "supports_response_schema": false,
         "supports_tool_choice": true
     },
     "openrouter/qwen/qwen-vl-plus": {
@@ -22660,8 +22661,8 @@
         "max_tokens": 128000,
         "max_input_tokens": 128000,
         "max_output_tokens": 128000,
-        "input_cost_per_token":
-        "output_cost_per_token":
+        "input_cost_per_token": 6e-07,
+        "output_cost_per_token": 2.5e-06,
         "litellm_provider": "wandb",
         "mode": "chat"
     },
@@ -28760,5 +28761,248 @@
         "supports_function_calling": true,
         "supports_tool_choice": true,
         "source": "https://docs.z.ai/guides/overview/pricing"
+    },
+    "amazon.nova-2-lite-v1:0": {
+        "input_cost_per_token": 3e-07,
+        "litellm_provider": "bedrock_converse",
+        "max_input_tokens": 1000000,
+        "max_output_tokens": 64000,
+        "max_tokens": 64000,
+        "mode": "chat",
+        "output_cost_per_token": 2.5e-06,
+        "supports_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_video_input": true,
+        "supports_vision": true
+    },
+    "apac.amazon.nova-2-lite-v1:0": {
+        "input_cost_per_token": 6e-08,
+        "litellm_provider": "bedrock_converse",
+        "max_input_tokens": 1000000,
+        "max_output_tokens": 64000,
+        "max_tokens": 64000,
+        "mode": "chat",
+        "output_cost_per_token": 2.75e-06,
+        "supports_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_video_input": true,
+        "supports_vision": true
+    },
+    "eu.amazon.nova-2-lite-v1:0": {
+        "input_cost_per_token": 6e-08,
+        "litellm_provider": "bedrock_converse",
+        "max_input_tokens": 1000000,
+        "max_output_tokens": 64000,
+        "max_tokens": 64000,
+        "mode": "chat",
+        "output_cost_per_token": 2.75e-06,
+        "supports_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_video_input": true,
+        "supports_vision": true
+    },
+    "us.amazon.nova-2-lite-v1:0": {
+        "input_cost_per_token": 6e-08,
+        "litellm_provider": "bedrock_converse",
+        "max_input_tokens": 1000000,
+        "max_output_tokens": 64000,
+        "max_tokens": 64000,
+        "mode": "chat",
+        "output_cost_per_token": 2.75e-06,
+        "supports_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_video_input": true,
+        "supports_vision": true
+    },
+    "deepseek/deepseek-v3.2": {
+        "input_cost_per_token": 2.8e-07,
+        "input_cost_per_token_cache_hit": 2.8e-08,
+        "litellm_provider": "deepseek",
+        "max_input_tokens": 163840,
+        "max_output_tokens": 163840,
+        "max_tokens": 8192,
+        "mode": "chat",
+        "output_cost_per_token": 4e-07,
+        "supports_assistant_prefill": true,
+        "supports_function_calling": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
+    "ft:gpt-4.1-2025-04-14": {
+        "cache_read_input_token_cost": 7.5e-07,
+        "input_cost_per_token": 3e-06,
+        "input_cost_per_token_batches": 1.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "max_tokens": 32768,
+        "mode": "chat",
+        "output_cost_per_token": 1.2e-05,
+        "output_cost_per_token_batches": 6e-06,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "ft:gpt-4.1-mini-2025-04-14": {
+        "cache_read_input_token_cost": 2e-07,
+        "input_cost_per_token": 8e-07,
+        "input_cost_per_token_batches": 4e-07,
+        "litellm_provider": "openai",
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "max_tokens": 32768,
+        "mode": "chat",
+        "output_cost_per_token": 3.2e-06,
+        "output_cost_per_token_batches": 1.6e-06,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "ft:gpt-4.1-nano-2025-04-14": {
+        "cache_read_input_token_cost": 5e-08,
+        "input_cost_per_token": 2e-07,
+        "input_cost_per_token_batches": 1e-07,
+        "litellm_provider": "openai",
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "max_tokens": 32768,
+        "mode": "chat",
+        "output_cost_per_token": 8e-07,
+        "output_cost_per_token_batches": 4e-07,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_prompt_caching": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "ft:o4-mini-2025-04-16": {
+        "cache_read_input_token_cost": 1e-06,
+        "input_cost_per_token": 4e-06,
+        "input_cost_per_token_batches": 2e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 200000,
+        "max_output_tokens": 100000,
+        "max_tokens": 100000,
+        "mode": "chat",
+        "output_cost_per_token": 1.6e-05,
+        "output_cost_per_token_batches": 8e-06,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": false,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/deepseek/deepseek-v3.2": {
+        "input_cost_per_token": 2.8e-07,
+        "input_cost_per_token_cache_hit": 2.8e-08,
+        "litellm_provider": "openrouter",
+        "max_input_tokens": 163840,
+        "max_output_tokens": 163840,
+        "max_tokens": 8192,
+        "mode": "chat",
+        "output_cost_per_token": 4e-07,
+        "supports_assistant_prefill": true,
+        "supports_function_calling": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
+    "global.anthropic.claude-opus-4-5-20251101-v1:0": {
+        "cache_creation_input_token_cost": 6.25e-06,
+        "cache_read_input_token_cost": 5e-07,
+        "input_cost_per_token": 5e-06,
+        "litellm_provider": "bedrock_converse",
+        "max_input_tokens": 200000,
+        "max_output_tokens": 64000,
+        "max_tokens": 64000,
+        "mode": "chat",
+        "output_cost_per_token": 2.5e-05,
+        "search_context_cost_per_query": {
+            "search_context_size_high": 0.01,
+            "search_context_size_low": 0.01,
+            "search_context_size_medium": 0.01
+        },
+        "supports_assistant_prefill": true,
+        "supports_computer_use": true,
+        "supports_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_tool_choice": true,
+        "supports_vision": true,
+        "tool_use_system_prompt_tokens": 159
+    },
+    "amazon.titan-image-generator-v2:0": {
+        "input_cost_per_image": 0.0,
+        "output_cost_per_image": 0.008,
+        "output_cost_per_image_premium_image": 0.01,
+        "output_cost_per_image_above_1024_and_1024_pixels": 0.01,
+        "output_cost_per_image_above_1024_and_1024_pixels_and_premium_image": 0.012,
+        "litellm_provider": "bedrock",
+        "mode": "image_generation"
+    },
+    "moonshot/kimi-k2-0905-preview": {
+        "cache_read_input_token_cost": 1.5e-07,
+        "input_cost_per_token": 6e-07,
+        "litellm_provider": "moonshot",
+        "max_input_tokens": 262144,
+        "max_output_tokens": 262144,
+        "max_tokens": 262144,
+        "mode": "chat",
+        "output_cost_per_token": 2.5e-06,
+        "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_web_search": true
+    },
+    "moonshot/kimi-k2-turbo-preview": {
+        "cache_read_input_token_cost": 1.5e-07,
+        "input_cost_per_token": 1.15e-06,
+        "litellm_provider": "moonshot",
+        "max_input_tokens": 262144,
+        "max_output_tokens": 262144,
+        "max_tokens": 262144,
+        "mode": "chat",
+        "output_cost_per_token": 8e-06,
+        "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_web_search": true
+    },
+    "moonshot/kimi-k2-thinking-turbo": {
+        "cache_read_input_token_cost": 1.5e-07,
+        "input_cost_per_token": 1.15e-06,
+        "litellm_provider": "moonshot",
+        "max_input_tokens": 262144,
+        "max_output_tokens": 262144,
+        "max_tokens": 262144,
+        "mode": "chat",
+        "output_cost_per_token": 8e-06,
+        "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_web_search": true
     }
 }
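Several entries in this release also gain a cache_read_input_token_cost (for example ft:gpt-4o-2024-08-06, where cached input is priced at 1.875e-06 per token against 3.75e-06 for regular input). A caller that tracks how many prompt tokens were cache hits can split the prompt cost accordingly. The sketch below uses the rates from that entry; the cached/uncached token split is an assumption about the caller's own bookkeeping, not an interface defined by tokencostauto.

# Sketch: splitting prompt cost into cached and uncached portions using the
# cache_read_input_token_cost field added for "ft:gpt-4o-2024-08-06" above.
# Rates come from this diff; the token counts are assumed to be tracked by the caller.

INPUT_RATE = 3.75e-06        # input_cost_per_token
CACHE_READ_RATE = 1.875e-06  # cache_read_input_token_cost

def prompt_cost_usd(prompt_tokens: int, cached_tokens: int) -> float:
    """Price cache-hit prompt tokens at the cache-read rate, the rest at the full input rate."""
    uncached = prompt_tokens - cached_tokens
    return uncached * INPUT_RATE + cached_tokens * CACHE_READ_RATE

# 8,000-token prompt, 6,000 of which were served from the prompt cache:
# 2_000 * 3.75e-06 + 6_000 * 1.875e-06 = 0.0075 + 0.01125 = 0.01875 USD
print(prompt_cost_usd(8_000, 6_000))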
{tokencostauto-0.1.378 → tokencostauto-0.1.382/tokencostauto.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tokencostauto
-Version: 0.1.378
+Version: 0.1.382
 Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
 Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
 Project-URL: Homepage, https://github.com/madpin/tokencostaudo