tokencostauto-0.1.378-py3-none-any.whl → tokencostauto-0.1.380-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -553,6 +553,7 @@
  "supports_tool_choice": true
  },
  "ft:gpt-4o-2024-08-06": {
+ "cache_read_input_token_cost": 1.875e-06,
  "input_cost_per_token": 3.75e-06,
  "input_cost_per_token_batches": 1.875e-06,
  "litellm_provider": "openai",
@@ -565,6 +566,7 @@
  "supports_function_calling": true,
  "supports_parallel_function_calling": true,
  "supports_pdf_input": true,
+ "supports_prompt_caching": true,
  "supports_response_schema": true,
  "supports_system_messages": true,
  "supports_tool_choice": true,
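The two hunks above give the fine-tuned ft:gpt-4o-2024-08-06 entry a cache_read_input_token_cost and a supports_prompt_caching flag. As a rough illustration of how a consumer of this JSON might use those fields, the Python sketch below prices cached prompt tokens at the cache-read rate and everything else at the normal rates; the helper function, the sample token counts, and the output price are placeholders for the sketch and are not part of the tokencostauto API.

entry = {
    "input_cost_per_token": 3.75e-06,          # value shown in the hunk above
    "cache_read_input_token_cost": 1.875e-06,  # newly added cache-read price
    "output_cost_per_token": 1.5e-05,          # placeholder for the sketch; not shown in this diff
}

def estimate_cost(entry, fresh_prompt_tokens, cached_prompt_tokens, completion_tokens):
    # Cached prompt tokens are billed at the cache-read rate, the rest at list price.
    return (
        fresh_prompt_tokens * entry["input_cost_per_token"]
        + cached_prompt_tokens * entry["cache_read_input_token_cost"]
        + completion_tokens * entry["output_cost_per_token"]
    )

print(f"${estimate_cost(entry, 1_000, 9_000, 500):.6f}")  # -> $0.028125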
@@ -587,29 +589,28 @@
  "supports_prompt_caching": true,
  "supports_response_schema": true,
  "supports_system_messages": true,
- "supports_tool_choice": true,
- "supports_vision": true
+ "supports_tool_choice": true
  },
  "ft:davinci-002": {
- "input_cost_per_token": 2e-06,
+ "input_cost_per_token": 1.2e-05,
  "input_cost_per_token_batches": 1e-06,
  "litellm_provider": "text-completion-openai",
  "max_input_tokens": 16384,
  "max_output_tokens": 4096,
  "max_tokens": 16384,
  "mode": "completion",
- "output_cost_per_token": 2e-06,
+ "output_cost_per_token": 1.2e-05,
  "output_cost_per_token_batches": 1e-06
  },
  "ft:babbage-002": {
- "input_cost_per_token": 4e-07,
+ "input_cost_per_token": 1.6e-06,
  "input_cost_per_token_batches": 2e-07,
  "litellm_provider": "text-completion-openai",
  "max_input_tokens": 16384,
  "max_output_tokens": 4096,
  "max_tokens": 16384,
  "mode": "completion",
- "output_cost_per_token": 4e-07,
+ "output_cost_per_token": 1.6e-06,
  "output_cost_per_token_batches": 2e-07
  },
  "text-embedding-3-large": {
@@ -7747,8 +7748,7 @@
  "supports_prompt_caching": true,
  "supports_response_schema": true,
  "supports_system_messages": true,
- "supports_tool_choice": true,
- "supports_vision": true
+ "supports_tool_choice": true
  },
  "azure/gpt-4o-2024-11-20": {
  "deprecation_date": "2026-03-01",
@@ -28760,5 +28760,197 @@
  "supports_function_calling": true,
  "supports_tool_choice": true,
  "source": "https://docs.z.ai/guides/overview/pricing"
+ },
+ "amazon.nova-2-lite-v1:0": {
+ "input_cost_per_token": 3e-07,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 1000000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.5e-06,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_video_input": true,
+ "supports_vision": true
+ },
+ "apac.amazon.nova-2-lite-v1:0": {
+ "input_cost_per_token": 6e-08,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 1000000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.75e-06,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_video_input": true,
+ "supports_vision": true
+ },
+ "eu.amazon.nova-2-lite-v1:0": {
+ "input_cost_per_token": 6e-08,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 1000000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.75e-06,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_video_input": true,
+ "supports_vision": true
+ },
+ "us.amazon.nova-2-lite-v1:0": {
+ "input_cost_per_token": 6e-08,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 1000000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.75e-06,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_video_input": true,
+ "supports_vision": true
+ },
+ "deepseek/deepseek-v3.2": {
+ "input_cost_per_token": 2.8e-07,
+ "input_cost_per_token_cache_hit": 2.8e-08,
+ "litellm_provider": "deepseek",
+ "max_input_tokens": 163840,
+ "max_output_tokens": 163840,
+ "max_tokens": 8192,
+ "mode": "chat",
+ "output_cost_per_token": 4e-07,
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_tool_choice": true
+ },
+ "ft:gpt-4.1-2025-04-14": {
+ "cache_read_input_token_cost": 7.5e-07,
+ "input_cost_per_token": 3e-06,
+ "input_cost_per_token_batches": 1.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 1047576,
+ "max_output_tokens": 32768,
+ "max_tokens": 32768,
+ "mode": "chat",
+ "output_cost_per_token": 1.2e-05,
+ "output_cost_per_token_batches": 6e-06,
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true
+ },
+ "ft:gpt-4.1-mini-2025-04-14": {
+ "cache_read_input_token_cost": 2e-07,
+ "input_cost_per_token": 8e-07,
+ "input_cost_per_token_batches": 4e-07,
+ "litellm_provider": "openai",
+ "max_input_tokens": 1047576,
+ "max_output_tokens": 32768,
+ "max_tokens": 32768,
+ "mode": "chat",
+ "output_cost_per_token": 3.2e-06,
+ "output_cost_per_token_batches": 1.6e-06,
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true
+ },
+ "ft:gpt-4.1-nano-2025-04-14": {
+ "cache_read_input_token_cost": 5e-08,
+ "input_cost_per_token": 2e-07,
+ "input_cost_per_token_batches": 1e-07,
+ "litellm_provider": "openai",
+ "max_input_tokens": 1047576,
+ "max_output_tokens": 32768,
+ "max_tokens": 32768,
+ "mode": "chat",
+ "output_cost_per_token": 8e-07,
+ "output_cost_per_token_batches": 4e-07,
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true
+ },
+ "ft:o4-mini-2025-04-16": {
+ "cache_read_input_token_cost": 1e-06,
+ "input_cost_per_token": 4e-06,
+ "input_cost_per_token_batches": 2e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 100000,
+ "max_tokens": 100000,
+ "mode": "chat",
+ "output_cost_per_token": 1.6e-05,
+ "output_cost_per_token_batches": 8e-06,
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": false,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "openrouter/deepseek/deepseek-v3.2": {
+ "input_cost_per_token": 2.8e-07,
+ "input_cost_per_token_cache_hit": 2.8e-08,
+ "litellm_provider": "openrouter",
+ "max_input_tokens": 163840,
+ "max_output_tokens": 163840,
+ "max_tokens": 8192,
+ "mode": "chat",
+ "output_cost_per_token": 4e-07,
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_tool_choice": true
+ },
+ "global.anthropic.claude-opus-4-5-20251101-v1:0": {
+ "cache_creation_input_token_cost": 6.25e-06,
+ "cache_read_input_token_cost": 5e-07,
+ "input_cost_per_token": 5e-06,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.5e-05,
+ "search_context_cost_per_query": {
+ "search_context_size_high": 0.01,
+ "search_context_size_low": 0.01,
+ "search_context_size_medium": 0.01
+ },
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "tool_use_system_prompt_tokens": 159
  }
  }
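The new entries follow the same schema as the rest of the file, so a cost lookup is a dictionary access plus a multiply. The sketch below reads the bundled model_prices.json (listed as package data in the RECORD further down) and prices a hypothetical request against the new deepseek/deepseek-v3.2 entry, applying the cache-hit rate to previously cached prompt tokens. It illustrates the data layout only; it is not the tokencostauto API, and the token counts are made up.

import json
from importlib import resources  # assumes model_prices.json ships as package data, per the RECORD

prices = json.loads(
    resources.files("tokencostauto").joinpath("model_prices.json").read_text()
)
entry = prices["deepseek/deepseek-v3.2"]

prompt_tokens, cached_tokens, completion_tokens = 10_000, 50_000, 2_000  # made-up counts
cost = (
    prompt_tokens * entry["input_cost_per_token"]
    + cached_tokens * entry["input_cost_per_token_cache_hit"]
    + completion_tokens * entry["output_cost_per_token"]
)
print(f"deepseek-v3.2 request: ${cost:.6f}")  # 10k fresh + 50k cached in, 2k out -> $0.005000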
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: tokencostauto
- Version: 0.1.378
+ Version: 0.1.380
  Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
  Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
  Project-URL: Homepage, https://github.com/madpin/tokencostaudo
@@ -0,0 +1,9 @@
+ tokencostauto/__init__.py,sha256=-4d_ryFH62SgNXPXA8vGPFZoAKtOBjnsg37EB_RkZG8,289
+ tokencostauto/constants.py,sha256=_82MlTkTrdrwzyRosQD7d3JdgNP9KAUM-cZo8DE00P0,3395
+ tokencostauto/costs.py,sha256=tXsgrTypq-dCHaHtoXcg2XepezWsAvZpl9gEsv_53iE,10679
+ tokencostauto/model_prices.json,sha256=GQo71l4_B2fpRWiZG4LAsezobw-GzGLfU87ZiJ1NKbA,1032350
+ tokencostauto-0.1.380.dist-info/licenses/LICENSE,sha256=4PLv_CD6Ughnsvg_nM2XeTqGwVK6lQVR77kVWbPq-0U,1065
+ tokencostauto-0.1.380.dist-info/METADATA,sha256=92Bcw6am7Ru01cYZY5rvk444pTP3bl0nCg8eyqL2vtk,204076
+ tokencostauto-0.1.380.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ tokencostauto-0.1.380.dist-info/top_level.txt,sha256=szZQTUJRotfIaeZCDsOgvofIkLt2ak88RP13oI51-TU,14
+ tokencostauto-0.1.380.dist-info/RECORD,,
@@ -1,9 +0,0 @@
1
- tokencostauto/__init__.py,sha256=-4d_ryFH62SgNXPXA8vGPFZoAKtOBjnsg37EB_RkZG8,289
2
- tokencostauto/constants.py,sha256=_82MlTkTrdrwzyRosQD7d3JdgNP9KAUM-cZo8DE00P0,3395
3
- tokencostauto/costs.py,sha256=tXsgrTypq-dCHaHtoXcg2XepezWsAvZpl9gEsv_53iE,10679
4
- tokencostauto/model_prices.json,sha256=7g_kbAkIB8zhogHHKj_XSSS1I61XqMsUxG3Z7O2tbiw,1025225
5
- tokencostauto-0.1.378.dist-info/licenses/LICENSE,sha256=4PLv_CD6Ughnsvg_nM2XeTqGwVK6lQVR77kVWbPq-0U,1065
6
- tokencostauto-0.1.378.dist-info/METADATA,sha256=npBBO1a4KYQ8olhQ9M9La_jxgY39lPHI7zxOAO5CgoQ,204076
7
- tokencostauto-0.1.378.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
8
- tokencostauto-0.1.378.dist-info/top_level.txt,sha256=szZQTUJRotfIaeZCDsOgvofIkLt2ak88RP13oI51-TU,14
9
- tokencostauto-0.1.378.dist-info/RECORD,,