tokencostauto 0.1.393.tar.gz → 0.1.397.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
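Most of the changes in this bump are new or revised entries in the bundled model_prices.json, where each model name maps to per-token USD rates and capability flags. As a rough sketch of how such an entry is typically used (the dict literal and the estimate_cost_usd helper below are illustrative assumptions, not tokencostauto's actual API), a call's dollar cost is the prompt and completion token counts multiplied by the corresponding per-token rates:

    # Illustrative only: shows the arithmetic implied by the price fields in this diff.
    # The helper name and the entry literal are assumptions, not the package's API.
    price_entry = {
        "input_cost_per_token": 1e-06,   # USD per prompt token (azure_ai/claude-haiku-4-5 below)
        "output_cost_per_token": 5e-06,  # USD per completion token
    }

    def estimate_cost_usd(prompt_tokens: int, completion_tokens: int, entry: dict) -> float:
        """Multiply token counts by the per-token USD rates in a price entry."""
        return (prompt_tokens * entry["input_cost_per_token"]
                + completion_tokens * entry["output_cost_per_token"])

    # 1,200 prompt tokens and 300 completion tokens -> 0.0012 + 0.0015 = $0.0027
    print(f"${estimate_cost_usd(1200, 300, price_entry):.4f}")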
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: tokencostauto
- Version: 0.1.393
+ Version: 0.1.397
  Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
  Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
  Project-URL: Homepage, https://github.com/madpin/tokencostaudo
@@ -11,7 +11,7 @@ tokencostauto = ["model_prices.json"]
  [project]

  name = "tokencostauto"
- version = "0.1.393"
+ version = "0.1.397"

  authors = [
  { name = "Trisha Pan", email = "trishaepan@gmail.com" },
@@ -24898,15 +24898,15 @@
  "supports_vision": false
  },
  "global.anthropic.claude-haiku-4-5-20251001-v1:0": {
- "cache_creation_input_token_cost": 1.375e-06,
- "cache_read_input_token_cost": 1.1e-07,
- "input_cost_per_token": 1.1e-06,
+ "cache_creation_input_token_cost": 1.25e-06,
+ "cache_read_input_token_cost": 1e-07,
+ "input_cost_per_token": 1e-06,
  "litellm_provider": "bedrock_converse",
  "max_input_tokens": 200000,
  "max_output_tokens": 64000,
  "max_tokens": 64000,
  "mode": "chat",
- "output_cost_per_token": 5.5e-06,
+ "output_cost_per_token": 5e-06,
  "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
  "supports_assistant_prefill": true,
  "supports_computer_use": true,
@@ -31542,5 +31542,463 @@
  "max_tokens": 32000,
  "mode": "rerank",
  "output_cost_per_token": 0.0
+ },
+ "azure_ai/claude-haiku-4-5": {
+ "input_cost_per_token": 1e-06,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 5e-06,
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure_ai/claude-opus-4-1": {
+ "input_cost_per_token": 1.5e-05,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 32000,
+ "max_tokens": 32000,
+ "mode": "chat",
+ "output_cost_per_token": 7.5e-05,
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure_ai/claude-sonnet-4-5": {
+ "input_cost_per_token": 3e-06,
+ "litellm_provider": "azure_ai",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 1.5e-05,
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "gpt-5.2": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_service_tier": true,
+ "supports_vision": true
+ },
+ "gpt-5.2-2025-12-11": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_service_tier": true,
+ "supports_vision": true
+ },
+ "gpt-5.2-chat-latest": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "openai",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "gpt-5.2-pro": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "gpt-5.2-pro-2025-12-11": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "openai",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "mistral/codestral-2508": {
+ "input_cost_per_token": 3e-07,
+ "litellm_provider": "mistral",
+ "max_input_tokens": 256000,
+ "max_output_tokens": 256000,
+ "max_tokens": 256000,
+ "mode": "chat",
+ "output_cost_per_token": 9e-07,
+ "source": "https://mistral.ai/news/codestral-25-08",
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "mistral/labs-devstral-small-2512": {
+ "input_cost_per_token": 1e-07,
+ "litellm_provider": "mistral",
+ "max_input_tokens": 256000,
+ "max_output_tokens": 256000,
+ "max_tokens": 256000,
+ "mode": "chat",
+ "output_cost_per_token": 3e-07,
+ "source": "https://docs.mistral.ai/models/devstral-small-2-25-12",
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "mistral/devstral-2512": {
+ "input_cost_per_token": 4e-07,
+ "litellm_provider": "mistral",
+ "max_input_tokens": 256000,
+ "max_output_tokens": 256000,
+ "max_tokens": 256000,
+ "mode": "chat",
+ "output_cost_per_token": 2e-06,
+ "source": "https://mistral.ai/news/devstral-2-vibe-cli",
+ "supports_assistant_prefill": true,
+ "supports_function_calling": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true
+ },
+ "azure/gpt-5.2": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "input_cost_per_token": 1.75e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.2-2025-12-11": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_service_tier": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.2-chat-2025-12-11": {
+ "cache_read_input_token_cost": 1.75e-07,
+ "cache_read_input_token_cost_priority": 3.5e-07,
+ "input_cost_per_token": 1.75e-06,
+ "input_cost_per_token_priority": 3.5e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 128000,
+ "max_output_tokens": 16384,
+ "max_tokens": 16384,
+ "mode": "chat",
+ "output_cost_per_token": 1.4e-05,
+ "output_cost_per_token_priority": 2.8e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.2-pro": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "azure/gpt-5.2-pro-2025-12-11": {
+ "input_cost_per_token": 2.1e-05,
+ "litellm_provider": "azure",
+ "max_input_tokens": 400000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 0.000168,
+ "supported_endpoints": [
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "eu.anthropic.claude-opus-4-5-20251101-v1:0": {
+ "cache_creation_input_token_cost": 6.25e-06,
+ "cache_read_input_token_cost": 5e-07,
+ "input_cost_per_token": 5e-06,
+ "litellm_provider": "bedrock_converse",
+ "max_input_tokens": 200000,
+ "max_output_tokens": 64000,
+ "max_tokens": 64000,
+ "mode": "chat",
+ "output_cost_per_token": 2.5e-05,
+ "search_context_cost_per_query": {
+ "search_context_size_high": 0.01,
+ "search_context_size_low": 0.01,
+ "search_context_size_medium": 0.01
+ },
+ "supports_assistant_prefill": true,
+ "supports_computer_use": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_tool_choice": true,
+ "supports_vision": true,
+ "tool_use_system_prompt_tokens": 159
  }
  }
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: tokencostauto
- Version: 0.1.393
+ Version: 0.1.397
  Summary: To calculate token and translated USD cost of string and message calls to OpenAI, for example when used by AI agents
  Author-email: Trisha Pan <trishaepan@gmail.com>, Alex Reibman <areibman@gmail.com>, Pratyush Shukla <ps4534@nyu.edu>, Thiago MadPin <madpin@gmail.com>
  Project-URL: Homepage, https://github.com/madpin/tokencostaudo
File without changes