llama-index-llms-bedrock-converse 0.8.0__tar.gz → 0.8.2__tar.gz

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llama-index-llms-bedrock-converse
-Version: 0.8.0
+Version: 0.8.2
 Summary: llama-index llms bedrock converse integration
 Author-email: Your Name <you@example.com>
 License-Expression: MIT
@@ -252,6 +252,7 @@ class BedrockConverse(FunctionCallingLLM):
                 retries={"max_attempts": max_retries, "mode": "standard"},
                 connect_timeout=timeout,
                 read_timeout=timeout,
+                user_agent_extra="x-client-framework:llama_index",
             )
             if botocore_config is None
             else botocore_config
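The new user_agent_extra value is appended to the botocore User-Agent header, so Bedrock traffic originating from LlamaIndex can be attributed to the framework. A minimal sketch of how such a client configuration is assembled, assuming the botocore/boto3 APIs shown; the default values and the session wiring are illustrative, not taken from the package:

# Sketch only: max_retries/timeout defaults and the session wiring are assumptions.
import boto3
from botocore.config import Config

max_retries = 10   # illustrative default
timeout = 60.0     # illustrative default

config = Config(
    retries={"max_attempts": max_retries, "mode": "standard"},
    connect_timeout=timeout,
    read_timeout=timeout,
    # 0.8.2 addition: extra token appended to the User-Agent header
    user_agent_extra="x-client-framework:llama_index",
)

# The resulting config is passed to the Bedrock Runtime client.
client = boto3.Session().client("bedrock-runtime", config=config)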
@@ -72,6 +72,8 @@ BEDROCK_MODELS = {
     "mistral.mistral-large-2402-v1:0": 32000,
     "mistral.mistral-small-2402-v1:0": 32000,
     "mistral.mistral-large-2407-v1:0": 32000,
+    "openai.gpt-oss-120b-1:0": 128000,
+    "openai.gpt-oss-20b-1:0": 128000,
     "ai21.jamba-1-5-mini-v1:0": 256000,
     "ai21.jamba-1-5-large-v1:0": 256000,
     "deepseek.r1-v1:0": 128000,
@@ -101,6 +103,8 @@ BEDROCK_FUNCTION_CALLING_MODELS = (
     "meta.llama3-3-70b-instruct-v1:0",
     "meta.llama4-maverick-17b-instruct-v1:0",
     "meta.llama4-scout-17b-instruct-v1:0",
+    "openai.gpt-oss-120b-1:0",
+    "openai.gpt-oss-20b-1:0",
 )
 
 BEDROCK_INFERENCE_PROFILE_SUPPORTED_MODELS = (
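Listing the model IDs in BEDROCK_FUNCTION_CALLING_MODELS marks them as function-calling capable, so BedrockConverse (a FunctionCallingLLM) will forward tool schemas for them through the Converse API. A tool-calling sketch with one of the newly listed models; the multiply tool and the prompt are illustrative, and credentials are assumed:

# Sketch only: the tool and prompt are illustrative.
from llama_index.core.tools import FunctionTool
from llama_index.llms.bedrock_converse import BedrockConverse

def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b

llm = BedrockConverse(model="openai.gpt-oss-20b-1:0")
tool = FunctionTool.from_defaults(fn=multiply)

# With the model listed as function-calling capable, the tool schema is
# sent along with the chat request via the Converse API.
response = llm.chat_with_tools([tool], user_msg="What is 7 times 6?")
print(response)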
@@ -29,7 +29,7 @@ dev = [
 
 [project]
 name = "llama-index-llms-bedrock-converse"
-version = "0.8.0"
+version = "0.8.2"
 description = "llama-index llms bedrock converse integration"
 authors = [{name = "Your Name", email = "you@example.com"}]
 requires-python = ">=3.9,<4.0"