llama-index-llms-bedrock-converse 0.12.6__tar.gz → 0.12.7__tar.gz

This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: llama-index-llms-bedrock-converse
- Version: 0.12.6
+ Version: 0.12.7
  Summary: llama-index llms bedrock converse integration
  Author-email: Your Name <you@example.com>
  License-Expression: MIT
@@ -6,37 +6,36 @@ from typing import (
      Callable,
      Dict,
      List,
+     Literal,
      Optional,
      Sequence,
      Tuple,
-     Literal,
      Union,
  )
- from botocore.exceptions import ClientError
- from typing_extensions import TypedDict
- from tenacity import (
-     before_sleep_log,
-     retry,
-     retry_if_exception,
-     retry_if_exception_type,
-     stop_after_attempt,
-     wait_exponential,
- )

+ from botocore.exceptions import ClientError
  from llama_index.core.base.llms.types import (
+     AudioBlock,
+     CachePoint,
      ChatMessage,
      ChatResponse,
-     MessageRole,
-     ImageBlock,
-     TextBlock,
      ContentBlock,
-     AudioBlock,
      DocumentBlock,
-     CachePoint,
+     ImageBlock,
+     MessageRole,
+     TextBlock,
      ThinkingBlock,
      ToolCallBlock,
  )
-
+ from tenacity import (
+     before_sleep_log,
+     retry,
+     retry_if_exception,
+     retry_if_exception_type,
+     stop_after_attempt,
+     wait_exponential,
+ )
+ from typing_extensions import TypedDict

  logger = logging.getLogger(__name__)

@@ -65,6 +64,7 @@ BEDROCK_MODELS = {
      "anthropic.claude-opus-4-20250514-v1:0": 200000,
      "anthropic.claude-opus-4-1-20250805-v1:0": 200000,
      "anthropic.claude-opus-4-5-20251101-v1:0": 200000,
+     "anthropic.claude-opus-4-6-v1": 200000,
      "anthropic.claude-sonnet-4-20250514-v1:0": 200000,
      "anthropic.claude-sonnet-4-5-20250929-v1:0": 200000,
      "anthropic.claude-haiku-4-5-20251001-v1:0": 200000,
@@ -114,6 +114,7 @@ BEDROCK_FUNCTION_CALLING_MODELS = (
      "anthropic.claude-opus-4-20250514-v1:0",
      "anthropic.claude-opus-4-1-20250805-v1:0",
      "anthropic.claude-opus-4-5-20251101-v1:0",
+     "anthropic.claude-opus-4-6-v1",
      "anthropic.claude-sonnet-4-20250514-v1:0",
      "anthropic.claude-sonnet-4-5-20250929-v1:0",
      "anthropic.claude-haiku-4-5-20251001-v1:0",
@@ -147,6 +148,7 @@ BEDROCK_INFERENCE_PROFILE_SUPPORTED_MODELS = (
      "anthropic.claude-opus-4-20250514-v1:0",
      "anthropic.claude-opus-4-1-20250805-v1:0",
      "anthropic.claude-opus-4-5-20251101-v1:0",
+     "anthropic.claude-opus-4-6-v1",
      "anthropic.claude-sonnet-4-20250514-v1:0",
      "anthropic.claude-sonnet-4-5-20250929-v1:0",
      "anthropic.claude-haiku-4-5-20251001-v1:0",
@@ -168,6 +170,7 @@ BEDROCK_PROMPT_CACHING_SUPPORTED_MODELS = (
      "anthropic.claude-opus-4-20250514-v1:0",
      "anthropic.claude-opus-4-1-20250805-v1:0",
      "anthropic.claude-opus-4-5-20251101-v1:0",
+     "anthropic.claude-opus-4-6-v1",
      "anthropic.claude-sonnet-4-20250514-v1:0",
      "anthropic.claude-sonnet-4-5-20250929-v1:0",
      "anthropic.claude-haiku-4-5-20251001-v1:0",
@@ -182,6 +185,7 @@ BEDROCK_REASONING_MODELS = (
      "anthropic.claude-opus-4-20250514-v1:0",
      "anthropic.claude-opus-4-1-20250805-v1:0",
      "anthropic.claude-opus-4-5-20251101-v1:0",
+     "anthropic.claude-opus-4-6-v1",
      "anthropic.claude-sonnet-4-20250514-v1:0",
      "anthropic.claude-sonnet-4-5-20250929-v1:0",
      "anthropic.claude-haiku-4-5-20251001-v1:0",
@@ -29,7 +29,7 @@ dev = [

  [project]
  name = "llama-index-llms-bedrock-converse"
- version = "0.12.6"
+ version = "0.12.7"
  description = "llama-index llms bedrock converse integration"
  authors = [{name = "Your Name", email = "you@example.com"}]
  requires-python = ">=3.9,<4.0"
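
Aside from the import reordering and version bump, the functional change in 0.12.7 is the addition of "anthropic.claude-opus-4-6-v1" to the model-capability tables above, so the new Opus id can be passed to the integration's BedrockConverse class. A minimal usage sketch, assuming AWS credentials are already configured and the model is available in the chosen region (the region value below is a placeholder):

# Minimal sketch: pass the newly listed model id to BedrockConverse.
# Assumes configured AWS credentials; "us-east-1" is a placeholder region.
from llama_index.llms.bedrock_converse import BedrockConverse

llm = BedrockConverse(
    model="anthropic.claude-opus-4-6-v1",  # id added to the tables in 0.12.7
    region_name="us-east-1",
)

response = llm.complete("Say hello in one short sentence.")
print(response.text)

Because the id also appears in BEDROCK_FUNCTION_CALLING_MODELS, BEDROCK_PROMPT_CACHING_SUPPORTED_MODELS, and BEDROCK_REASONING_MODELS, the integration's capability checks treat tool calling, prompt caching, and reasoning as supported for this model.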