llama-index-llms-bedrock-converse 0.10.6__tar.gz → 0.10.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/PKG-INFO +1 -1
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/llama_index/llms/bedrock_converse/base.py +12 -0
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/llama_index/llms/bedrock_converse/utils.py +10 -0
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/pyproject.toml +1 -1
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/.gitignore +0 -0
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/LICENSE +0 -0
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/README.md +0 -0
- {llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/llama_index/llms/bedrock_converse/__init__.py +0 -0
{llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/llama_index/llms/bedrock_converse/base.py
RENAMED

@@ -4,6 +4,7 @@ from typing import (
     Callable,
     Dict,
     List,
+    Literal,
     Optional,
     Sequence,
     Tuple,

@@ -157,6 +158,13 @@ class BedrockConverse(FunctionCallingLLM):
     guardrail_version: Optional[str] = Field(
         description="The version number for the guardrail. The value can also be DRAFT"
     )
+    guardrail_stream_processing_mode: Optional[Literal["sync", "async"]] = Field(
+        description=(
+            "The stream processing mode to use when leveraging a guardrail in a streaming request (ConverseStream). "
+            "If set, the specified mode will be included in the request's guardrail configuration object, altering the streaming response behavior. "
+            "If a value is not provided, no mode will be explicitly included in the request's guardrail configuration object, and thus Amazon Bedrock's default, Synchronous Mode, will be used."
+        )
+    )
     application_inference_profile_arn: Optional[str] = Field(
         description="The ARN of an application inference profile to invoke in place of the model. If provided, make sure the model argument refers to the same one underlying the application inference profile."
     )

@@ -211,6 +219,7 @@ class BedrockConverse(FunctionCallingLLM):
         output_parser: Optional[BaseOutputParser] = None,
         guardrail_identifier: Optional[str] = None,
         guardrail_version: Optional[str] = None,
+        guardrail_stream_processing_mode: Optional[Literal["sync", "async"]] = None,
         application_inference_profile_arn: Optional[str] = None,
         trace: Optional[str] = None,
         thinking: Optional[ThinkingDict] = None,

@@ -263,6 +272,7 @@ class BedrockConverse(FunctionCallingLLM):
             botocore_config=botocore_config,
             guardrail_identifier=guardrail_identifier,
             guardrail_version=guardrail_version,
+            guardrail_stream_processing_mode=guardrail_stream_processing_mode,
             application_inference_profile_arn=application_inference_profile_arn,
             trace=trace,
             thinking=thinking,

@@ -474,6 +484,7 @@ class BedrockConverse(FunctionCallingLLM):
             stream=True,
             guardrail_identifier=self.guardrail_identifier,
             guardrail_version=self.guardrail_version,
+            guardrail_stream_processing_mode=self.guardrail_stream_processing_mode,
             trace=self.trace,
             **all_kwargs,
         )

@@ -668,6 +679,7 @@ class BedrockConverse(FunctionCallingLLM):
             stream=False,
             guardrail_identifier=self.guardrail_identifier,
             guardrail_version=self.guardrail_version,
+            guardrail_stream_processing_mode=self.guardrail_stream_processing_mode,
             trace=self.trace,
             boto_client_kwargs=self._boto_client_kwargs,
             **all_kwargs,
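Note (not part of the diff): a minimal usage sketch of the new constructor argument, assuming an existing Amazon Bedrock guardrail. The guardrail identifier below is a hypothetical placeholder and the model ID is only an example; the mode only affects streaming requests (ConverseStream).

from llama_index.llms.bedrock_converse import BedrockConverse

# Placeholder values; guardrail_identifier/guardrail_version must refer to a
# real guardrail in your AWS account.
llm = BedrockConverse(
    model="anthropic.claude-3-5-sonnet-20240620-v1:0",  # example model ID
    guardrail_identifier="my-guardrail-id",             # hypothetical placeholder
    guardrail_version="DRAFT",
    guardrail_stream_processing_mode="async",           # new in 0.10.7; applied to streaming calls only
)
# Streaming requests (e.g. llm.stream_complete / llm.stream_chat) now include
# guardrailConfig["streamProcessingMode"] = "async" in the Converse API call.

Leaving the option unset preserves the previous behavior: no streamProcessingMode is sent and Bedrock falls back to its synchronous default, as described in the field's docstring above.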
{llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/llama_index/llms/bedrock_converse/utils.py
RENAMED

@@ -563,6 +563,7 @@ def converse_with_retry(
     stream: bool = False,
     guardrail_identifier: Optional[str] = None,
     guardrail_version: Optional[str] = None,
+    guardrail_stream_processing_mode: Optional[Literal["sync", "async"]] = None,
     trace: Optional[str] = None,
     **kwargs: Any,
 ) -> Any:

@@ -603,6 +604,10 @@ def converse_with_retry(
         converse_kwargs["guardrailConfig"]["guardrailVersion"] = guardrail_version
         if trace:
             converse_kwargs["guardrailConfig"]["trace"] = trace
+        if guardrail_stream_processing_mode and stream:
+            converse_kwargs["guardrailConfig"]["streamProcessingMode"] = (
+                guardrail_stream_processing_mode
+            )
 
     converse_kwargs = join_two_dicts(
         converse_kwargs,

@@ -644,6 +649,7 @@ async def converse_with_retry_async(
     stream: bool = False,
     guardrail_identifier: Optional[str] = None,
     guardrail_version: Optional[str] = None,
+    guardrail_stream_processing_mode: Optional[Literal["sync", "async"]] = None,
     trace: Optional[str] = None,
     boto_client_kwargs: Optional[Dict[str, Any]] = None,
     **kwargs: Any,

@@ -690,6 +696,10 @@ async def converse_with_retry_async(
         converse_kwargs["guardrailConfig"]["guardrailVersion"] = guardrail_version
         if trace:
             converse_kwargs["guardrailConfig"]["trace"] = trace
+        if guardrail_stream_processing_mode and stream:
+            converse_kwargs["guardrailConfig"]["streamProcessingMode"] = (
+                guardrail_stream_processing_mode
+            )
     converse_kwargs = join_two_dicts(
         converse_kwargs,
         {
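Note (not part of the diff): a rough sketch of the guardrailConfig object that converse_with_retry would assemble for a streaming request when all guardrail options are set. Values are placeholders; field names follow the Bedrock Converse/ConverseStream API.

# Illustrative only; identifier and trace values are placeholders.
guardrail_config = {
    "guardrailIdentifier": "my-guardrail-id",  # placeholder
    "guardrailVersion": "DRAFT",
    "trace": "enabled",
    # Added only when guardrail_stream_processing_mode is set AND stream=True:
    "streamProcessingMode": "async",
}

Because the new key is written only when both guardrail_stream_processing_mode and stream are truthy, non-streaming Converse calls are unchanged by this release.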
{llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/pyproject.toml
RENAMED

@@ -29,7 +29,7 @@ dev = [
 
 [project]
 name = "llama-index-llms-bedrock-converse"
-version = "0.10.6"
+version = "0.10.7"
 description = "llama-index llms bedrock converse integration"
 authors = [{name = "Your Name", email = "you@example.com"}]
 requires-python = ">=3.9,<4.0"
{llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/.gitignore
RENAMED
File without changes

{llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/LICENSE
RENAMED
File without changes

{llama_index_llms_bedrock_converse-0.10.6 → llama_index_llms_bedrock_converse-0.10.7}/README.md
RENAMED
File without changes