llama-index-llms-bedrock-converse 0.8.3__tar.gz → 0.9.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/PKG-INFO +1 -1
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/llama_index/llms/bedrock_converse/base.py +25 -1
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/llama_index/llms/bedrock_converse/utils.py +23 -2
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/pyproject.toml +1 -1
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/.gitignore +0 -0
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/LICENSE +0 -0
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/README.md +0 -0
- {llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/llama_index/llms/bedrock_converse/__init__.py +0 -0
{llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/llama_index/llms/bedrock_converse/base.py
RENAMED
@@ -138,6 +138,14 @@ class BedrockConverse(FunctionCallingLLM):
         default=60.0,
         description="The timeout for the Bedrock API request in seconds. It will be used for both connect and read timeouts.",
     )
+    system_prompt_caching: bool = Field(
+        default=False,
+        description="Whether to cache the system prompt. If you are using a system prompt, you should set this to True.",
+    )
+    tool_caching: bool = Field(
+        default=False,
+        description="Whether to cache the tools. If you are using tools, you should set this to True.",
+    )
     guardrail_identifier: Optional[str] = Field(
         description="The unique identifier of the guardrail that you want to use. If you don't provide a value, no guardrail is applied to the invocation."
     )
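A minimal usage sketch of the two new fields added above. The model id and region below are illustrative placeholders, not part of this diff; note that Bedrock applies prompt caching only above a model-specific minimum token count.

    from llama_index.llms.bedrock_converse import BedrockConverse

    # Sketch only: model id and region are assumed values.
    llm = BedrockConverse(
        model="anthropic.claude-3-5-sonnet-20240620-v1:0",
        region_name="us-east-1",
        system_prompt="You are a meticulous assistant for AWS billing questions.",
        system_prompt_caching=True,  # adds a cachePoint block after the system prompt
        tool_caching=True,           # adds a cachePoint block after the tool definitions
    )
    print(llm.complete("Summarize the benefits of prompt caching.").text)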
@@ -182,6 +190,8 @@ class BedrockConverse(FunctionCallingLLM):
         additional_kwargs: Optional[Dict[str, Any]] = None,
         callback_manager: Optional[CallbackManager] = None,
         system_prompt: Optional[str] = None,
+        system_prompt_caching: Optional[bool] = False,
+        tool_caching: Optional[bool] = False,
         messages_to_prompt: Optional[Callable[[Sequence[ChatMessage]], str]] = None,
         completion_to_prompt: Optional[Callable[[str], str]] = None,
         pydantic_program_mode: PydanticProgramMode = PydanticProgramMode.DEFAULT,
@@ -212,6 +222,8 @@ class BedrockConverse(FunctionCallingLLM):
             model=model,
             callback_manager=callback_manager,
             system_prompt=system_prompt,
+            system_prompt_caching=system_prompt_caching,
+            tool_caching=tool_caching,
             messages_to_prompt=messages_to_prompt,
             completion_to_prompt=completion_to_prompt,
             pydantic_program_mode=pydantic_program_mode,
@@ -362,6 +374,8 @@ class BedrockConverse(FunctionCallingLLM):
             client=self._client,
             messages=converse_messages,
             system_prompt=system_prompt,
+            system_prompt_caching=self.system_prompt_caching,
+            tool_caching=self.tool_caching,
             max_retries=self.max_retries,
             stream=False,
             guardrail_identifier=self.guardrail_identifier,
@@ -408,6 +422,8 @@ class BedrockConverse(FunctionCallingLLM):
             client=self._client,
             messages=converse_messages,
             system_prompt=system_prompt,
+            system_prompt_caching=self.system_prompt_caching,
+            tool_caching=self.tool_caching,
             max_retries=self.max_retries,
             stream=True,
             guardrail_identifier=self.guardrail_identifier,
@@ -544,6 +560,8 @@ class BedrockConverse(FunctionCallingLLM):
             config=self._config,
             messages=converse_messages,
             system_prompt=system_prompt,
+            system_prompt_caching=self.system_prompt_caching,
+            tool_caching=self.tool_caching,
             max_retries=self.max_retries,
             stream=False,
             guardrail_identifier=self.guardrail_identifier,
@@ -592,6 +610,8 @@ class BedrockConverse(FunctionCallingLLM):
             config=self._config,
             messages=converse_messages,
             system_prompt=system_prompt,
+            system_prompt_caching=self.system_prompt_caching,
+            tool_caching=self.tool_caching,
             max_retries=self.max_retries,
             stream=True,
             guardrail_identifier=self.guardrail_identifier,
@@ -723,6 +743,7 @@ class BedrockConverse(FunctionCallingLLM):
         verbose: bool = False,
         allow_parallel_tool_calls: bool = False,
         tool_required: bool = False,
+        tool_caching: bool = False,
         tool_choice: Optional[dict] = None,
         **kwargs: Any,
     ) -> Dict[str, Any]:
@@ -737,7 +758,10 @@ class BedrockConverse(FunctionCallingLLM):
 
         # convert Llama Index tools to AWS Bedrock Converse tools
         tool_config = tools_to_converse_tools(
-            tools,
+            tools,
+            tool_choice=tool_choice,
+            tool_required=tool_required,
+            tool_caching=tool_caching,
         )
 
         return {
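With the signature change above, tool caching can also be requested per call on the tool-preparation path. A short sketch, assuming FunctionTool from llama_index.core.tools and that the function-calling entry points forward extra keyword arguments down to _prepare_chat_with_tools; llm is the instance from the earlier sketch:

    from llama_index.core.tools import FunctionTool

    def add(a: int, b: int) -> int:
        """Add two integers."""
        return a + b

    add_tool = FunctionTool.from_defaults(fn=add)

    response = llm.chat_with_tools(
        tools=[add_tool],
        user_msg="What is 2 + 3?",
        tool_caching=True,  # threaded through to tools_to_converse_tools(...)
    )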
{llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/llama_index/llms/bedrock_converse/utils.py
RENAMED
@@ -350,6 +350,7 @@ def tools_to_converse_tools(
     tools: List["BaseTool"],
     tool_choice: Optional[dict] = None,
     tool_required: bool = False,
+    tool_caching: bool = False,
 ) -> Dict[str, Any]:
     """
     Converts a list of tools to AWS Bedrock Converse tools.
@@ -374,6 +375,8 @@ def tools_to_converse_tools(
             "inputSchema": {"json": tool.metadata.get_parameters_dict()},
         }
         converse_tools.append({"toolSpec": tool_dict})
+    if tool_caching:
+        converse_tools.append({"cachePoint": {"type": "default"}})
     return {
         "tools": converse_tools,
         # https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ToolChoice.html
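For reference, the dict returned by tools_to_converse_tools with tool_caching=True has roughly this shape. The tool name, description, and schema below are invented for illustration, and the toolChoice entry that follows the AWS ToolChoice link is omitted:

    tool_config = {
        "tools": [
            {
                "toolSpec": {
                    "name": "add",
                    "description": "Add two integers.",
                    "inputSchema": {
                        "json": {
                            "type": "object",
                            "properties": {"a": {"type": "integer"}, "b": {"type": "integer"}},
                        }
                    },
                }
            },
            # single cache checkpoint appended after all tool definitions
            {"cachePoint": {"type": "default"}},
        ],
    }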
@@ -440,6 +443,8 @@ def converse_with_retry(
     messages: Sequence[Dict[str, Any]],
     max_retries: int = 3,
     system_prompt: Optional[str] = None,
+    system_prompt_caching: bool = False,
+    tool_caching: bool = False,
     max_tokens: int = 1000,
     temperature: float = 0.1,
     stream: bool = False,
@@ -459,9 +464,16 @@
         },
     }
     if system_prompt:
-        converse_kwargs["system"] = [{"text": system_prompt}]
+        system_messages: list[dict[str, Any]] = [{"text": system_prompt}]
+        if system_prompt_caching:
+            system_messages.append({"cachePoint": {"type": "default"}})
+        converse_kwargs["system"] = system_messages
     if tool_config := kwargs.get("tools"):
         converse_kwargs["toolConfig"] = tool_config
+        if tool_caching and "tools" in converse_kwargs["toolConfig"]:
+            converse_kwargs["toolConfig"]["tools"].append(
+                {"cachePoint": {"type": "default"}}
+            )
     if guardrail_identifier and guardrail_version:
         converse_kwargs["guardrailConfig"] = {}
         converse_kwargs["guardrailConfig"]["guardrailIdentifier"] = guardrail_identifier
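The effect of system_prompt_caching on the request payload: the "system" field passed to the Bedrock Converse API becomes a two-element list, with a cache checkpoint following the prompt text. The prompt string below is illustrative only:

    system_payload = [
        {"text": "You are a meticulous assistant for AWS billing questions."},
        {"cachePoint": {"type": "default"}},  # content before this point is eligible for caching
    ]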
@@ -493,6 +505,8 @@ async def converse_with_retry_async(
     messages: Sequence[Dict[str, Any]],
     max_retries: int = 3,
     system_prompt: Optional[str] = None,
+    system_prompt_caching: bool = False,
+    tool_caching: bool = False,
     max_tokens: int = 1000,
     temperature: float = 0.1,
     stream: bool = False,
@@ -513,9 +527,16 @@
         },
     }
     if system_prompt:
-        converse_kwargs["system"] = [{"text": system_prompt}]
+        system_messages: list[dict[str, Any]] = [{"text": system_prompt}]
+        if system_prompt_caching:
+            system_messages.append({"cachePoint": {"type": "default"}})
+        converse_kwargs["system"] = system_messages
     if tool_config := kwargs.get("tools"):
         converse_kwargs["toolConfig"] = tool_config
+        if tool_caching and "tools" in converse_kwargs["toolConfig"]:
+            converse_kwargs["toolConfig"]["tools"].append(
+                {"cachePoint": {"type": "default"}}
+            )
     if guardrail_identifier and guardrail_version:
         converse_kwargs["guardrailConfig"] = {}
         converse_kwargs["guardrailConfig"]["guardrailIdentifier"] = guardrail_identifier
{llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/pyproject.toml
RENAMED
@@ -29,7 +29,7 @@ dev = [
 
 [project]
 name = "llama-index-llms-bedrock-converse"
-version = "0.8.3"
+version = "0.9.0"
 description = "llama-index llms bedrock converse integration"
 authors = [{name = "Your Name", email = "you@example.com"}]
 requires-python = ">=3.9,<4.0"
{llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/.gitignore
RENAMED
File without changes
{llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/LICENSE
RENAMED
File without changes
{llama_index_llms_bedrock_converse-0.8.3 → llama_index_llms_bedrock_converse-0.9.0}/README.md
RENAMED
File without changes