llama-index-llms-openai 0.6.1__tar.gz → 0.6.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/PKG-INFO +1 -1
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/base.py +1 -1
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/responses.py +6 -4
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/utils.py +2 -0
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/.gitignore +0 -0
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/LICENSE +0 -0
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/README.md +0 -0
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/__init__.py +0 -0
- {llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/py.typed +0 -0
{llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/base.py
RENAMED
|
@@ -1004,7 +1004,7 @@ class OpenAI(FunctionCallingLLM):
|
|
|
1004
1004
|
del llm_kwargs["tool_choice"]
|
|
1005
1005
|
return llm_kwargs
|
|
1006
1006
|
|
|
1007
|
-
def _should_use_structure_outputs(self):
|
|
1007
|
+
def _should_use_structure_outputs(self) -> bool:
|
|
1008
1008
|
return (
|
|
1009
1009
|
self.pydantic_program_mode == PydanticProgramMode.DEFAULT
|
|
1010
1010
|
and is_json_schema_supported(self.model)
|
{llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/responses.py
RENAMED
|
@@ -450,7 +450,8 @@ class OpenAIResponses(FunctionCallingLLM):
|
|
|
450
450
|
|
|
451
451
|
return stream_complete_fn(prompt, **kwargs)
|
|
452
452
|
|
|
453
|
-
|
|
453
|
+
@staticmethod
|
|
454
|
+
def _parse_response_output(output: List[ResponseOutputItem]) -> ChatResponse:
|
|
454
455
|
message = ChatMessage(role=MessageRole.ASSISTANT, blocks=[])
|
|
455
456
|
additional_kwargs = {"built_in_tool_calls": []}
|
|
456
457
|
tool_calls = []
|
|
@@ -526,7 +527,7 @@ class OpenAIResponses(FunctionCallingLLM):
|
|
|
526
527
|
if self.track_previous_responses:
|
|
527
528
|
self._previous_response_id = response.id
|
|
528
529
|
|
|
529
|
-
chat_response = self._parse_response_output(response.output)
|
|
530
|
+
chat_response = OpenAIResponses._parse_response_output(response.output)
|
|
530
531
|
chat_response.raw = response
|
|
531
532
|
chat_response.additional_kwargs["usage"] = response.usage
|
|
532
533
|
if hasattr(response.usage.output_tokens_details, "reasoning_tokens"):
|
|
@@ -590,7 +591,6 @@ class OpenAIResponses(FunctionCallingLLM):
|
|
|
590
591
|
elif isinstance(event, ResponseTextDeltaEvent):
|
|
591
592
|
# Text content is being added
|
|
592
593
|
delta = event.delta
|
|
593
|
-
blocks.append(TextBlock(text=delta))
|
|
594
594
|
elif isinstance(event, ResponseImageGenCallPartialImageEvent):
|
|
595
595
|
# Partial image
|
|
596
596
|
if event.partial_image_b64:
|
|
@@ -653,6 +653,8 @@ class OpenAIResponses(FunctionCallingLLM):
|
|
|
653
653
|
# Response is complete
|
|
654
654
|
if hasattr(event, "response") and hasattr(event.response, "usage"):
|
|
655
655
|
additional_kwargs["usage"] = event.response.usage
|
|
656
|
+
resp = OpenAIResponses._parse_response_output(event.response.output)
|
|
657
|
+
blocks = resp.message.blocks
|
|
656
658
|
|
|
657
659
|
return (
|
|
658
660
|
blocks,
|
|
@@ -782,7 +784,7 @@ class OpenAIResponses(FunctionCallingLLM):
|
|
|
782
784
|
if self.track_previous_responses:
|
|
783
785
|
self._previous_response_id = response.id
|
|
784
786
|
|
|
785
|
-
chat_response = self._parse_response_output(response.output)
|
|
787
|
+
chat_response = OpenAIResponses._parse_response_output(response.output)
|
|
786
788
|
chat_response.raw = response
|
|
787
789
|
chat_response.additional_kwargs["usage"] = response.usage
|
|
788
790
|
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/__init__.py
RENAMED
|
File without changes
|
{llama_index_llms_openai-0.6.1 → llama_index_llms_openai-0.6.3}/llama_index/llms/openai/py.typed
RENAMED
|
File without changes
|