llama-index-llms-openai 0.3.7__tar.gz → 0.3.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/PKG-INFO +1 -1
- {llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/llama_index/llms/openai/base.py +5 -1
- {llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/llama_index/llms/openai/utils.py +4 -1
- {llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/README.md +0 -0
- {llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/llama_index/llms/openai/__init__.py +0 -0
{llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/llama_index/llms/openai/base.py
RENAMED
|
@@ -267,6 +267,10 @@ class OpenAI(FunctionCallingLLM):
|
|
|
267
267
|
if model in O1_MODELS:
|
|
268
268
|
temperature = 1.0
|
|
269
269
|
|
|
270
|
+
supports_content_blocks = kwargs.pop(
|
|
271
|
+
"supports_content_blocks", model in ALL_AVAILABLE_MODELS
|
|
272
|
+
)
|
|
273
|
+
|
|
270
274
|
super().__init__(
|
|
271
275
|
model=model,
|
|
272
276
|
temperature=temperature,
|
|
@@ -286,7 +290,7 @@ class OpenAI(FunctionCallingLLM):
|
|
|
286
290
|
pydantic_program_mode=pydantic_program_mode,
|
|
287
291
|
output_parser=output_parser,
|
|
288
292
|
strict=strict,
|
|
289
|
-
supports_content_blocks=model in ALL_AVAILABLE_MODELS,
|
|
293
|
+
supports_content_blocks=supports_content_blocks,
|
|
290
294
|
**kwargs,
|
|
291
295
|
)
|
|
292
296
|
|
{llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/llama_index/llms/openai/utils.py
RENAMED
|
@@ -263,7 +263,10 @@ def to_openai_message_dict(
|
|
|
263
263
|
content_txt = ""
|
|
264
264
|
for block in message.blocks:
|
|
265
265
|
if isinstance(block, TextBlock):
|
|
266
|
-
if message.role.value in ("assistant", "tool", "system"):
|
|
266
|
+
if (
|
|
267
|
+
message.role.value in ("assistant", "tool", "system")
|
|
268
|
+
or not supports_content_blocks
|
|
269
|
+
):
|
|
267
270
|
# Despite the docs say otherwise, when role is ASSISTANT, SYSTEM
|
|
268
271
|
# or TOOL, 'content' cannot be a list and must be string instead.
|
|
269
272
|
content_txt += block.text
|
|
File without changes
|
{llama_index_llms_openai-0.3.7 → llama_index_llms_openai-0.3.8}/llama_index/llms/openai/__init__.py
RENAMED
|
File without changes
|