llama-index-llms-openai 0.3.6__tar.gz → 0.3.8__tar.gz

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: llama-index-llms-openai
3
- Version: 0.3.6
3
+ Version: 0.3.8
4
4
  Summary: llama-index llms openai integration
5
5
  License: MIT
6
6
  Author: llama-index
@@ -59,6 +59,7 @@ from llama_index.core.llms.llm import ToolSelection
59
59
  from llama_index.core.llms.utils import parse_partial_json
60
60
  from llama_index.core.types import BaseOutputParser, Model, PydanticProgramMode
61
61
  from llama_index.llms.openai.utils import (
62
+ ALL_AVAILABLE_MODELS,
62
63
  O1_MODELS,
63
64
  OpenAIToolCall,
64
65
  create_retry_decorator,
@@ -218,7 +219,7 @@ class OpenAI(FunctionCallingLLM):
218
219
  description="Whether to use strict mode for invoking tools/using schemas.",
219
220
  )
220
221
  supports_content_blocks: bool = Field(
221
- default=True,
222
+ default=False,
222
223
  description="Whether the model supports content blocks in chat messages.",
223
224
  )
224
225
 
@@ -266,6 +267,10 @@ class OpenAI(FunctionCallingLLM):
266
267
  if model in O1_MODELS:
267
268
  temperature = 1.0
268
269
 
270
+ supports_content_blocks = kwargs.pop(
271
+ "supports_content_blocks", model in ALL_AVAILABLE_MODELS
272
+ )
273
+
269
274
  super().__init__(
270
275
  model=model,
271
276
  temperature=temperature,
@@ -285,6 +290,7 @@ class OpenAI(FunctionCallingLLM):
285
290
  pydantic_program_mode=pydantic_program_mode,
286
291
  output_parser=output_parser,
287
292
  strict=strict,
293
+ supports_content_blocks=supports_content_blocks,
288
294
  **kwargs,
289
295
  )
290
296
 
@@ -263,7 +263,10 @@ def to_openai_message_dict(
263
263
  content_txt = ""
264
264
  for block in message.blocks:
265
265
  if isinstance(block, TextBlock):
266
- if message.role.value in ("assistant", "tool", "system") or not supports_content_blocks:
266
+ if (
267
+ message.role.value in ("assistant", "tool", "system")
268
+ or not supports_content_blocks
269
+ ):
267
270
  # Despite the docs say otherwise, when role is ASSISTANT, SYSTEM
268
271
  # or TOOL, 'content' cannot be a list and must be string instead.
269
272
  content_txt += block.text
@@ -29,7 +29,7 @@ exclude = ["**/BUILD"]
29
29
  license = "MIT"
30
30
  name = "llama-index-llms-openai"
31
31
  readme = "README.md"
32
- version = "0.3.6"
32
+ version = "0.3.8"
33
33
 
34
34
  [tool.poetry.dependencies]
35
35
  python = ">=3.9,<4.0"