llama-index-llms-openai 0.3.7__py3-none-any.whl → 0.3.9__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
llama_index/llms/openai/base.py
@@ -59,7 +59,6 @@ from llama_index.core.llms.llm import ToolSelection
 from llama_index.core.llms.utils import parse_partial_json
 from llama_index.core.types import BaseOutputParser, Model, PydanticProgramMode
 from llama_index.llms.openai.utils import (
-    ALL_AVAILABLE_MODELS,
     O1_MODELS,
     OpenAIToolCall,
     create_retry_decorator,
@@ -218,10 +217,6 @@ class OpenAI(FunctionCallingLLM):
         default=False,
         description="Whether to use strict mode for invoking tools/using schemas.",
     )
-    supports_content_blocks: bool = Field(
-        default=False,
-        description="Whether the model supports content blocks in chat messages.",
-    )
 
     _client: Optional[SyncOpenAI] = PrivateAttr()
     _aclient: Optional[AsyncOpenAI] = PrivateAttr()
@@ -286,7 +281,6 @@ class OpenAI(FunctionCallingLLM):
             pydantic_program_mode=pydantic_program_mode,
             output_parser=output_parser,
             strict=strict,
-            supports_content_blocks=model in ALL_AVAILABLE_MODELS,
             **kwargs,
         )
 
@@ -432,7 +426,6 @@ class OpenAI(FunctionCallingLLM):
         message_dicts = to_openai_message_dicts(
             messages,
             model=self.model,
-            supports_content_blocks=self.supports_content_blocks,
         )
 
         if self.reuse_client:
@@ -471,7 +464,6 @@ class OpenAI(FunctionCallingLLM):
         message_dicts = to_openai_message_dicts(
             messages,
             model=self.model,
-            supports_content_blocks=self.supports_content_blocks,
         )
 
         def gen() -> ChatResponseGen:
@@ -685,7 +677,6 @@ class OpenAI(FunctionCallingLLM):
         message_dicts = to_openai_message_dicts(
             messages,
             model=self.model,
-            supports_content_blocks=self.supports_content_blocks,
        )
 
         if self.reuse_client:
@@ -722,7 +713,6 @@ class OpenAI(FunctionCallingLLM):
         message_dicts = to_openai_message_dicts(
             messages,
             model=self.model,
-            supports_content_blocks=self.supports_content_blocks,
         )
 
         async def gen() -> ChatResponseAsyncGen:
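With the supports_content_blocks field, constructor argument, and per-call keyword removed in the base.py hunks above, the serialization helper is now called with only the messages and the model name. A minimal sketch of the new call, assuming ChatMessage is importable from llama_index.core.llms and using an illustrative model name:

from llama_index.core.llms import ChatMessage
from llama_index.llms.openai.utils import to_openai_message_dicts

# The helper now decides per message whether 'content' should be a plain string
# or a list of typed blocks; no supports_content_blocks flag is passed anymore.
messages = [
    ChatMessage(role="system", content="You are a helpful assistant."),
    ChatMessage(role="user", content="What is the capital of France?"),
]
message_dicts = to_openai_message_dicts(messages, model="gpt-4o-mini")  # model name is illustrative
print(message_dicts)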
llama_index/llms/openai/utils.py
@@ -256,19 +256,14 @@ def to_openai_message_dict(
     message: ChatMessage,
     drop_none: bool = False,
     model: Optional[str] = None,
-    supports_content_blocks: bool = False,
 ) -> ChatCompletionMessageParam:
     """Convert a ChatMessage to an OpenAI message dict."""
     content = []
     content_txt = ""
     for block in message.blocks:
         if isinstance(block, TextBlock):
-            if message.role.value in ("assistant", "tool", "system") or not supports_content_blocks:
-                # Despite the docs say otherwise, when role is ASSISTANT, SYSTEM
-                # or TOOL, 'content' cannot be a list and must be string instead.
-                content_txt += block.text
-            else:
-                content.append({"type": "text", "text": block.text})
+            content.append({"type": "text", "text": block.text})
+            content_txt += block.text
         elif isinstance(block, ImageBlock):
             if block.url:
                 content.append(
@@ -290,11 +285,15 @@ def to_openai_message_dict(
             msg = f"Unsupported content block type: {type(block).__name__}"
             raise ValueError(msg)
 
+    # NOTE: Despite what the openai docs say, if the role is ASSISTANT, SYSTEM
+    # or TOOL, 'content' cannot be a list and must be string instead.
+    # Furthermore, if all blocks are text blocks, we can use the content_txt
+    # as the content. This will avoid breaking openai-like APIs.
     message_dict = {
         "role": message.role.value,
         "content": content_txt
         if message.role.value in ("assistant", "tool", "system")
-        or not supports_content_blocks
+        or all(isinstance(block, TextBlock) for block in message.blocks)
         else content,
     }
 
@@ -321,7 +320,6 @@ def to_openai_message_dicts(
     messages: Sequence[ChatMessage],
     drop_none: bool = False,
     model: Optional[str] = None,
-    supports_content_blocks: bool = False,
 ) -> List[ChatCompletionMessageParam]:
     """Convert generic messages to OpenAI message dicts."""
     return [
@@ -329,7 +327,6 @@
             message,
             drop_none=drop_none,
             model=model,
-            supports_content_blocks=supports_content_blocks,
         )
         for message in messages
     ]
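The net effect of the utils.py change: a message whose blocks are all text serializes with a plain string content (which keeps OpenAI-compatible backends working), while a message that mixes in an image keeps the list-of-blocks form. A rough sketch of both cases, assuming TextBlock and ImageBlock are importable from llama_index.core.llms (import paths can vary across llama-index-core versions) and using an illustrative image URL:

from llama_index.core.llms import ChatMessage, ImageBlock, TextBlock
from llama_index.llms.openai.utils import to_openai_message_dict

# All-text message: every block is a TextBlock, so 'content' collapses to a string.
text_only = ChatMessage(role="user", blocks=[TextBlock(text="Hello!")])
print(to_openai_message_dict(text_only))
# expected shape: {'role': 'user', 'content': 'Hello!'}

# Mixed text + image message: not every block is a TextBlock, so 'content'
# stays a list of typed entries (text plus image_url).
multimodal = ChatMessage(
    role="user",
    blocks=[
        TextBlock(text="Describe this image."),
        ImageBlock(url="https://example.com/cat.png"),  # illustrative URL
    ],
)
print(to_openai_message_dict(multimodal))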
llama_index_llms_openai-0.3.7.dist-info/METADATA → llama_index_llms_openai-0.3.9.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.3.7
+Version: 0.3.9
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
llama_index_llms_openai-0.3.9.dist-info/RECORD (added)
@@ -0,0 +1,6 @@
+llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
+llama_index/llms/openai/base.py,sha256=_kW22tTABTKQ-GFRN00P5OOcJS4IeVAI3vPqbjw1D8o,35777
+llama_index/llms/openai/utils.py,sha256=tjAOKHUJtEQMI4pnZ6YMd0kI0YTkeE9SnVFGxQ9tbiU,18223
+llama_index_llms_openai-0.3.9.dist-info/METADATA,sha256=pRNz1HOod8H3BlF5jCrr4cqv-MR9SxTqO6OZPgE1tXY,3320
+llama_index_llms_openai-0.3.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_index_llms_openai-0.3.9.dist-info/RECORD,,
llama_index_llms_openai-0.3.7.dist-info/RECORD (removed)
@@ -1,6 +0,0 @@
-llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
-llama_index/llms/openai/base.py,sha256=bCJz2CqXRSI1Dfqv4QcvP69xm2i7uYkcZWhi5tOXk9E,36289
-llama_index/llms/openai/utils.py,sha256=aF2gP-Aef7rMUIYp9bG2Ke45pNszl_Jagga781Hul-A,18335
-llama_index_llms_openai-0.3.7.dist-info/METADATA,sha256=DLhC9DQyyPLbvbASGtZikZvrLmvyodZGXNcR2i7DG-4,3320
-llama_index_llms_openai-0.3.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-llama_index_llms_openai-0.3.7.dist-info/RECORD,,