llama-index-llms-openai 0.3.35__tar.gz → 0.3.37__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.3.35
+Version: 0.3.37
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -78,7 +78,7 @@ from llama_index.llms.openai.utils import (
     to_openai_message_dicts,
     update_tool_calls,
 )
-from openai import AsyncOpenAI, AzureOpenAI
+from openai import AsyncOpenAI, AzureOpenAI, AsyncAzureOpenAI
 from openai import OpenAI as SyncOpenAI
 from openai.types.chat.chat_completion_chunk import (
     ChatCompletionChunk,
@@ -341,9 +341,6 @@ class OpenAI(FunctionCallingLLM):
             model_name = model_name.split(":")[1]
         return model_name
 
-    def _is_azure_client(self) -> bool:
-        return isinstance(self._get_client(), AzureOpenAI)
-
     @classmethod
     def class_name(cls) -> str:
         return "openai_llm"
@@ -531,7 +528,7 @@ class OpenAI(FunctionCallingLLM):
                 if len(response.choices) > 0:
                     delta = response.choices[0].delta
                 else:
-                    if self._is_azure_client():
+                    if isinstance(client, AzureOpenAI):
                         continue
                     else:
                         delta = ChoiceDelta()
@@ -800,7 +797,7 @@ class OpenAI(FunctionCallingLLM):
                         continue
                     delta = response.choices[0].delta
                 else:
-                    if self._is_azure_client():
+                    if isinstance(aclient, AsyncAzureOpenAI):
                         continue
                     else:
                         delta = ChoiceDelta()
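Taken together with the import change and the removal of _is_azure_client() above, these two hunks move Azure detection from a helper that always fetched the synchronous client to direct isinstance checks on the client each streaming path already holds, so the async path now tests against AsyncAzureOpenAI. A minimal sketch of the resulting pattern (the function name below is hypothetical, not part of the package):

    from openai import AzureOpenAI, AsyncAzureOpenAI

    def _is_azure(client) -> bool:
        # Sync streaming checks its client against AzureOpenAI; async
        # streaming checks its client against AsyncAzureOpenAI. Azure can
        # stream chunks whose `choices` list is empty, and those chunks
        # are skipped instead of being turned into an empty ChoiceDelta.
        return isinstance(client, (AzureOpenAI, AsyncAzureOpenAI))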
@@ -46,6 +46,10 @@ O1_MODELS: Dict[str, int] = {
     "o1-mini-2024-09-12": 128000,
     "o3-mini": 200000,
     "o3-mini-2025-01-31": 200000,
+    "o3": 200000,
+    "o3-2025-04-16": 200000,
+    "o4-mini": 200000,
+    "o4-mini-2025-04-16": 200000,
 }
 
 O1_MODELS_WITHOUT_FUNCTION_CALLING = {
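The four new entries register 200k-token context windows for the o3 and o4-mini model families, presumably in llama_index.llms.openai.utils (the module imported at the top of the diff). Assuming the existing openai_modelname_to_contextsize helper in that module is unchanged, the new models now resolve like this:

    from llama_index.llms.openai.utils import openai_modelname_to_contextsize

    # These values come straight from the dictionary entries added above and
    # are what the OpenAI class would report as LLMMetadata.context_window.
    assert openai_modelname_to_contextsize("o3") == 200000
    assert openai_modelname_to_contextsize("o4-mini-2025-04-16") == 200000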
@@ -28,7 +28,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-openai"
 readme = "README.md"
-version = "0.3.35"
+version = "0.3.37"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
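The pyproject.toml version bump mirrors the metadata change at the top of the diff. To pick up the new model entries and the Azure streaming fix, upgrade the package in the usual way, e.g. pip install -U "llama-index-llms-openai>=0.3.37".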