llama-index-llms-openai 0.6.10__tar.gz → 0.6.11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/PKG-INFO +1 -1
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/base.py +6 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/utils.py +12 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/.gitignore +0 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/LICENSE +0 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/README.md +0 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/__init__.py +0 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/py.typed +0 -0
- {llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/responses.py +0 -0
{llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/base.py

@@ -78,6 +78,7 @@ from llama_index.llms.openai.utils import (
     to_openai_message_dicts,
     update_tool_calls,
     is_json_schema_supported,
+    is_chatcomp_api_supported,
 )
 from openai import AsyncOpenAI
 from openai import OpenAI as SyncOpenAI

@@ -299,6 +300,11 @@ class OpenAI(FunctionCallingLLM):
         if model in O1_MODELS:
             temperature = 1.0

+        if not is_chatcomp_api_supported(model):
+            raise ValueError(
+                f"Cannot use model {model} as it is only supported by the Responses API. Use the OpenAIResponses class for it."
+            )
+
         super().__init__(
             model=model,
             temperature=temperature,
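The practical effect of this hunk is that the chat-completions-oriented OpenAI class now fails fast at construction time instead of erroring on the first request. A minimal sketch of the caller-side behavior, assuming the package's __init__ re-exports OpenAIResponses alongside OpenAI (the error message points to that class; the try/except is purely illustrative):

from llama_index.llms.openai import OpenAI, OpenAIResponses  # OpenAIResponses export assumed

# "gpt-5.2-pro" is listed in RESPONSES_API_ONLY_MODELS, so the
# constructor guard added above raises immediately.
try:
    llm = OpenAI(model="gpt-5.2-pro")
except ValueError as err:
    print(err)
    # Cannot use model gpt-5.2-pro as it is only supported by the
    # Responses API. Use the OpenAIResponses class for it.

# The Responses-API client is the path the error message recommends.
llm = OpenAIResponses(model="gpt-5.2-pro")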
{llama_index_llms_openai-0.6.10 → llama_index_llms_openai-0.6.11}/llama_index/llms/openai/utils.py

@@ -68,6 +68,13 @@ O1_MODELS: Dict[str, int] = {
     "gpt-5.1": 400000,
     "gpt-5.1-2025-11-13": 400000,
     "gpt-5.1-chat-latest": 128000,
+    "gpt-5.2": 400000,
+    "gpt-5.2-2025-12-11": 400000,
+}
+
+RESPONSES_API_ONLY_MODELS = {
+    "gpt-5.2-pro": 400000,
+    "gpt-5.2-pro-2025-12-11": 400000,
 }

 O1_MODELS_WITHOUT_FUNCTION_CALLING = {

@@ -208,9 +215,14 @@ JSON_SCHEMA_MODELS = [
     "gpt-4o",
     "gpt-4.1",
     "gpt-5",
+    "gpt-5.2",
 ]


+def is_chatcomp_api_supported(model: str) -> bool:
+    return model not in RESPONSES_API_ONLY_MODELS
+
+
 def is_json_schema_supported(model: str) -> bool:
     try:
         from openai.resources.chat.completions import completions
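Because the new helper is a plain set-membership test, only exact model strings are caught; dated variants have to be enumerated in RESPONSES_API_ONLY_MODELS by hand. A standalone sanity check of the behavior the hunks above imply (the dict and function are restated here so the snippet runs on its own):

# Restated from the utils.py hunks above so this runs standalone.
RESPONSES_API_ONLY_MODELS = {
    "gpt-5.2-pro": 400000,
    "gpt-5.2-pro-2025-12-11": 400000,
}

def is_chatcomp_api_supported(model: str) -> bool:
    # Any model not pinned to the Responses API still works with Chat Completions.
    return model not in RESPONSES_API_ONLY_MODELS

assert is_chatcomp_api_supported("gpt-5.2")              # new chat-capable model
assert is_chatcomp_api_supported("gpt-5.2-2025-12-11")
assert not is_chatcomp_api_supported("gpt-5.2-pro")      # Responses API only
assert not is_chatcomp_api_supported("gpt-5.2-pro-2025-12-11")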