llama-index-llms-openai 0.2.13__tar.gz → 0.2.15__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/PKG-INFO +1 -1
- {llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/base.py +4 -2
- {llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/utils.py +8 -0
- {llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/README.md +0 -0
- {llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/__init__.py +0 -0
{llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/base.py
RENAMED
|
@@ -239,6 +239,8 @@ class OpenAI(FunctionCallingLLM):
|
|
|
239
239
|
default_headers: Optional[Dict[str, str]] = None,
|
|
240
240
|
http_client: Optional[httpx.Client] = None,
|
|
241
241
|
async_http_client: Optional[httpx.AsyncClient] = None,
|
|
242
|
+
openai_client: Optional[SyncOpenAI] = None,
|
|
243
|
+
async_openai_client: Optional[AsyncOpenAI] = None,
|
|
242
244
|
# base class
|
|
243
245
|
system_prompt: Optional[str] = None,
|
|
244
246
|
messages_to_prompt: Optional[Callable[[Sequence[ChatMessage]], str]] = None,
|
|
@@ -282,8 +284,8 @@ class OpenAI(FunctionCallingLLM):
|
|
|
282
284
|
**kwargs,
|
|
283
285
|
)
|
|
284
286
|
|
|
285
|
-
self._client = None
|
|
286
|
-
self._aclient = None
|
|
287
|
+
self._client = openai_client
|
|
288
|
+
self._aclient = async_openai_client
|
|
287
289
|
self._http_client = http_client
|
|
288
290
|
self._async_http_client = async_http_client
|
|
289
291
|
|
{llama_index_llms_openai-0.2.13 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/utils.py
RENAMED
|
@@ -229,6 +229,14 @@ def is_chat_model(model: str) -> bool:
|
|
|
229
229
|
|
|
230
230
|
|
|
231
231
|
def is_function_calling_model(model: str) -> bool:
|
|
232
|
+
# checking whether the model is fine-tuned or not.
|
|
233
|
+
# fine-tuned model names these days look like:
|
|
234
|
+
# ft:gpt-3.5-turbo:acemeco:suffix:abc123
|
|
235
|
+
if model.startswith("ft-"): # legacy fine-tuning
|
|
236
|
+
model = model.split(":")[0]
|
|
237
|
+
elif model.startswith("ft:"):
|
|
238
|
+
model = model.split(":")[1]
|
|
239
|
+
|
|
232
240
|
is_chat_model_ = is_chat_model(model)
|
|
233
241
|
is_old = "0314" in model or "0301" in model
|
|
234
242
|
|
|
File without changes
|
|
File without changes
|