llama-index-llms-openai 0.2.14__tar.gz → 0.2.16__tar.gz
This diff compares the contents of two publicly released versions of the package as published to their registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/PKG-INFO +1 -1
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/llama_index/llms/openai/base.py +6 -0
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/llama_index/llms/openai/utils.py +8 -0
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/README.md +0 -0
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/llama_index/llms/openai/__init__.py +0 -0
{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/llama_index/llms/openai/base.py
RENAMED
@@ -478,6 +478,9 @@ class OpenAI(FunctionCallingLLM):
                     else:
                         delta = ChoiceDelta()
 
+                if delta is None:
+                    continue
+
                 # check if this chunk is the start of a function call
                 if delta.tool_calls:
                     is_function = True
@@ -729,6 +732,9 @@ class OpenAI(FunctionCallingLLM):
                         delta = ChoiceDelta()
                 first_chat_chunk = False
 
+                if delta is None:
+                    continue
+
                 # check if this chunk is the start of a function call
                 if delta.tool_calls:
                     is_function = True
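The base.py change adds the same guard to both the synchronous and asynchronous streaming generators: a streamed chunk can arrive with a `None` delta, and the later `delta.tool_calls` / `delta.content` accesses would then raise `AttributeError`. The sketch below is illustrative only, not the library's actual implementation; the function name `consume_stream` and the accumulation logic are hypothetical, but the placement of the guard mirrors the diff.

```python
from typing import Iterable, Tuple

from openai.types.chat import ChatCompletionChunk
from openai.types.chat.chat_completion_chunk import ChoiceDelta


def consume_stream(chunks: Iterable[ChatCompletionChunk]) -> Tuple[str, bool]:
    """Illustrative loop showing where the `delta is None` guard sits."""
    content = ""
    is_function = False
    for response in chunks:
        if len(response.choices) > 0:
            delta = response.choices[0].delta
        else:
            delta = ChoiceDelta()

        # The guard added in this diff: skip chunks that carry no delta
        # instead of crashing on the attribute accesses below.
        if delta is None:
            continue

        if delta.tool_calls:  # this chunk starts a function/tool call
            is_function = True
        content += delta.content or ""
    return content, is_function
```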
{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/llama_index/llms/openai/utils.py
RENAMED
@@ -229,6 +229,14 @@ def is_chat_model(model: str) -> bool:
 
 
 def is_function_calling_model(model: str) -> bool:
+    # checking whether the model is fine-tuned or not.
+    # fine-tuned model names these days look like:
+    # ft:gpt-3.5-turbo:acemeco:suffix:abc123
+    if model.startswith("ft-"):  # legacy fine-tuning
+        model = model.split(":")[0]
+    elif model.startswith("ft:"):
+        model = model.split(":")[1]
+
     is_chat_model_ = is_chat_model(model)
     is_old = "0314" in model or "0301" in model
 
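The utils.py change normalizes fine-tuned model names before the capability check, so `is_function_calling_model("ft:gpt-3.5-turbo:acemeco:suffix:abc123")` is evaluated against the base model `gpt-3.5-turbo` rather than the raw fine-tuned name. Below is a standalone sketch of just that normalization step; the helper name `base_model_name` is hypothetical, and the real function goes on to consult the package's model tables via `is_chat_model`.

```python
def base_model_name(model: str) -> str:
    """Map a fine-tuned model name to its base model name (sketch of the diff's logic)."""
    if model.startswith("ft-"):  # legacy fine-tuning prefix: keep the part before the first ':'
        return model.split(":")[0]
    if model.startswith("ft:"):  # current prefix, e.g. ft:gpt-3.5-turbo:acemeco:suffix:abc123
        return model.split(":")[1]
    return model


assert base_model_name("ft:gpt-3.5-turbo:acemeco:suffix:abc123") == "gpt-3.5-turbo"
assert base_model_name("gpt-3.5-turbo-0301") == "gpt-3.5-turbo-0301"
```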
{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/README.md
File without changes

{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.16}/llama_index/llms/openai/__init__.py
File without changes