llama-index-llms-openai 0.2.14__tar.gz → 0.2.15__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/PKG-INFO +1 -1
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/utils.py +8 -0
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/README.md +0 -0
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/__init__.py +0 -0
- {llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/base.py +0 -0
{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/utils.py
RENAMED

@@ -229,6 +229,14 @@ def is_chat_model(model: str) -> bool:
 
 
 def is_function_calling_model(model: str) -> bool:
+    # checking whether the model is fine-tuned or not.
+    # fine-tuned model names these days look like:
+    # ft:gpt-3.5-turbo:acemeco:suffix:abc123
+    if model.startswith("ft-"):  # legacy fine-tuning
+        model = model.split(":")[0]
+    elif model.startswith("ft:"):
+        model = model.split(":")[1]
+
     is_chat_model_ = is_chat_model(model)
     is_old = "0314" in model or "0301" in model
 
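The substantive change in this release is the new prefix handling in `is_function_calling_model`: fine-tuned model names (e.g. `ft:gpt-3.5-turbo:acemeco:suffix:abc123`) are reduced to their base model name before the chat-model and age checks run. Below is a minimal standalone sketch of that normalization; the helper name and the example calls are illustrative only and are not part of the package.

```python
def normalize_fine_tuned_name(model: str) -> str:
    # Fine-tuned model names look like ft:gpt-3.5-turbo:acemeco:suffix:abc123,
    # so strip the fine-tuning wrapper before looking up the base model.
    if model.startswith("ft-"):  # legacy fine-tuning: keep the part before the first colon
        return model.split(":")[0]
    if model.startswith("ft:"):  # current fine-tuning: the base model is the second field
        return model.split(":")[1]
    return model  # not fine-tuned: leave the name untouched


print(normalize_fine_tuned_name("ft:gpt-3.5-turbo:acemeco:suffix:abc123"))  # gpt-3.5-turbo
print(normalize_fine_tuned_name("gpt-4o"))  # gpt-4o (unchanged)
```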
{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/README.md
RENAMED
File without changes

{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/__init__.py
RENAMED
File without changes

{llama_index_llms_openai-0.2.14 → llama_index_llms_openai-0.2.15}/llama_index/llms/openai/base.py
RENAMED
File without changes