llama-index-llms-openai 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
@@ -926,8 +926,10 @@ class OpenAI(FunctionCallingLLM):
     ) -> BaseModel:
         """Structured predict."""
         llm_kwargs = llm_kwargs or {}
+        all_kwargs = {**llm_kwargs, **kwargs}
+
         llm_kwargs["tool_choice"] = (
-            "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"]
+            "required" if "tool_choice" not in all_kwargs else all_kwargs["tool_choice"]
         )
         # by default structured prediction uses function calling to extract structured outputs
         # here we force tool_choice to be required
@@ -939,8 +941,10 @@ class OpenAI(FunctionCallingLLM):
     ) -> BaseModel:
         """Structured predict."""
         llm_kwargs = llm_kwargs or {}
+        all_kwargs = {**llm_kwargs, **kwargs}
+
         llm_kwargs["tool_choice"] = (
-            "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"]
+            "required" if "tool_choice" not in all_kwargs else all_kwargs["tool_choice"]
         )
         # by default structured prediction uses function calling to extract structured outputs
         # here we force tool_choice to be required
@@ -952,8 +956,10 @@ class OpenAI(FunctionCallingLLM):
     ) -> Generator[Union[Model, List[Model]], None, None]:
         """Stream structured predict."""
         llm_kwargs = llm_kwargs or {}
+        all_kwargs = {**llm_kwargs, **kwargs}
+
         llm_kwargs["tool_choice"] = (
-            "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"]
+            "required" if "tool_choice" not in all_kwargs else all_kwargs["tool_choice"]
         )
         # by default structured prediction uses function calling to extract structured outputs
         # here we force tool_choice to be required
@@ -965,8 +971,10 @@ class OpenAI(FunctionCallingLLM):
     ) -> Generator[Union[Model, List[Model]], None, None]:
         """Stream structured predict."""
         llm_kwargs = llm_kwargs or {}
+        all_kwargs = {**llm_kwargs, **kwargs}
+
         llm_kwargs["tool_choice"] = (
-            "required" if "tool_choice" not in llm_kwargs else llm_kwargs["tool_choice"]
+            "required" if "tool_choice" not in all_kwargs else all_kwargs["tool_choice"]
         )
         # by default structured prediction uses function calling to extract structured outputs
         # here we force tool_choice to be required
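
The same two-line change appears in all four structured-predict variants: the caller's plain keyword arguments are now merged with llm_kwargs before tool_choice is resolved. Before this change only llm_kwargs was consulted, so a tool_choice passed as a plain keyword argument did not prevent "required" from being forced; with the merge it does. A minimal sketch of the new resolution logic (a standalone helper written for illustration, not the library's code):

def resolve_tool_choice(llm_kwargs=None, **kwargs):
    # Mirror of the patched lookup: plain kwargs now participate in the check.
    llm_kwargs = llm_kwargs or {}
    all_kwargs = {**llm_kwargs, **kwargs}
    return "required" if "tool_choice" not in all_kwargs else all_kwargs["tool_choice"]

# Default behavior is unchanged; an explicit tool_choice is now honored
# whether it arrives via llm_kwargs or as a plain keyword argument.
assert resolve_tool_choice() == "required"
assert resolve_tool_choice(tool_choice="auto") == "auto"
assert resolve_tool_choice(llm_kwargs={"tool_choice": "none"}) == "none"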
@@ -54,6 +54,7 @@ GPT4_MODELS: Dict[str, int] = {
     "gpt-4o": 128000,
     "gpt-4o-2024-05-13": 128000,
     "gpt-4o-2024-08-06": 128000,
+    "gpt-4o-2024-11-20": 128000,
     # Intended for research and evaluation
     "chatgpt-4o-latest": 128000,
     "gpt-4o-mini": 128000,
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.3.0
+Version: 0.3.2
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -0,0 +1,6 @@
+llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
+llama_index/llms/openai/base.py,sha256=msUhTxmOq3JVzKPW_gr4qOuKsoQWAaAPtcmCI0hK6lY,35648
+llama_index/llms/openai/utils.py,sha256=3fHw4U-fRa6yADE5DjFbJWs6gagSt3-aqUn8oHdm8Ec,16180
+llama_index_llms_openai-0.3.2.dist-info/METADATA,sha256=suUFtKfPQU5-6OAJUxwwGake3TQffyNZm3ZR7ErE2Y8,3320
+llama_index_llms_openai-0.3.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_index_llms_openai-0.3.2.dist-info/RECORD,,
@@ -1,6 +0,0 @@
-llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
-llama_index/llms/openai/base.py,sha256=wg509NgBHVLgB1Z9bevMjJenu3BFf4WZvlGP40eVhj8,35460
-llama_index/llms/openai/utils.py,sha256=VPRR9leKro1InT-12v1tE9qsu6DzFncmlIzHqApCPfo,16147
-llama_index_llms_openai-0.3.0.dist-info/METADATA,sha256=c_108MozjVz1erz8Nj9brgj4pAZS_dKLT2jHoC07sX4,3320
-llama_index_llms_openai-0.3.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-llama_index_llms_openai-0.3.0.dist-info/RECORD,,