llama-index-llms-openai 0.1.27__tar.gz → 0.1.29__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/PKG-INFO +2 -1
- {llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/llama_index/llms/openai/base.py +21 -0
- {llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/llama_index/llms/openai/utils.py +1 -0
- {llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/pyproject.toml +2 -1
- {llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/README.md +0 -0
- {llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/llama_index/llms/openai/__init__.py +0 -0
{llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.1.27
+Version: 0.1.29
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -12,6 +12,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: llama-index-core (>=0.10.57,<0.11.0)
+Requires-Dist: openai (>=1.40.0,<2.0.0)
 Description-Content-Type: text/markdown
 
 # LlamaIndex Llms Integration: Openai
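The two PKG-INFO hunks above are the packaging view of this release: the version bumps to 0.1.29 and a direct `openai (>=1.40.0,<2.0.0)` requirement appears. A quick way to confirm what is actually installed is to read the distribution metadata; this is a minimal standard-library sketch added for illustration, not part of the package:

    from importlib.metadata import requires, version

    # Installed distribution version; expected to be 0.1.29 after upgrading.
    print(version("llama-index-llms-openai"))

    # Declared requirements; the list should now include openai (>=1.40.0,<2.0.0).
    print(requires("llama-index-llms-openai"))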
{llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/llama_index/llms/openai/base.py
RENAMED
@@ -201,6 +201,10 @@ class OpenAI(FunctionCallingLLM):
     api_key: str = Field(default=None, description="The OpenAI API key.")
     api_base: str = Field(description="The base URL for OpenAI API.")
     api_version: str = Field(description="The API version for OpenAI API.")
+    strict: bool = Field(
+        default=False,
+        description="Whether to use strict mode for invoking tools/using schemas.",
+    )
 
     _client: Optional[SyncOpenAI] = PrivateAttr()
     _aclient: Optional[AsyncOpenAI] = PrivateAttr()
@@ -229,6 +233,7 @@ class OpenAI(FunctionCallingLLM):
         completion_to_prompt: Optional[Callable[[str], str]] = None,
         pydantic_program_mode: PydanticProgramMode = PydanticProgramMode.DEFAULT,
         output_parser: Optional[BaseOutputParser] = None,
+        strict: bool = False,
         **kwargs: Any,
     ) -> None:
         additional_kwargs = additional_kwargs or {}
@@ -257,6 +262,7 @@ class OpenAI(FunctionCallingLLM):
             completion_to_prompt=completion_to_prompt,
             pydantic_program_mode=pydantic_program_mode,
             output_parser=output_parser,
+            strict=strict,
             **kwargs,
         )
 
@@ -832,6 +838,7 @@ class OpenAI(FunctionCallingLLM):
         verbose: bool = False,
         allow_parallel_tool_calls: bool = False,
         tool_choice: Union[str, dict] = "auto",
+        strict: Optional[bool] = None,
         **kwargs: Any,
     ) -> Dict[str, Any]:
         """Predict and call the tool."""
@@ -840,6 +847,20 @@ class OpenAI(FunctionCallingLLM):
         # misralai uses the same openai tool format
         tool_specs = [tool.metadata.to_openai_tool() for tool in tools]
 
+        # if strict is passed in, use, else default to the class-level attribute, else default to True`
+        if strict is not None:
+            strict = strict
+        else:
+            strict = self.strict
+
+        if self.metadata.is_function_calling_model:
+            for tool_spec in tool_specs:
+                if tool_spec["type"] == "function":
+                    tool_spec["function"]["strict"] = strict
+                    tool_spec["function"]["parameters"][
+                        "additionalProperties"
+                    ] = False  # in current openai 1.40.0 it is always false.
+
         if isinstance(user_msg, str):
             user_msg = ChatMessage(role=MessageRole.USER, content=user_msg)
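Taken together, the base.py hunks add a `strict` field (default `False`), expose it as a constructor argument, and let a per-call `strict` value override the class-level default when tool specs are prepared; for function-calling models, each function tool spec gets `"strict"` set and `"additionalProperties": false` on its parameters. A rough usage sketch, assuming the 0.1.29 API: the `multiply` tool, the model name, and the per-call override reaching the tool-preparation step via `**kwargs` are illustrative assumptions, not taken from the diff:

    from llama_index.core.tools import FunctionTool
    from llama_index.llms.openai import OpenAI

    def multiply(a: int, b: int) -> int:
        """Multiply two integers."""
        return a * b

    tool = FunctionTool.from_defaults(fn=multiply)

    # Class-level default: ask OpenAI to enforce the tool's JSON schema exactly.
    llm = OpenAI(model="gpt-4o-mini", strict=True)

    # The per-call value, if forwarded to the tool-preparation step, overrides
    # the class-level default (assumption: **kwargs reach that code path).
    response = llm.predict_and_call([tool], "What is 3 times 4?", strict=False)
    print(str(response))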
{llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/pyproject.toml
RENAMED
@@ -29,11 +29,12 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-openai"
 readme = "README.md"
-version = "0.1.27"
+version = "0.1.29"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
 llama-index-core = "^0.10.57"
+openai = "^1.40.0"
 
 [tool.poetry.group.dev.dependencies]
 ipython = "8.10.0"
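The caret constraint `openai = "^1.40.0"` is Poetry's notation for the same range PKG-INFO advertises, `>=1.40.0,<2.0.0`. For environments that pin openai independently, a small runtime guard could look like the sketch below (assumption: the third-party `packaging` library is available; it is not a dependency of this package):

    from importlib.metadata import version
    from packaging.version import Version

    # The strict tool-schema fields rely on openai 1.40.0 or newer.
    installed = Version(version("openai"))
    if not (Version("1.40.0") <= installed < Version("2.0.0")):
        raise RuntimeError("llama-index-llms-openai 0.1.29 expects openai>=1.40.0,<2.0.0")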
{llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/README.md
File without changes
{llama_index_llms_openai-0.1.27 → llama_index_llms_openai-0.1.29}/llama_index/llms/openai/__init__.py
File without changes