llama-index-llms-openai 0.6.0__tar.gz → 0.6.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/PKG-INFO +2 -2
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/base.py +1 -1
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/utils.py +2 -0
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/pyproject.toml +2 -2
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/.gitignore +0 -0
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/LICENSE +0 -0
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/README.md +0 -0
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/__init__.py +0 -0
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/py.typed +0 -0
- {llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/responses.py +0 -0
{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/PKG-INFO
RENAMED

@@ -1,12 +1,12 @@
 Metadata-Version: 2.4
 Name: llama-index-llms-openai
-Version: 0.6.0
+Version: 0.6.2
 Summary: llama-index llms openai integration
 Author: llama-index
 License-Expression: MIT
 License-File: LICENSE
 Requires-Python: <4.0,>=3.9
-Requires-Dist: llama-index-core<0.15,>=0.
+Requires-Dist: llama-index-core<0.15,>=0.14.3
 Requires-Dist: openai<2,>=1.108.1
 Description-Content-Type: text/markdown

{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/base.py
RENAMED

@@ -1004,7 +1004,7 @@ class OpenAI(FunctionCallingLLM):
             del llm_kwargs["tool_choice"]
         return llm_kwargs

-    def _should_use_structure_outputs(self):
+    def _should_use_structure_outputs(self) -> bool:
         return (
             self.pydantic_program_mode == PydanticProgramMode.DEFAULT
             and is_json_schema_supported(self.model)
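The only change in base.py is the added -> bool return annotation; runtime behavior is unchanged. For context, this helper appears to gate whether the integration uses OpenAI's native JSON-schema structured outputs when a Pydantic output class is requested (default pydantic program mode plus a model for which is_json_schema_supported() returns True). A minimal, hedged sketch of exercising that path follows; the model name and the Invoice class are illustrative assumptions, not taken from this diff.

# Hedged sketch: drives the structured-output path that
# _should_use_structure_outputs() appears to gate. Requires OPENAI_API_KEY.
from pydantic import BaseModel

from llama_index.core import PromptTemplate
from llama_index.llms.openai import OpenAI


class Invoice(BaseModel):
    vendor: str
    total: float


# Default pydantic_program_mode; the model name is an illustrative choice
# of a model that supports JSON-schema response formats.
llm = OpenAI(model="gpt-4o-mini")

# structured_predict() returns an Invoice instance; the OpenAI class decides
# internally whether to use native structured outputs or another program mode.
invoice = llm.structured_predict(
    Invoice,
    PromptTemplate("Extract the invoice fields from: {text}"),
    text="Vendor: Acme Corp, total 42.50 USD",
)
print(invoice)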
{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/pyproject.toml
RENAMED

@@ -27,13 +27,13 @@ dev = [

 [project]
 name = "llama-index-llms-openai"
-version = "0.6.0"
+version = "0.6.2"
 description = "llama-index llms openai integration"
 authors = [{name = "llama-index"}]
 requires-python = ">=3.9,<4.0"
 readme = "README.md"
 license = "MIT"
-dependencies = ["openai>=1.108.1,<2", "llama-index-core>=0.
+dependencies = ["openai>=1.108.1,<2", "llama-index-core>=0.14.3,<0.15"]

 [tool.codespell]
 check-filenames = true
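The pyproject.toml change mirrors the PKG-INFO metadata above: the package version moves to 0.6.2 and the llama-index-core floor rises to 0.14.3. A small, illustrative check of which versions are actually resolved in an environment after upgrading (standard library only; assumes nothing beyond the distribution names shown in this diff):

# Hedged sketch: report the installed versions of the distributions touched
# by this diff, or note when one is absent from the environment.
from importlib.metadata import PackageNotFoundError, version

for dist in ("llama-index-llms-openai", "llama-index-core", "openai"):
    try:
        print(dist, version(dist))
    except PackageNotFoundError:
        print(dist, "not installed")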
{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/.gitignore
RENAMED
File without changes

{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/LICENSE
RENAMED
File without changes

{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/README.md
RENAMED
File without changes

{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/__init__.py
RENAMED
File without changes

{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/py.typed
RENAMED
File without changes

{llama_index_llms_openai-0.6.0 → llama_index_llms_openai-0.6.2}/llama_index/llms/openai/responses.py
RENAMED
File without changes