llama-index-llms-openai 0.2.3__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- llama_index/llms/openai/base.py +9 -4
- llama_index/llms/openai/utils.py +22 -4
- {llama_index_llms_openai-0.2.3.dist-info → llama_index_llms_openai-0.2.4.dist-info}/METADATA +2 -1
- llama_index_llms_openai-0.2.4.dist-info/RECORD +6 -0
- {llama_index_llms_openai-0.2.3.dist-info → llama_index_llms_openai-0.2.4.dist-info}/WHEEL +1 -1
- llama_index_llms_openai-0.2.3.dist-info/RECORD +0 -6
llama_index/llms/openai/base.py
CHANGED

@@ -52,6 +52,7 @@ from llama_index.core.llms.function_calling import FunctionCallingLLM
 from llama_index.core.llms.llm import ToolSelection
 from llama_index.core.types import BaseOutputParser, PydanticProgramMode, Model
 from llama_index.llms.openai.utils import (
+    O1_MODELS,
     OpenAIToolCall,
     create_retry_decorator,
     from_openai_completion_logprobs,
@@ -331,6 +332,10 @@ class OpenAI(FunctionCallingLLM):
                model=self._get_model_name()
            ),
            model_name=self.model,
+           # TODO: Temp for O1 beta
+           system_role=MessageRole.USER
+           if self.model in O1_MODELS
+           else MessageRole.SYSTEM,
        )

    @llm_chat_callback()
@@ -410,7 +415,7 @@ class OpenAI(FunctionCallingLLM):
    @llm_retry_decorator
    def _chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
        client = self._get_client()
-       message_dicts = to_openai_message_dicts(messages)
+       message_dicts = to_openai_message_dicts(messages, model=self.model)

        if self.reuse_client:
            response = client.chat.completions.create(
@@ -492,7 +497,7 @@ class OpenAI(FunctionCallingLLM):
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponseGen:
        client = self._get_client()
-       message_dicts = to_openai_message_dicts(messages)
+       message_dicts = to_openai_message_dicts(messages, model=self.model)

        def gen() -> ChatResponseGen:
            content = ""
@@ -698,7 +703,7 @@ class OpenAI(FunctionCallingLLM):
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponse:
        aclient = self._get_aclient()
-       message_dicts = to_openai_message_dicts(messages)
+       message_dicts = to_openai_message_dicts(messages, model=self.model)

        if self.reuse_client:
            response = await aclient.chat.completions.create(
@@ -731,7 +736,7 @@ class OpenAI(FunctionCallingLLM):
        self, messages: Sequence[ChatMessage], **kwargs: Any
    ) -> ChatResponseAsyncGen:
        aclient = self._get_aclient()
-       message_dicts = to_openai_message_dicts(messages)
+       message_dicts = to_openai_message_dicts(messages, model=self.model)

        async def gen() -> ChatResponseAsyncGen:
            content = ""
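
The practical effect of these base.py changes: an OpenAI LLM built for an o1-series model now advertises MessageRole.USER as its system role in its metadata, and every chat path passes model=self.model into message conversion. Below is a minimal sketch of the expected behavior, assuming llama-index-llms-openai 0.2.4 is installed; "gpt-4o" is only an example of a non-o1 chat model, and no API request is issued by these checks.

    from llama_index.core.llms import MessageRole
    from llama_index.llms.openai import OpenAI

    # o1 beta models report USER as the system role, so framework code that
    # would normally emit a system prompt falls back to a user message.
    o1_llm = OpenAI(model="o1-mini")
    assert o1_llm.metadata.system_role == MessageRole.USER

    # Non-o1 chat models keep the default SYSTEM role.
    gpt4_llm = OpenAI(model="gpt-4o")
    assert gpt4_llm.metadata.system_role == MessageRole.SYSTEM
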
llama_index/llms/openai/utils.py
CHANGED

@@ -29,6 +29,10 @@ DEFAULT_OPENAI_API_TYPE = "open_ai"
 DEFAULT_OPENAI_API_BASE = "https://api.openai.com/v1"
 DEFAULT_OPENAI_API_VERSION = ""

+O1_MODELS: Dict[str, int] = {
+    "o1-preview": 128000,
+    "o1-mini": 128000,
+}

 GPT4_MODELS: Dict[str, int] = {
     # stable model names:
@@ -109,6 +113,7 @@ GPT3_MODELS: Dict[str, int] = {
 }

 ALL_AVAILABLE_MODELS = {
+    **O1_MODELS,
     **GPT4_MODELS,
     **TURBO_MODELS,
     **GPT3_5_MODELS,
@@ -117,6 +122,7 @@ ALL_AVAILABLE_MODELS = {
 }

 CHAT_MODELS = {
+    **O1_MODELS,
     **GPT4_MODELS,
     **TURBO_MODELS,
     **AZURE_TURBO_MODELS,
@@ -220,11 +226,15 @@ def is_chat_model(model: str) -> bool:
 def is_function_calling_model(model: str) -> bool:
     is_chat_model_ = is_chat_model(model)
     is_old = "0314" in model or "0301" in model
-    return is_chat_model_ and not is_old
+
+    # TODO: This is temporary for openai's beta
+    is_o1_beta = "o1" in model
+
+    return is_chat_model_ and not is_old and not is_o1_beta


 def to_openai_message_dict(
-    message: ChatMessage, drop_none: bool = False
+    message: ChatMessage, drop_none: bool = False, model: Optional[str] = None
 ) -> ChatCompletionMessageParam:
     """Convert generic message to OpenAI message dict."""
     message_dict = {
@@ -232,6 +242,11 @@ def to_openai_message_dict(
         "content": message.content,
     }

+    # TODO: O1 models do not support system prompts
+    if model is not None and model in O1_MODELS:
+        if message_dict["role"] == "system":
+            message_dict["role"] = "user"
+
     # NOTE: openai messages have additional arguments:
     # - function messages have `name`
     # - assistant messages have optional `function_call`
@@ -247,11 +262,14 @@ def to_openai_message_dict(


 def to_openai_message_dicts(
-    messages: Sequence[ChatMessage], drop_none: bool = False
+    messages: Sequence[ChatMessage],
+    drop_none: bool = False,
+    model: Optional[str] = None,
 ) -> List[ChatCompletionMessageParam]:
     """Convert generic messages to OpenAI message dicts."""
     return [
-        to_openai_message_dict(message, drop_none=drop_none) for message in messages
+        to_openai_message_dict(message, drop_none=drop_none, model=model)
+        for message in messages
     ]
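
Taken together, the utils.py changes mean the conversion helpers silently downgrade a system message to a user message when an o1 beta model is named, and the o1 models are reported as not supporting function calling. A short sketch of the expected behavior, assuming llama-index-llms-openai 0.2.4; the message contents are placeholders and nothing here calls the OpenAI API.

    from llama_index.core.llms import ChatMessage, MessageRole
    from llama_index.llms.openai.utils import (
        is_function_calling_model,
        to_openai_message_dicts,
    )

    messages = [
        ChatMessage(role=MessageRole.SYSTEM, content="You are a terse assistant."),
        ChatMessage(role=MessageRole.USER, content="Say hello."),
    ]

    # With an o1 model hint the system message is rewritten to a user message.
    o1_dicts = to_openai_message_dicts(messages, model="o1-preview")
    assert o1_dicts[0]["role"] == "user"

    # Without the model hint (or for non-o1 models) roles pass through unchanged.
    plain_dicts = to_openai_message_dicts(messages)
    assert plain_dicts[0]["role"] == "system"

    # o1 beta models are excluded from function calling for now.
    assert not is_function_calling_model("o1-preview")
    assert is_function_calling_model("gpt-4o")
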
{llama_index_llms_openai-0.2.3.dist-info → llama_index_llms_openai-0.2.4.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.2.3
+Version: 0.2.4
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: llama-index-agent-openai (>=0.3.1,<0.4.0)
 Requires-Dist: llama-index-core (>=0.11.7,<0.12.0)
 Requires-Dist: openai (>=1.40.0,<2.0.0)
llama_index_llms_openai-0.2.4.dist-info/RECORD
ADDED

@@ -0,0 +1,6 @@
+llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
+llama_index/llms/openai/base.py,sha256=5wlEeigvfvdrhlm-0EEXvwuJm2t1o6ZJ9Dl2ob8oNLk,36998
+llama_index/llms/openai/utils.py,sha256=MM8xilN70w4jFXsf-aoE4HdYWD3zptLEYjpeBZ02lEc,13571
+llama_index_llms_openai-0.2.4.dist-info/METADATA,sha256=_OiQkheD5dE-b24cKH-uehLz8hnEUwSTjXQgJYEcFBA,705
+llama_index_llms_openai-0.2.4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_index_llms_openai-0.2.4.dist-info/RECORD,,
llama_index_llms_openai-0.2.3.dist-info/RECORD
DELETED

@@ -1,6 +0,0 @@
-llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
-llama_index/llms/openai/base.py,sha256=gn87WGrKkZ2llOos1TishK2XVoWZc05hGK7YrBk8kh0,36757
-llama_index/llms/openai/utils.py,sha256=VuDXkLR_BGVqoZc9IJqiJlVloZwG9Z7s1nGPAhlbvWE,13079
-llama_index_llms_openai-0.2.3.dist-info/METADATA,sha256=ig-bKzvFZSaMSnTtS5NhHVkdr_3HFHRcg2KOXnkLZMo,654
-llama_index_llms_openai-0.2.3.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-llama_index_llms_openai-0.2.3.dist-info/RECORD,,