llama-index-llms-openai 0.2.10__tar.gz → 0.2.11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/PKG-INFO +2 -1
- {llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/llama_index/llms/openai/base.py +3 -49
- {llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/llama_index/llms/openai/utils.py +47 -0
- {llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/pyproject.toml +1 -1
- {llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/README.md +0 -0
- {llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/llama_index/llms/openai/__init__.py +0 -0

{llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/PKG-INFO RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.2.10
+Version: 0.2.11
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: llama-index-core (>=0.11.7,<0.12.0)
 Requires-Dist: openai (>=1.40.0,<2.0.0)
 Description-Content-Type: text/markdown
{llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/llama_index/llms/openai/base.py RENAMED

@@ -64,6 +64,7 @@ from llama_index.llms.openai.utils import (
     resolve_openai_credentials,
     to_openai_message_dicts,
     resolve_tool_choice,
+    update_tool_calls,
 )
 from llama_index.core.bridge.pydantic import (
     BaseModel,
@@ -450,53 +451,6 @@ class OpenAI(FunctionCallingLLM):
             additional_kwargs=self._get_response_token_counts(response),
         )
 
-    def _update_tool_calls(
-        self,
-        tool_calls: List[ChoiceDeltaToolCall],
-        tool_calls_delta: Optional[List[ChoiceDeltaToolCall]],
-    ) -> List[ChoiceDeltaToolCall]:
-        """
-        Use the tool_calls_delta objects received from openai stream chunks
-        to update the running tool_calls object.
-
-        Args:
-            tool_calls (List[ChoiceDeltaToolCall]): the list of tool calls
-            tool_calls_delta (ChoiceDeltaToolCall): the delta to update tool_calls
-
-        Returns:
-            List[ChoiceDeltaToolCall]: the updated tool calls
-        """
-        # openai provides chunks consisting of tool_call deltas one tool at a time
-        if tool_calls_delta is None:
-            return tool_calls
-
-        tc_delta = tool_calls_delta[0]
-
-        if len(tool_calls) == 0:
-            tool_calls.append(tc_delta)
-        else:
-            # we need to either update latest tool_call or start a
-            # new tool_call (i.e., multiple tools in this turn) and
-            # accumulate that new tool_call with future delta chunks
-            t = tool_calls[-1]
-            if t.index != tc_delta.index:
-                # the start of a new tool call, so append to our running tool_calls list
-                tool_calls.append(tc_delta)
-            else:
-                # not the start of a new tool call, so update last item of tool_calls
-
-                # validations to get passed by mypy
-                assert t.function is not None
-                assert tc_delta.function is not None
-                assert t.function.arguments is not None
-                assert t.function.name is not None
-                assert t.id is not None
-
-                t.function.arguments += tc_delta.function.arguments or ""
-                t.function.name += tc_delta.function.name or ""
-                t.id += tc_delta.id or ""
-        return tool_calls
-
     @llm_retry_decorator
     def _stream_chat(
         self, messages: Sequence[ChatMessage], **kwargs: Any
@@ -533,7 +487,7 @@ class OpenAI(FunctionCallingLLM):
 
             additional_kwargs = {}
             if is_function:
-                tool_calls = self._update_tool_calls(tool_calls, delta.tool_calls)
+                tool_calls = update_tool_calls(tool_calls, delta.tool_calls)
                 additional_kwargs["tool_calls"] = tool_calls
 
             yield ChatResponse(
@@ -783,7 +737,7 @@ class OpenAI(FunctionCallingLLM):
 
             additional_kwargs = {}
             if is_function:
-                tool_calls = self._update_tool_calls(tool_calls, delta.tool_calls)
+                tool_calls = update_tool_calls(tool_calls, delta.tool_calls)
                 additional_kwargs["tool_calls"] = tool_calls
 
             yield ChatResponse(
{llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/llama_index/llms/openai/utils.py RENAMED

@@ -440,3 +440,50 @@ def resolve_tool_choice(tool_choice: Union[str, dict] = "auto") -> Union[str, dict]:
         return {"type": "function", "function": {"name": tool_choice}}
 
     return tool_choice
+
+
+def update_tool_calls(
+    tool_calls: List[ChoiceDeltaToolCall],
+    tool_calls_delta: Optional[List[ChoiceDeltaToolCall]],
+) -> List[ChoiceDeltaToolCall]:
+    """
+    Use the tool_calls_delta objects received from openai stream chunks
+    to update the running tool_calls object.
+
+    Args:
+        tool_calls (List[ChoiceDeltaToolCall]): the list of tool calls
+        tool_calls_delta (ChoiceDeltaToolCall): the delta to update tool_calls
+
+    Returns:
+        List[ChoiceDeltaToolCall]: the updated tool calls
+    """
+    # openai provides chunks consisting of tool_call deltas one tool at a time
+    if tool_calls_delta is None:
+        return tool_calls
+
+    tc_delta = tool_calls_delta[0]
+
+    if len(tool_calls) == 0:
+        tool_calls.append(tc_delta)
+    else:
+        # we need to either update latest tool_call or start a
+        # new tool_call (i.e., multiple tools in this turn) and
+        # accumulate that new tool_call with future delta chunks
+        t = tool_calls[-1]
+        if t.index != tc_delta.index:
+            # the start of a new tool call, so append to our running tool_calls list
+            tool_calls.append(tc_delta)
+        else:
+            # not the start of a new tool call, so update last item of tool_calls
+
+            # validations to get passed by mypy
+            assert t.function is not None
+            assert tc_delta.function is not None
+            assert t.function.arguments is not None
+            assert t.function.name is not None
+            assert t.id is not None
+
+            t.function.arguments += tc_delta.function.arguments or ""
+            t.function.name += tc_delta.function.name or ""
+            t.id += tc_delta.id or ""
+    return tool_calls
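
To make the accumulation behavior concrete, here is a small, self-contained sketch that drives `update_tool_calls` with hand-built deltas; the call id, function name, and argument fragment are invented for illustration. As the comment in the function notes, OpenAI streams tool-call deltas one tool at a time, so only the first element of each delta list is consumed.

```python
# Illustrative sketch: hand-built deltas (values are made up) fed through
# update_tool_calls to show how name/arguments/id fragments accumulate.
from typing import List

from openai.types.chat.chat_completion_chunk import (
    ChoiceDeltaToolCall,
    ChoiceDeltaToolCallFunction,
)

from llama_index.llms.openai.utils import update_tool_calls

tool_calls: List[ChoiceDeltaToolCall] = []

# First chunk for tool index 0 carries the call id and function name.
tool_calls = update_tool_calls(
    tool_calls,
    [
        ChoiceDeltaToolCall(
            index=0,
            id="call_123",
            type="function",
            function=ChoiceDeltaToolCallFunction(name="get_weather", arguments=""),
        )
    ],
)

# Subsequent chunks for the same index carry argument fragments only;
# they are appended to the running entry rather than starting a new one.
tool_calls = update_tool_calls(
    tool_calls,
    [
        ChoiceDeltaToolCall(
            index=0,
            function=ChoiceDeltaToolCallFunction(arguments='{"city": "Paris"}'),
        )
    ],
)

assert tool_calls[0].function.name == "get_weather"
assert tool_calls[0].function.arguments == '{"city": "Paris"}'
```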

{llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/README.md RENAMED
File without changes

{llama_index_llms_openai-0.2.10 → llama_index_llms_openai-0.2.11}/llama_index/llms/openai/__init__.py RENAMED
File without changes