llama-index-llms-openai 0.2.10__py3-none-any.whl → 0.2.12__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
@@ -64,6 +64,7 @@ from llama_index.llms.openai.utils import (
     resolve_openai_credentials,
     to_openai_message_dicts,
     resolve_tool_choice,
+    update_tool_calls,
 )
 from llama_index.core.bridge.pydantic import (
     BaseModel,
@@ -450,53 +451,6 @@ class OpenAI(FunctionCallingLLM):
             additional_kwargs=self._get_response_token_counts(response),
         )
 
-    def _update_tool_calls(
-        self,
-        tool_calls: List[ChoiceDeltaToolCall],
-        tool_calls_delta: Optional[List[ChoiceDeltaToolCall]],
-    ) -> List[ChoiceDeltaToolCall]:
-        """
-        Use the tool_calls_delta objects received from openai stream chunks
-        to update the running tool_calls object.
-
-        Args:
-            tool_calls (List[ChoiceDeltaToolCall]): the list of tool calls
-            tool_calls_delta (ChoiceDeltaToolCall): the delta to update tool_calls
-
-        Returns:
-            List[ChoiceDeltaToolCall]: the updated tool calls
-        """
-        # openai provides chunks consisting of tool_call deltas one tool at a time
-        if tool_calls_delta is None:
-            return tool_calls
-
-        tc_delta = tool_calls_delta[0]
-
-        if len(tool_calls) == 0:
-            tool_calls.append(tc_delta)
-        else:
-            # we need to either update latest tool_call or start a
-            # new tool_call (i.e., multiple tools in this turn) and
-            # accumulate that new tool_call with future delta chunks
-            t = tool_calls[-1]
-            if t.index != tc_delta.index:
-                # the start of a new tool call, so append to our running tool_calls list
-                tool_calls.append(tc_delta)
-            else:
-                # not the start of a new tool call, so update last item of tool_calls
-
-                # validations to get passed by mypy
-                assert t.function is not None
-                assert tc_delta.function is not None
-                assert t.function.arguments is not None
-                assert t.function.name is not None
-                assert t.id is not None
-
-                t.function.arguments += tc_delta.function.arguments or ""
-                t.function.name += tc_delta.function.name or ""
-                t.id += tc_delta.id or ""
-        return tool_calls
-
     @llm_retry_decorator
     def _stream_chat(
         self, messages: Sequence[ChatMessage], **kwargs: Any
@@ -533,8 +487,9 @@ class OpenAI(FunctionCallingLLM):
 
                 additional_kwargs = {}
                 if is_function:
-                    tool_calls = self._update_tool_calls(tool_calls, delta.tool_calls)
-                    additional_kwargs["tool_calls"] = tool_calls
+                    tool_calls = update_tool_calls(tool_calls, delta.tool_calls)
+                    if tool_calls:
+                        additional_kwargs["tool_calls"] = tool_calls
 
                 yield ChatResponse(
                     message=ChatMessage(
@@ -783,8 +738,9 @@ class OpenAI(FunctionCallingLLM):
 
                 additional_kwargs = {}
                 if is_function:
-                    tool_calls = self._update_tool_calls(tool_calls, delta.tool_calls)
-                    additional_kwargs["tool_calls"] = tool_calls
+                    tool_calls = update_tool_calls(tool_calls, delta.tool_calls)
+                    if tool_calls:
+                        additional_kwargs["tool_calls"] = tool_calls
 
                 yield ChatResponse(
                     message=ChatMessage(
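
In both the synchronous and asynchronous streaming paths, accumulation now goes through the module-level update_tool_calls helper, and "tool_calls" is only attached to additional_kwargs once at least one delta has actually been accumulated. A minimal consumer-side sketch of where that shows up; the model name, the prompt, and the assumption that OPENAI_API_KEY is set in the environment are illustrative, not part of this diff:

# Sketch only: assumes OPENAI_API_KEY is set; the model and prompt are arbitrary.
from llama_index.core.llms import ChatMessage
from llama_index.llms.openai import OpenAI

llm = OpenAI(model="gpt-4o-mini")
for chunk in llm.stream_chat([ChatMessage(role="user", content="What is 2 + 2?")]):
    # After this change, "tool_calls" appears in additional_kwargs only once at
    # least one tool-call delta has been accumulated for the current turn.
    partial_tool_calls = chunk.message.additional_kwargs.get("tool_calls", [])
    if partial_tool_calls:
        print("partial tool calls:", partial_tool_calls)
print(chunk.message.content)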
@@ -285,7 +285,7 @@ def from_openai_message(openai_message: ChatCompletionMessage) -> ChatMessage:
     # function_call = None # deprecated in OpenAI v 1.1.0
 
     additional_kwargs: Dict[str, Any] = {}
-    if openai_message.tool_calls is not None:
+    if openai_message.tool_calls:
         tool_calls: List[ChatCompletionMessageToolCall] = openai_message.tool_calls
         additional_kwargs.update(tool_calls=tool_calls)
 
@@ -440,3 +440,50 @@ def resolve_tool_choice(tool_choice: Union[str, dict] = "auto") -> Union[str, di
         return {"type": "function", "function": {"name": tool_choice}}
 
     return tool_choice
+
+
+def update_tool_calls(
+    tool_calls: List[ChoiceDeltaToolCall],
+    tool_calls_delta: Optional[List[ChoiceDeltaToolCall]],
+) -> List[ChoiceDeltaToolCall]:
+    """
+    Use the tool_calls_delta objects received from openai stream chunks
+    to update the running tool_calls object.
+
+    Args:
+        tool_calls (List[ChoiceDeltaToolCall]): the list of tool calls
+        tool_calls_delta (ChoiceDeltaToolCall): the delta to update tool_calls
+
+    Returns:
+        List[ChoiceDeltaToolCall]: the updated tool calls
+    """
+    # openai provides chunks consisting of tool_call deltas one tool at a time
+    if tool_calls_delta is None:
+        return tool_calls
+
+    tc_delta = tool_calls_delta[0]
+
+    if len(tool_calls) == 0:
+        tool_calls.append(tc_delta)
+    else:
+        # we need to either update latest tool_call or start a
+        # new tool_call (i.e., multiple tools in this turn) and
+        # accumulate that new tool_call with future delta chunks
+        t = tool_calls[-1]
+        if t.index != tc_delta.index:
+            # the start of a new tool call, so append to our running tool_calls list
+            tool_calls.append(tc_delta)
+        else:
+            # not the start of a new tool call, so update last item of tool_calls
+
+            # validations to get passed by mypy
+            assert t.function is not None
+            assert tc_delta.function is not None
+            assert t.function.arguments is not None
+            assert t.function.name is not None
+            assert t.id is not None
+
+            t.function.arguments += tc_delta.function.arguments or ""
+            t.function.name += tc_delta.function.name or ""
+            t.id += tc_delta.id or ""
+    return tool_calls
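
With the helper now living in llama_index/llms/openai/utils.py, it can be exercised on its own. A minimal sketch of how it accumulates streamed deltas; the delta objects below are hand-built stand-ins for what the OpenAI streaming API would emit, and the id, tool name, and argument fragments are illustrative:

# Sketch only: hand-built deltas mimicking what the OpenAI stream would produce.
from typing import List

from openai.types.chat.chat_completion_chunk import (
    ChoiceDeltaToolCall,
    ChoiceDeltaToolCallFunction,
)

from llama_index.llms.openai.utils import update_tool_calls

# Running list of accumulated tool calls, as the streaming generators keep it.
tool_calls: List[ChoiceDeltaToolCall] = []

# First chunk for index 0 carries the call id, the tool name, and the start of the arguments.
tool_calls = update_tool_calls(
    tool_calls,
    [
        ChoiceDeltaToolCall(
            index=0,
            id="call_abc123",
            type="function",
            function=ChoiceDeltaToolCallFunction(name="get_weather", arguments='{"city": '),
        )
    ],
)

# Later chunks for the same index only carry more argument text; they get appended
# onto the last accumulated tool call rather than starting a new one.
tool_calls = update_tool_calls(
    tool_calls,
    [
        ChoiceDeltaToolCall(
            index=0,
            function=ChoiceDeltaToolCallFunction(arguments='"Paris"}'),
        )
    ],
)

print(tool_calls[0].function.arguments)  # '{"city": "Paris"}'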
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.2.10
+Version: 0.2.12
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: llama-index-core (>=0.11.7,<0.12.0)
 Requires-Dist: openai (>=1.40.0,<2.0.0)
 Description-Content-Type: text/markdown
@@ -0,0 +1,6 @@
+llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
+llama_index/llms/openai/base.py,sha256=tyapzQX3I3WMgH4qwAKU4A7eaa73lnJh9Bc34pvMD0k,35197
+llama_index/llms/openai/utils.py,sha256=o1_DwqNIvmrL8KcF6IfBXOOll0aXVgGNhVPhbUYTWic,15759
+llama_index_llms_openai-0.2.12.dist-info/METADATA,sha256=OrUK6ucgrWE7bz791ZY3t60lafxD2EPMLL5PcA7O6As,649
+llama_index_llms_openai-0.2.12.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_index_llms_openai-0.2.12.dist-info/RECORD,,
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: poetry-core 1.6.1
+Generator: poetry-core 1.9.0
 Root-Is-Purelib: true
 Tag: py3-none-any
@@ -1,6 +0,0 @@
-llama_index/llms/openai/__init__.py,sha256=vm3cIBSGkBFlE77GyfyN0EhpJcnJZN95QMhPN53EkbE,148
-llama_index/llms/openai/base.py,sha256=h3jab2lijnvWRFwx2M7dlWsY48JBMmehDsU55ebjHBE,36997
-llama_index/llms/openai/utils.py,sha256=f1oqKXXfZER1DGNVZvlJACKiEPH_2rCcgduwlaxazEs,14052
-llama_index_llms_openai-0.2.10.dist-info/METADATA,sha256=ktNcXZfq_JOaH0ex6Z3wWSysHxjCtXWuOVzk2miZ_-I,598
-llama_index_llms_openai-0.2.10.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-llama_index_llms_openai-0.2.10.dist-info/RECORD,,