liteai-sdk 0.3.22__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- liteai_sdk/__init__.py +16 -7
- liteai_sdk/param_parser.py +17 -2
- liteai_sdk/tool/__init__.py +0 -310
- liteai_sdk/tool/execute.py +1 -1
- liteai_sdk/tool/prepare.py +281 -0
- liteai_sdk/tool/toolset.py +18 -0
- liteai_sdk/tool/utils.py +1 -1
- liteai_sdk/types/__init__.py +4 -2
- liteai_sdk/types/message.py +26 -22
- liteai_sdk/types/tool.py +33 -0
- {liteai_sdk-0.3.22.dist-info → liteai_sdk-0.4.0.dist-info}/METADATA +1 -1
- liteai_sdk-0.4.0.dist-info/RECORD +18 -0
- liteai_sdk-0.3.22.dist-info/RECORD +0 -15
- {liteai_sdk-0.3.22.dist-info → liteai_sdk-0.4.0.dist-info}/WHEEL +0 -0
- {liteai_sdk-0.3.22.dist-info → liteai_sdk-0.4.0.dist-info}/licenses/LICENSE +0 -0
liteai_sdk/__init__.py
CHANGED
@@ -3,7 +3,7 @@ import queue
 from typing import cast
 from collections.abc import AsyncGenerator, Generator
 from litellm import CustomStreamWrapper, completion, acompletion
-from litellm.exceptions import (
+from litellm.exceptions import (
     AuthenticationError,
     PermissionDeniedError,
     RateLimitError,
@@ -23,10 +23,11 @@ from litellm.types.utils import LlmProviders,\
 from .debug import enable_debugging
 from .param_parser import ParamParser
 from .stream import AssistantMessageCollector
-from .tool import ToolFn, ToolDef, RawToolDef, ToolLike
 from .tool.execute import execute_tool_sync, execute_tool
+from .tool.toolset import tool, Toolset
 from .tool.utils import find_tool_by_name
 from .types import LlmRequestParams, GenerateTextResponse, StreamTextResponseSync, StreamTextResponseAsync
+from .types.tool import ToolFn, ToolDef, RawToolDef, ToolLike
 from .types.exceptions import *
 from .types.message import ChatMessage, UserMessage, SystemMessage, AssistantMessage, ToolMessage,\
     MessageChunk, TextChunk, UsageChunk, ReasoningChunk, AudioChunk, ImageChunk, ToolCallChunk,\
@@ -86,7 +87,9 @@ class LLM:
     ) -> list[ToolMessage]:
         results = []
         for tool_call_tuple in tool_call_tuples:
-
+            function_name = tool_call_tuple.function_name
+            function_arguments = tool_call_tuple.function_arguments
+
             if (target_tool := find_tool_by_name(tools, function_name)) is None:
                 logger.warning(f"Tool \"{function_name}\" not found, skipping execution.")
                 continue
@@ -100,7 +103,7 @@ class LLM:
             except Exception as e:
                 error = f"{type(e).__name__}: {str(e)}"
             results.append(ToolMessage(
-                id=id,
+                id=tool_call_tuple.id,
                 name=function_name,
                 arguments=function_arguments,
                 result=result,
@@ -114,7 +117,9 @@ class LLM:
     ) -> list[ToolMessage]:
         results = []
         for tool_call_tuple in tool_call_tuples:
-
+            function_name = tool_call_tuple.function_name
+            function_arguments = tool_call_tuple.function_arguments
+
             if (target_tool := find_tool_by_name(tools, function_name)) is None:
                 logger.warning(f"Tool \"{function_name}\" not found, skipping execution.")
                 continue
@@ -128,7 +133,7 @@ class LLM:
             except Exception as e:
                 error = f"{type(e).__name__}: {str(e)}"
             results.append(ToolMessage(
-                id=id,
+                id=tool_call_tuple.id,
                 name=function_name,
                 arguments=function_arguments,
                 result=result,
@@ -190,7 +195,7 @@ class LLM:
         return returned_stream, full_message_queue

     async def stream_text(self, params: LlmRequestParams) -> StreamTextResponseAsync:
-        async def stream(response: CustomStreamWrapper) -> AsyncGenerator[
+        async def stream(response: CustomStreamWrapper) -> AsyncGenerator[MessageChunk]:
             nonlocal message_collector
             async for chunk in response:
                 chunk = cast(LiteLlmModelResponseStream, chunk)
@@ -228,8 +233,12 @@ __all__ = [
     "Timeout",

     "LLM",
+    "LlmProviders",
     "LlmRequestParams",

+    "tool",
+    "Toolset",
+
     "ToolFn",
     "ToolDef",
    "RawToolDef",
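For reference, the names newly re-exported from the package root in this hunk (LlmProviders, tool, Toolset, plus the ToolFn/ToolDef/RawToolDef/ToolLike aliases now sourced from .types.tool) become directly importable; a minimal sketch, assuming the published import name liteai_sdk:

# Minimal import sketch; the symbols follow the __all__ additions above.
from liteai_sdk import LLM, LlmRequestParams, LlmProviders, tool, Toolset
from liteai_sdk import ToolFn, ToolDef, RawToolDef, ToolLike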
liteai_sdk/param_parser.py
CHANGED
@@ -1,7 +1,8 @@
 from typing import Any
 from litellm.types.utils import LlmProviders
-from .tool import prepare_tools
+from .tool.prepare import prepare_tools
 from .types import LlmRequestParams, ToolMessage
+from .types.tool import ToolLike

 ParsedParams = dict[str, Any]

@@ -14,8 +15,22 @@ class ParamParser:
         self._base_url = base_url
         self._api_key = api_key

+    @staticmethod
+    def _extract_tool_params(params: LlmRequestParams) -> list[ToolLike] | None:
+        if params.tools is None and params.toolsets is None:
+            return None
+        tools = []
+        if params.tools:
+            tools = params.tools
+        if params.toolsets:
+            for toolset in params.toolsets:
+                tools.extend(toolset.get_tool_methods())
+        return tools
+
     def _parse(self, params: LlmRequestParams) -> ParsedParams:
-
+        extracted_tool_likes = self._extract_tool_params(params)
+        tools = extracted_tool_likes and prepare_tools(extracted_tool_likes)
+
         transformed_messages = []
         for message in params.messages:
             if type(message) is ToolMessage and\
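A note on the new _extract_tool_params helper: when params.tools is set, `tools = params.tools` binds the caller's own list, so the later extend() call appears to append the collected toolset methods to that list in place rather than to a copy. A small illustrative sketch of that aliasing behaviour (placeholder values only, not package code):

# Binding then extend() mutates the original list object.
caller_tools = ["tool_a"]
tools = caller_tools            # same list object, not a copy
tools.extend(["toolset_method"])
assert caller_tools == ["tool_a", "toolset_method"]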
liteai_sdk/tool/__init__.py
CHANGED
@@ -1,310 +0,0 @@
-"""
-source: https://github.com/mozilla-ai/any-llm/blob/main/src/any_llm/tools.py
-"""
-
-import dataclasses
-import enum
-import inspect
-import types as _types
-from collections.abc import Callable, Mapping, Sequence
-from datetime import date, datetime, time
-from typing import Annotated as _Annotated, Literal as _Literal, is_typeddict as _is_typeddict,\
-    Any, Awaitable, get_args, get_origin, get_type_hints
-from pydantic import BaseModel as PydanticBaseModel
-
-ToolFn = Callable[..., Any] | Callable[..., Awaitable[Any]]
-
-"""
-RawToolDef example:
-{
-    "name": "get_current_weather",
-    "description": "Get the current weather in a given location",
-    "parameters": {
-        "type": "object",
-        "properties": {
-            "location": {
-                "type": "string",
-                "description": "The city and state, e.g. San Francisco, CA",
-            },
-            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
-        },
-        "required": ["location"],
-    }
-}
-"""
-RawToolDef = dict[str, Any]
-
-@dataclasses.dataclass
-class ToolDef:
-    name: str
-    description: str
-    execute: ToolFn
-
-ToolLike = ToolDef | RawToolDef | ToolFn
-
-def _python_type_to_json_schema(python_type: Any) -> dict[str, Any]:
-    """Convert Python type annotation to a JSON Schema for a parameter.
-
-    Supported mappings (subset tailored for LLM tool schemas):
-    - Primitives: str/int/float/bool -> string/integer/number/boolean
-    - bytes -> string with contentEncoding base64
-    - datetime/date/time -> string with format date-time/date/time
-    - list[T] / Sequence[T] / set[T] / frozenset[T] -> array with items=schema(T)
-    - set/frozenset include uniqueItems=true
-    - list without type args defaults items to string
-    - dict[K,V] / Mapping[K,V] -> object with additionalProperties=schema(V)
-    - dict without type args defaults additionalProperties to string
-    - tuple[T1, T2, ...] -> array with prefixItems per element and min/maxItems
-    - tuple[T, ...] -> array with items=schema(T)
-    - Union[X, Y] and X | Y -> oneOf=[schema(X), schema(Y)] (without top-level type)
-    - Optional[T] (Union[T, None]) -> schema(T) (nullability not encoded)
-    - Literal[...]/Enum -> enum with appropriate type inference when uniform
-    - TypedDict -> object with properties/required per annotations
-    - dataclass/Pydantic BaseModel -> object with nested properties inferred from fields
-    """
-    origin = get_origin(python_type)
-    args = get_args(python_type)
-
-    if _Annotated is not None and origin is _Annotated and len(args) >= 1:
-        python_type = args[0]
-        origin = get_origin(python_type)
-        args = get_args(python_type)
-
-    if python_type is Any:
-        return {"type": "string"}
-
-    primitive_map = {str: "string", int: "integer", float: "number", bool: "boolean"}
-    if python_type in primitive_map:
-        return {"type": primitive_map[python_type]}
-
-    if python_type is bytes:
-        return {"type": "string", "contentEncoding": "base64"}
-    if python_type is datetime:
-        return {"type": "string", "format": "date-time"}
-    if python_type is date:
-        return {"type": "string", "format": "date"}
-    if python_type is time:
-        return {"type": "string", "format": "time"}
-
-    if python_type is list:
-        return {"type": "array", "items": {"type": "string"}}
-    if python_type is dict:
-        return {"type": "object", "additionalProperties": {"type": "string"}}
-
-    if origin is _Literal:
-        literal_values = list(args)
-        schema_lit: dict[str, Any] = {"enum": literal_values}
-        if all(isinstance(v, bool) for v in literal_values):
-            schema_lit["type"] = "boolean"
-        elif all(isinstance(v, str) for v in literal_values):
-            schema_lit["type"] = "string"
-        elif all(isinstance(v, int) and not isinstance(v, bool) for v in literal_values):
-            schema_lit["type"] = "integer"
-        elif all(isinstance(v, int | float) and not isinstance(v, bool) for v in literal_values):
-            schema_lit["type"] = "number"
-        return schema_lit
-
-    if inspect.isclass(python_type) and issubclass(python_type, enum.Enum):
-        enum_values = [e.value for e in python_type]
-        value_types = {type(v) for v in enum_values}
-        schema: dict[str, Any] = {"enum": enum_values}
-        if value_types == {str}:
-            schema["type"] = "string"
-        elif value_types == {int}:
-            schema["type"] = "integer"
-        elif value_types <= {int, float}:
-            schema["type"] = "number"
-        elif value_types == {bool}:
-            schema["type"] = "boolean"
-        return schema
-
-    if _is_typeddict(python_type):
-        annotations: dict[str, Any] = getattr(python_type, "__annotations__", {}) or {}
-        required_keys = set(getattr(python_type, "__required_keys__", set()))
-        td_properties: dict[str, Any] = {}
-        td_required: list[str] = []
-        for field_name, field_type in annotations.items():
-            td_properties[field_name] = _python_type_to_json_schema(field_type)
-            if field_name in required_keys:
-                td_required.append(field_name)
-        schema_td: dict[str, Any] = {
-            "type": "object",
-            "properties": td_properties,
-        }
-        if td_required:
-            schema_td["required"] = td_required
-        return schema_td
-
-    if inspect.isclass(python_type) and dataclasses.is_dataclass(python_type):
-        type_hints = get_type_hints(python_type)
-        dc_properties: dict[str, Any] = {}
-        dc_required: list[str] = []
-        for field in dataclasses.fields(python_type):
-            field_type = type_hints.get(field.name, Any)
-            dc_properties[field.name] = _python_type_to_json_schema(field_type)
-            if (
-                field.default is dataclasses.MISSING
-                and getattr(field, "default_factory", dataclasses.MISSING) is dataclasses.MISSING
-            ):
-                dc_required.append(field.name)
-        schema_dc: dict[str, Any] = {"type": "object", "properties": dc_properties}
-        if dc_required:
-            schema_dc["required"] = dc_required
-        return schema_dc
-
-    if inspect.isclass(python_type) and issubclass(python_type, PydanticBaseModel):
-        model_type_hints = get_type_hints(python_type)
-        pd_properties: dict[str, Any] = {}
-        pd_required: list[str] = []
-        model_fields = getattr(python_type, "model_fields", {})
-        for name, field_info in model_fields.items():
-            pd_properties[name] = _python_type_to_json_schema(model_type_hints.get(name, Any))
-            is_required = getattr(field_info, "is_required", None)
-            if callable(is_required) and is_required():
-                pd_required.append(name)
-        schema_pd: dict[str, Any] = {"type": "object", "properties": pd_properties}
-        if pd_required:
-            schema_pd["required"] = pd_required
-        return schema_pd
-
-    if origin in (list, Sequence, set, frozenset):
-        item_type = args[0] if args else Any
-        item_schema = _python_type_to_json_schema(item_type)
-        schema_arr: dict[str, Any] = {"type": "array", "items": item_schema or {"type": "string"}}
-        if origin in (set, frozenset):
-            schema_arr["uniqueItems"] = True
-        return schema_arr
-    if origin is tuple:
-        if not args:
-            return {"type": "array", "items": {"type": "string"}}
-        if len(args) == 2 and args[1] is Ellipsis:
-            return {"type": "array", "items": _python_type_to_json_schema(args[0])}
-        prefix_items = [_python_type_to_json_schema(a) for a in args]
-        return {
-            "type": "array",
-            "prefixItems": prefix_items,
-            "minItems": len(prefix_items),
-            "maxItems": len(prefix_items),
-        }
-
-    if origin in (dict, Mapping):
-        value_type = args[1] if len(args) >= 2 else Any
-        value_schema = _python_type_to_json_schema(value_type)
-        return {"type": "object", "additionalProperties": value_schema or {"type": "string"}}
-
-    typing_union = getattr(__import__("typing"), "Union", None)
-    if origin in (typing_union, _types.UnionType):
-        non_none_args = [a for a in args if a is not type(None)]
-        if len(non_none_args) > 1:
-            schemas = [_python_type_to_json_schema(arg) for arg in non_none_args]
-            return {"oneOf": schemas}
-        if non_none_args:
-            return _python_type_to_json_schema(non_none_args[0])
-        return {"type": "string"}
-
-    return {"type": "string"}
-
-def _parse_callable_properties(func: ToolFn) -> tuple[dict[str, dict[str, Any]], list[str]]:
-    sig = inspect.signature(func)
-    type_hints = get_type_hints(func)
-
-    properties: dict[str, dict[str, Any]] = {}
-    required: list[str] = []
-
-    for param_name, param in sig.parameters.items():
-        if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
-            continue
-
-        annotated_type = type_hints.get(param_name, str)
-        param_schema = _python_type_to_json_schema(annotated_type)
-
-        type_name = getattr(annotated_type, "__name__", str(annotated_type))
-        properties[param_name] = {
-            **param_schema,
-            "description": f"Parameter {param_name} of type {type_name}",
-        }
-
-        if param.default == inspect.Parameter.empty:
-            required.append(param_name)
-
-    return properties, required
-
-def generate_tool_definition_from_callable(func: ToolFn) -> dict[str, Any]:
-    """Convert a Python callable to OpenAI tools format.
-
-    Args:
-        func: A Python callable (function) to convert to a tool
-
-    Returns:
-        Dictionary in OpenAI tools format
-
-    Raises:
-        ValueError: If the function doesn't have proper docstring or type annotations
-
-    Example:
-        >>> def get_weather(location: str, unit: str = "celsius") -> str:
-        ...     '''Get weather information for a location.'''
-        ...     return f"Weather in {location} is sunny, 25°{unit[0].upper()}"
-        >>>
-        >>> tool = generate_tool_definition_from_callable(get_weather)
-        >>> # Returns OpenAI tools format dict
-
-    """
-    if not func.__doc__:
-        msg = f"Function {func.__name__} must have a docstring"
-        raise ValueError(msg)
-
-    properties, required = _parse_callable_properties(func)
-    return {
-        "type": "function",
-        "function": {
-            "name": func.__name__,
-            "description": func.__doc__.strip(),
-            "parameters": {"type": "object", "properties": properties, "required": required},
-        },
-    }
-
-def generate_tool_definition_from_tool_def(tool_def: ToolDef) -> dict[str, Any]:
-    """Convert a ToolDef to OpenAI tools format.
-
-    Args:
-        tool_def: A ToolDef to convert to a tool
-
-    Returns:
-        Dictionary in OpenAI tools format
-
-    Example:
-        >>> tool_def = ToolDef(
-        ...     name="get_weather",
-        ...     description="Get weather information for a location.",
-        ...     execute=SomeFunction(),
-        ... )
-        >>> tool = generate_tool_definition_from_tool_def(tool_def)
-        >>> # Returns OpenAI tools format dict
-    """
-    properties, required = _parse_callable_properties(tool_def.execute)
-    return {
-        "type": "function",
-        "function": {
-            "name": tool_def.name,
-            "description": tool_def.description,
-            "parameters": {"type": "object", "properties": properties, "required": required},
-        },
-    }
-
-def generate_tool_definition_from_raw_tool_def(raw_tool_def: RawToolDef) -> dict[str, Any]:
-    return {
-        "type": "function",
-        "function": raw_tool_def,
-    }
-
-def prepare_tools(tools: list[ToolLike]) -> list[dict]:
-    tool_defs = []
-    for tool in tools:
-        if callable(tool):
-            tool_defs.append(generate_tool_definition_from_callable(tool))
-        elif isinstance(tool, ToolDef):
-            tool_defs.append(generate_tool_definition_from_tool_def(tool))
-        else:
-            tool_defs.append(generate_tool_definition_from_raw_tool_def(tool))
-    return tool_defs
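The module body removed above is not dropped from the package: the schema and definition helpers reappear in liteai_sdk/tool/prepare.py and the type aliases in liteai_sdk/types/tool.py (both added below). A sketch of the new import locations, using absolute paths inferred from the relative imports in those hunks:

# Post-0.4.0 locations for the pieces that used to live in liteai_sdk.tool
from liteai_sdk.types.tool import ToolFn, ToolDef, RawToolDef, ToolLike
from liteai_sdk.tool.prepare import prepare_tools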
liteai_sdk/tool/execute.py
CHANGED
@@ -3,7 +3,7 @@ import json
 from functools import singledispatch
 from typing import Any, Awaitable, Callable, cast
 from types import FunctionType, MethodType, CoroutineType
-from . import ToolDef
+from ..types.tool import ToolDef

 async def _coroutine_wrapper(awaitable: Awaitable[Any]) -> CoroutineType:
     return await awaitable
liteai_sdk/tool/prepare.py
ADDED
@@ -0,0 +1,281 @@
+"""
+source: https://github.com/mozilla-ai/any-llm/blob/main/src/any_llm/tools.py
+"""
+
+import dataclasses
+import enum
+import inspect
+import types as _types
+from collections.abc import Mapping, Sequence
+from datetime import date, datetime, time
+from typing import Annotated as _Annotated, Literal as _Literal, is_typeddict as _is_typeddict,\
+    Any, get_args, get_origin, get_type_hints
+from pydantic import BaseModel as PydanticBaseModel
+from ..types.tool import ToolFn, ToolDef, RawToolDef, ToolLike
+
+def _python_type_to_json_schema(python_type: Any) -> dict[str, Any]:
+    """Convert Python type annotation to a JSON Schema for a parameter.
+
+    Supported mappings (subset tailored for LLM tool schemas):
+    - Primitives: str/int/float/bool -> string/integer/number/boolean
+    - bytes -> string with contentEncoding base64
+    - datetime/date/time -> string with format date-time/date/time
+    - list[T] / Sequence[T] / set[T] / frozenset[T] -> array with items=schema(T)
+    - set/frozenset include uniqueItems=true
+    - list without type args defaults items to string
+    - dict[K,V] / Mapping[K,V] -> object with additionalProperties=schema(V)
+    - dict without type args defaults additionalProperties to string
+    - tuple[T1, T2, ...] -> array with prefixItems per element and min/maxItems
+    - tuple[T, ...] -> array with items=schema(T)
+    - Union[X, Y] and X | Y -> oneOf=[schema(X), schema(Y)] (without top-level type)
+    - Optional[T] (Union[T, None]) -> schema(T) (nullability not encoded)
+    - Literal[...]/Enum -> enum with appropriate type inference when uniform
+    - TypedDict -> object with properties/required per annotations
+    - dataclass/Pydantic BaseModel -> object with nested properties inferred from fields
+    """
+    origin = get_origin(python_type)
+    args = get_args(python_type)
+
+    if _Annotated is not None and origin is _Annotated and len(args) >= 1:
+        python_type = args[0]
+        origin = get_origin(python_type)
+        args = get_args(python_type)
+
+    if python_type is Any:
+        return {"type": "string"}
+
+    primitive_map = {str: "string", int: "integer", float: "number", bool: "boolean"}
+    if python_type in primitive_map:
+        return {"type": primitive_map[python_type]}
+
+    if python_type is bytes:
+        return {"type": "string", "contentEncoding": "base64"}
+    if python_type is datetime:
+        return {"type": "string", "format": "date-time"}
+    if python_type is date:
+        return {"type": "string", "format": "date"}
+    if python_type is time:
+        return {"type": "string", "format": "time"}
+
+    if python_type is list:
+        return {"type": "array", "items": {"type": "string"}}
+    if python_type is dict:
+        return {"type": "object", "additionalProperties": {"type": "string"}}
+
+    if origin is _Literal:
+        literal_values = list(args)
+        schema_lit: dict[str, Any] = {"enum": literal_values}
+        if all(isinstance(v, bool) for v in literal_values):
+            schema_lit["type"] = "boolean"
+        elif all(isinstance(v, str) for v in literal_values):
+            schema_lit["type"] = "string"
+        elif all(isinstance(v, int) and not isinstance(v, bool) for v in literal_values):
+            schema_lit["type"] = "integer"
+        elif all(isinstance(v, int | float) and not isinstance(v, bool) for v in literal_values):
+            schema_lit["type"] = "number"
+        return schema_lit
+
+    if inspect.isclass(python_type) and issubclass(python_type, enum.Enum):
+        enum_values = [e.value for e in python_type]
+        value_types = {type(v) for v in enum_values}
+        schema: dict[str, Any] = {"enum": enum_values}
+        if value_types == {str}:
+            schema["type"] = "string"
+        elif value_types == {int}:
+            schema["type"] = "integer"
+        elif value_types <= {int, float}:
+            schema["type"] = "number"
+        elif value_types == {bool}:
+            schema["type"] = "boolean"
+        return schema
+
+    if _is_typeddict(python_type):
+        annotations: dict[str, Any] = getattr(python_type, "__annotations__", {}) or {}
+        required_keys = set(getattr(python_type, "__required_keys__", set()))
+        td_properties: dict[str, Any] = {}
+        td_required: list[str] = []
+        for field_name, field_type in annotations.items():
+            td_properties[field_name] = _python_type_to_json_schema(field_type)
+            if field_name in required_keys:
+                td_required.append(field_name)
+        schema_td: dict[str, Any] = {
+            "type": "object",
+            "properties": td_properties,
+        }
+        if td_required:
+            schema_td["required"] = td_required
+        return schema_td
+
+    if inspect.isclass(python_type) and dataclasses.is_dataclass(python_type):
+        type_hints = get_type_hints(python_type)
+        dc_properties: dict[str, Any] = {}
+        dc_required: list[str] = []
+        for field in dataclasses.fields(python_type):
+            field_type = type_hints.get(field.name, Any)
+            dc_properties[field.name] = _python_type_to_json_schema(field_type)
+            if (
+                field.default is dataclasses.MISSING
+                and getattr(field, "default_factory", dataclasses.MISSING) is dataclasses.MISSING
+            ):
+                dc_required.append(field.name)
+        schema_dc: dict[str, Any] = {"type": "object", "properties": dc_properties}
+        if dc_required:
+            schema_dc["required"] = dc_required
+        return schema_dc
+
+    if inspect.isclass(python_type) and issubclass(python_type, PydanticBaseModel):
+        model_type_hints = get_type_hints(python_type)
+        pd_properties: dict[str, Any] = {}
+        pd_required: list[str] = []
+        model_fields = getattr(python_type, "model_fields", {})
+        for name, field_info in model_fields.items():
+            pd_properties[name] = _python_type_to_json_schema(model_type_hints.get(name, Any))
+            is_required = getattr(field_info, "is_required", None)
+            if callable(is_required) and is_required():
+                pd_required.append(name)
+        schema_pd: dict[str, Any] = {"type": "object", "properties": pd_properties}
+        if pd_required:
+            schema_pd["required"] = pd_required
+        return schema_pd
+
+    if origin in (list, Sequence, set, frozenset):
+        item_type = args[0] if args else Any
+        item_schema = _python_type_to_json_schema(item_type)
+        schema_arr: dict[str, Any] = {"type": "array", "items": item_schema or {"type": "string"}}
+        if origin in (set, frozenset):
+            schema_arr["uniqueItems"] = True
+        return schema_arr
+    if origin is tuple:
+        if not args:
+            return {"type": "array", "items": {"type": "string"}}
+        if len(args) == 2 and args[1] is Ellipsis:
+            return {"type": "array", "items": _python_type_to_json_schema(args[0])}
+        prefix_items = [_python_type_to_json_schema(a) for a in args]
+        return {
+            "type": "array",
+            "prefixItems": prefix_items,
+            "minItems": len(prefix_items),
+            "maxItems": len(prefix_items),
+        }
+
+    if origin in (dict, Mapping):
+        value_type = args[1] if len(args) >= 2 else Any
+        value_schema = _python_type_to_json_schema(value_type)
+        return {"type": "object", "additionalProperties": value_schema or {"type": "string"}}
+
+    typing_union = getattr(__import__("typing"), "Union", None)
+    if origin in (typing_union, _types.UnionType):
+        non_none_args = [a for a in args if a is not type(None)]
+        if len(non_none_args) > 1:
+            schemas = [_python_type_to_json_schema(arg) for arg in non_none_args]
+            return {"oneOf": schemas}
+        if non_none_args:
+            return _python_type_to_json_schema(non_none_args[0])
+        return {"type": "string"}
+
+    return {"type": "string"}
+
+def _parse_callable_properties(func: ToolFn) -> tuple[dict[str, dict[str, Any]], list[str]]:
+    sig = inspect.signature(func)
+    type_hints = get_type_hints(func)
+
+    properties: dict[str, dict[str, Any]] = {}
+    required: list[str] = []
+
+    for param_name, param in sig.parameters.items():
+        if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
+            continue
+
+        annotated_type = type_hints.get(param_name, str)
+        param_schema = _python_type_to_json_schema(annotated_type)
+
+        type_name = getattr(annotated_type, "__name__", str(annotated_type))
+        properties[param_name] = {
+            **param_schema,
+            "description": f"Parameter {param_name} of type {type_name}",
+        }
+
+        if param.default == inspect.Parameter.empty:
+            required.append(param_name)
+
+    return properties, required
+
+def generate_tool_definition_from_callable(func: ToolFn) -> dict[str, Any]:
+    """Convert a Python callable to OpenAI tools format.
+
+    Args:
+        func: A Python callable (function) to convert to a tool
+
+    Returns:
+        Dictionary in OpenAI tools format
+
+    Raises:
+        ValueError: If the function doesn't have proper docstring or type annotations
+
+    Example:
+        >>> def get_weather(location: str, unit: str = "celsius") -> str:
+        ...     '''Get weather information for a location.'''
+        ...     return f"Weather in {location} is sunny, 25°{unit[0].upper()}"
+        >>>
+        >>> tool = generate_tool_definition_from_callable(get_weather)
+        >>> # Returns OpenAI tools format dict
+
+    """
+    if not func.__doc__:
+        msg = f"Function {func.__name__} must have a docstring"
+        raise ValueError(msg)
+
+    properties, required = _parse_callable_properties(func)
+    return {
+        "type": "function",
+        "function": {
+            "name": func.__name__,
+            "description": inspect.cleandoc(func.__doc__),
+            "parameters": {"type": "object", "properties": properties, "required": required},
+        },
+    }
+
+def generate_tool_definition_from_tool_def(tool_def: ToolDef) -> dict[str, Any]:
+    """Convert a ToolDef to OpenAI tools format.
+
+    Args:
+        tool_def: A ToolDef to convert to a tool
+
+    Returns:
+        Dictionary in OpenAI tools format
+
+    Example:
+        >>> tool_def = ToolDef(
+        ...     name="get_weather",
+        ...     description="Get weather information for a location.",
+        ...     execute=SomeFunction(),
+        ... )
+        >>> tool = generate_tool_definition_from_tool_def(tool_def)
+        >>> # Returns OpenAI tools format dict
+    """
+    properties, required = _parse_callable_properties(tool_def.execute)
+    return {
+        "type": "function",
+        "function": {
+            "name": tool_def.name,
+            "description": tool_def.description,
+            "parameters": {"type": "object", "properties": properties, "required": required},
+        },
+    }
+
+def generate_tool_definition_from_raw_tool_def(raw_tool_def: RawToolDef) -> dict[str, Any]:
+    return {
+        "type": "function",
+        "function": raw_tool_def,
+    }
+
+def prepare_tools(tools: Sequence[ToolLike]) -> list[dict]:
+    tool_defs = []
+    for tool in tools:
+        if callable(tool):
+            tool_defs.append(generate_tool_definition_from_callable(tool))
+        elif isinstance(tool, ToolDef):
+            tool_defs.append(generate_tool_definition_from_tool_def(tool))
+        else:
+            tool_defs.append(generate_tool_definition_from_raw_tool_def(tool))
+    return tool_defs
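To illustrate what prepare_tools produces for a plain callable, here is a hedged sketch based only on the code above; get_weather and its docstring are examples, not package fixtures. Parameter descriptions are auto-generated from the annotations, and required lists the parameters without defaults:

def get_weather(location: str, unit: str = "celsius") -> str:
    """Get weather information for a location."""
    return f"Sunny in {location} ({unit})"

# prepare_tools([get_weather]) should yield roughly:
# [{
#   "type": "function",
#   "function": {
#     "name": "get_weather",
#     "description": "Get weather information for a location.",
#     "parameters": {
#       "type": "object",
#       "properties": {
#         "location": {"type": "string", "description": "Parameter location of type str"},
#         "unit": {"type": "string", "description": "Parameter unit of type str"},
#       },
#       "required": ["location"],
#     },
#   },
# }]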
liteai_sdk/tool/toolset.py
ADDED
@@ -0,0 +1,18 @@
+import inspect
+from typing import Any, Callable, TypeVar
+from ..types.tool import ToolFn
+
+F = TypeVar("F", bound=Callable[..., Any])
+TOOL_FLAG = "__is_tool__"
+
+def tool(func: F) -> F:
+    setattr(func, TOOL_FLAG, True)
+    return func
+
+class Toolset:
+    def get_tool_methods(self) -> list[ToolFn]:
+        return [
+            method
+            for _, method in inspect.getmembers(self, predicate=inspect.ismethod)
+            if getattr(method, TOOL_FLAG, False)
+        ]
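A short usage sketch of the new toolset module, based only on the 18 lines above; MathTools and its methods are illustrative. Methods decorated with @tool carry the __is_tool__ flag, and get_tool_methods() returns only the flagged bound methods, which the new LlmRequestParams.toolsets field (added in types/__init__.py below) then feeds through prepare_tools:

from liteai_sdk.tool.toolset import tool, Toolset

class MathTools(Toolset):
    @tool
    def add(self, a: int, b: int) -> int:
        """Add two integers."""
        return a + b

    def not_a_tool(self) -> None:   # undecorated, so never exposed to the model
        pass

assert [m.__name__ for m in MathTools().get_tool_methods()] == ["add"]

Constructing request params with the new field would then look roughly like LlmRequestParams(model=..., messages=[...], toolsets=[MathTools()]), with the model and messages values supplied by the caller.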
liteai_sdk/tool/utils.py
CHANGED
liteai_sdk/types/__init__.py
CHANGED
@@ -1,9 +1,10 @@
 import asyncio
 import dataclasses
 import queue
-from typing import Any,
+from typing import Any, Literal
 from collections.abc import AsyncGenerator, Generator
-from
+from .tool import ToolLike
+from ..tool.toolset import Toolset
 from .message import ChatMessage, AssistantMessage, ToolMessage, MessageChunk

 @dataclasses.dataclass
@@ -11,6 +12,7 @@ class LlmRequestParams:
     model: str
     messages: list[ChatMessage]
     tools: list[ToolLike] | None = None
+    toolsets: list[Toolset] | None = None
     tool_choice: Literal["auto", "required", "none"] = "auto"
     execute_tools: bool = False

liteai_sdk/types/message.py
CHANGED
@@ -3,16 +3,18 @@ from __future__ import annotations
 import json
 import dataclasses
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Literal, cast
+from typing import TYPE_CHECKING, Any, Literal, NamedTuple, cast
 from pydantic import BaseModel, ConfigDict, PrivateAttr, field_validator
-from litellm.types.utils import
-
-
-
-
-
-
-
+from litellm.types.utils import (
+    Message as LiteLlmMessage,
+    ModelResponse as LiteLlmModelResponse,
+    ModelResponseStream as LiteLlmModelResponseStream,
+    Choices as LiteLlmModelResponseChoices,
+    ChatCompletionAudioResponse,
+    ChatCompletionMessageToolCall,
+    ChatCompletionDeltaToolCall,
+    Usage as LiteLlmUsage
+)
 from litellm.types.llms.openai import (
     AllMessageValues,
     OpenAIMessageContent,
@@ -24,14 +26,14 @@ from litellm.types.llms.openai import (
     ChatCompletionToolMessage,
     ChatCompletionSystemMessage,
 )
-from ..tool import ToolLike
+from ..types.tool import ToolLike
 from ..tool.utils import find_tool_by_name
 from ..logger import logger

 if TYPE_CHECKING:
     from . import LlmRequestParams

-class ChatMessage(BaseModel, ABC):
+class ChatMessage(BaseModel, ABC):
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
         validate_assignment=True,
@@ -49,7 +51,7 @@ class UserMessage(ChatMessage):

 class ToolMessage(ChatMessage):
     """
-    The `tool_def` field is ref to the target tool of the tool call, and
+    The `tool_def` field is ref to the target tool of the tool call, and
     it will only be None when the target tool is not found
     """
     id: str
@@ -91,7 +93,11 @@ class ToolMessage(ChatMessage):
             content=content,
             tool_call_id=self.id)

-ToolCallTuple
+class ToolCallTuple(NamedTuple):
+    id: str
+    function_name: str
+    function_arguments: str
+
 class AssistantMessage(ChatMessage):
     content: str | None = None
     reasoning_content: str | None = None
@@ -151,7 +157,7 @@ class AssistantMessage(ChatMessage):
                 function_name is None or\
                 function_arguments is None:
                 return None
-            results.append((id, function_name, function_arguments))
+            results.append(ToolCallTuple(id, function_name, function_arguments))
         return results

     def get_partial_tool_messages(self) -> list[ToolMessage] | None:
@@ -172,22 +178,20 @@ class AssistantMessage(ChatMessage):

         results = []
         for tool_call in parsed_tool_calls:
-            id, name, arguments = tool_call
-
             tool_message = ToolMessage(
-                id=id,
-                name=
-                arguments=
+                id=tool_call.id,
+                name=tool_call.function_name,
+                arguments=tool_call.function_arguments,
                 result=None,
                 error=None)

             if has_tool_def:
                 assert self._request_params_ref and self._request_params_ref.tools
-                target_tool = find_tool_by_name(self._request_params_ref.tools,
+                target_tool = find_tool_by_name(self._request_params_ref.tools, tool_call.function_name)
                 if target_tool:
                     tool_message = tool_message.with_tool_def(target_tool)
                 else:
-                    logger.warning(f"Tool {
+                    logger.warning(f"Tool {tool_call.function_name} not found in request params, "
                         "tool_def will not be attached to the tool message")

             results.append(tool_message)
@@ -229,7 +233,7 @@ class ToolCallChunk:
     arguments: str
     index: int

-MessageChunk = TextChunk | ReasoningChunk | AudioChunk | ImageChunk | ToolCallChunk
+MessageChunk = TextChunk | UsageChunk | ReasoningChunk | AudioChunk | ImageChunk | ToolCallChunk

 def openai_chunk_normalizer(
     chunk: LiteLlmModelResponseStream
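Since ToolCallTuple is a NamedTuple, existing positional consumers keep working while the new code reads named fields; a quick illustrative check (the argument values are made up):

call = ToolCallTuple(id="call_1", function_name="get_weather",
                     function_arguments='{"location": "SF"}')
call_id, name, args = call   # positional unpacking still works
assert (call_id, name, args) == (call.id, call.function_name, call.function_arguments)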
liteai_sdk/types/tool.py
ADDED
@@ -0,0 +1,33 @@
+import dataclasses
+from collections.abc import Callable
+from typing import Any, Awaitable
+
+ToolFn = Callable[..., Any] | Callable[..., Awaitable[Any]]
+
+"""
+RawToolDef example:
+{
+    "name": "get_current_weather",
+    "description": "Get the current weather in a given location",
+    "parameters": {
+        "type": "object",
+        "properties": {
+            "location": {
+                "type": "string",
+                "description": "The city and state, e.g. San Francisco, CA",
+            },
+            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+        },
+        "required": ["location"],
+    }
+}
+"""
+RawToolDef = dict[str, Any]
+
+@dataclasses.dataclass
+class ToolDef:
+    name: str
+    description: str
+    execute: ToolFn
+
+ToolLike = ToolDef | RawToolDef | ToolFn
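The ToolLike union above accepts three shapes; minimal illustrative examples of each (the function body and dict contents are placeholders, only the type names come from the module above):

def lookup(word: str) -> str:
    """Look up a word."""
    return word.upper()

as_callable: ToolLike = lookup                      # bare ToolFn
as_tool_def: ToolLike = ToolDef(name="lookup",
                                description="Look up a word.",
                                execute=lookup)     # ToolDef dataclass
as_raw: ToolLike = {                                # RawToolDef dict
    "name": "lookup",
    "description": "Look up a word.",
    "parameters": {
        "type": "object",
        "properties": {"word": {"type": "string"}},
        "required": ["word"],
    },
}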
liteai_sdk-0.4.0.dist-info/RECORD
ADDED
@@ -0,0 +1,18 @@
+liteai_sdk/__init__.py,sha256=nXEHbRSYjpGFJnzmOB_cpx8NL12Eumw_IzY8UjD6GAM,10527
+liteai_sdk/debug.py,sha256=T7qIy1BeeUGlF40l9JCMMVn8pvvMJAEQeG4adQbOydA,69
+liteai_sdk/logger.py,sha256=99vJAQRKcu4CuHgZYAJ2zDQtGea6Bn3vJJrS-mtza7c,677
+liteai_sdk/param_parser.py,sha256=ae_aaOfwBNhR3QW7xxmjOf4D5ssS_9VN0IeRH5aBUYQ,2249
+liteai_sdk/stream.py,sha256=T9MLmgPC8te6qvSkBOh7vkl-I4OGCKuW1kEN6RkiCe0,3176
+liteai_sdk/tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+liteai_sdk/tool/execute.py,sha256=AzYqX-oG89LLHrxouZtf7M7HohFf_WRtzqczlmw5Nks,2473
+liteai_sdk/tool/prepare.py,sha256=8JJiLev3pALu36gaqXHX9CtfCFWVM6nBbVBIrqm24po,11499
+liteai_sdk/tool/toolset.py,sha256=bl7qrrlBFz7HHpt8ZZlTBqjDin5MIOTtIz2o3H8kgRI,476
+liteai_sdk/tool/utils.py,sha256=A_4Jx1BacRX1KmK3t_9rDXrmSXj6v4fzNtqLsN12S0I,420
+liteai_sdk/types/__init__.py,sha256=WHp1YUOdINvv4shYBNo3xmEr_6B7boXvAAaefldkHbs,1071
+liteai_sdk/types/exceptions.py,sha256=hIGu06htOJxfEBAHx7KTvLQr0Y8GYnBLFJFlr_IGpDs,602
+liteai_sdk/types/message.py,sha256=vAp1uMv-WXFvEBhxFyw6rZt3IS-pLdANEuEFJJRJ8aY,9520
+liteai_sdk/types/tool.py,sha256=XbqbANr8D-FHDConmKCULoX3KfXkQXhCiSTHvVmKOl0,797
+liteai_sdk-0.4.0.dist-info/licenses/LICENSE,sha256=cTeVgQVJJcRdm1boa2P1FBnOeXfA_egV6s4PouyrCxg,1064
+liteai_sdk-0.4.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+liteai_sdk-0.4.0.dist-info/METADATA,sha256=6660sLr1g5VFHUcf57zfHBkYekJCWuOm58qOgBKlNr8,3023
+liteai_sdk-0.4.0.dist-info/RECORD,,
liteai_sdk-0.3.22.dist-info/RECORD
REMOVED
@@ -1,15 +0,0 @@
-liteai_sdk/__init__.py,sha256=rLAihgUHA6hrh9LZSWsAoJdZWH8bIaUYAuDC1AXIb_w,10341
-liteai_sdk/debug.py,sha256=T7qIy1BeeUGlF40l9JCMMVn8pvvMJAEQeG4adQbOydA,69
-liteai_sdk/logger.py,sha256=99vJAQRKcu4CuHgZYAJ2zDQtGea6Bn3vJJrS-mtza7c,677
-liteai_sdk/param_parser.py,sha256=KjVnTnW2cr-mJMSxhBUG__GDzTk-mKO4wlbM6Z3lODM,1714
-liteai_sdk/stream.py,sha256=T9MLmgPC8te6qvSkBOh7vkl-I4OGCKuW1kEN6RkiCe0,3176
-liteai_sdk/tool/__init__.py,sha256=c1qJaEpoYlgOCtAjFODhrSR73ZW17OuamsO__yeYAkY,12150
-liteai_sdk/tool/execute.py,sha256=1CfRlJZgqoev42fDH4vygXyEtCEEBPcRfbqaP77jxu4,2462
-liteai_sdk/tool/utils.py,sha256=Djd1-EoLPfIqgPbWWvOreozQ76NHX4FZ6OXc1evKqPM,409
-liteai_sdk/types/__init__.py,sha256=CMmweIGMgreZlbvBtRTKfvdcC7war2ApLNf-9Fz0yzc,1006
-liteai_sdk/types/exceptions.py,sha256=hIGu06htOJxfEBAHx7KTvLQr0Y8GYnBLFJFlr_IGpDs,602
-liteai_sdk/types/message.py,sha256=rj-h_YGdUH9x87_kToiJfyy_NV6VymZrcEnPFw1nYNU,9575
-liteai_sdk-0.3.22.dist-info/licenses/LICENSE,sha256=cTeVgQVJJcRdm1boa2P1FBnOeXfA_egV6s4PouyrCxg,1064
-liteai_sdk-0.3.22.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-liteai_sdk-0.3.22.dist-info/METADATA,sha256=Nol0OgniIOeSUQ1pOKPhnKBqRBnmJa4cmV17dDVGupk,3024
-liteai_sdk-0.3.22.dist-info/RECORD,,
{liteai_sdk-0.3.22.dist-info → liteai_sdk-0.4.0.dist-info}/WHEEL
File without changes

{liteai_sdk-0.3.22.dist-info → liteai_sdk-0.4.0.dist-info}/licenses/LICENSE
File without changes