dais-sdk 0.6.0__py3-none-any.whl → 0.6.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dais_sdk/__init__.py CHANGED
@@ -1,5 +1,4 @@
- import asyncio
- import queue
+ import json
  from typing import cast
  from collections.abc import AsyncGenerator, Generator
  from litellm import CustomStreamWrapper, completion, acompletion
@@ -12,7 +11,10 @@ from litellm.types.utils import (
  from .debug import enable_debugging
  from .param_parser import ParamParser
  from .stream import AssistantMessageCollector
- from .tool.execute import execute_tool_sync, execute_tool
+ from .tool.execute import (
+ ToolExceptionHandlerManager,
+ execute_tool_sync, execute_tool
+ )
  from .tool.toolset import (
  Toolset,
  python_tool,
@@ -21,7 +23,7 @@ from .tool.toolset import (
  LocalMcpToolset,
  RemoteMcpToolset,
  )
- from .tool.utils import find_tool_by_name
+ from .tool.utils import get_tool_name
  from .mcp_client import (
  McpClient,
  McpTool,
@@ -51,6 +53,10 @@ from .types.exceptions import (
  ContentPolicyViolationError,
  APIError,
  Timeout,
+ LlmToolException,
+ ToolDoesNotExistError,
+ ToolArgumentDecodeError,
+ ToolExecutionError,
  )
  from .types.message import (
  ChatMessage, UserMessage, SystemMessage, AssistantMessage, ToolMessage,
@@ -87,47 +93,49 @@ class LLM:
  self.provider = provider
  self.base_url = base_url
  self.api_key = api_key
+ self._tool_exception_handler_manager = ToolExceptionHandlerManager()
  self._param_parser = ParamParser(self.provider, self.base_url, self.api_key)

- @staticmethod
- async def execute_tool_call(
- params: LlmRequestParams,
- incomplete_tool_message: ToolMessage
- ) -> tuple[str | None, str | None]:
+ @property
+ def tool_exception_handler_manager(self) -> ToolExceptionHandlerManager:
+ return self._tool_exception_handler_manager
+
+ async def execute_tool_call(self,
+ tool_def: ToolLike,
+ arguments: str | dict) -> tuple[str | None, str | None]:
  """
- Receive incomplete tool messages, execute the tool calls and
- return the result and error tuple.
+ Returns:
+ A tuple of (result, error)
  """
- name, arguments = incomplete_tool_message.name, incomplete_tool_message.arguments
- tool_def = params.find_tool(incomplete_tool_message.name)
- if tool_def is None:
- raise LlmRequestParams.ToolDoesNotExistError(name)
-
  result, error = None, None
  try:
  result = await execute_tool(tool_def, arguments)
+ except json.JSONDecodeError as e:
+ assert type(arguments) is str
+ _error = ToolArgumentDecodeError(get_tool_name(tool_def), arguments, e)
+ error = self._tool_exception_handler_manager.handle(_error)
  except Exception as e:
- error = f"{type(e).__name__}: {str(e)}"
+ _error = ToolExecutionError(tool_def, arguments, e)
+ error = self._tool_exception_handler_manager.handle(_error)
  return result, error

- @staticmethod
- def execute_tool_call_sync(
- params: LlmRequestParams,
- incomplete_tool_message: ToolMessage
- ) -> tuple[str | None, str | None]:
+ def execute_tool_call_sync(self,
+ tool_def: ToolLike,
+ arguments: str | dict
+ ) -> tuple[str | None, str | None]:
  """
  Synchronous version of `execute_tool_call`.
  """
- name, arguments = incomplete_tool_message.name, incomplete_tool_message.arguments
- tool_def = params.find_tool(incomplete_tool_message.name)
- if tool_def is None:
- raise LlmRequestParams.ToolDoesNotExistError(name)
-
  result, error = None, None
  try:
  result = execute_tool_sync(tool_def, arguments)
+ except json.JSONDecodeError as e:
+ assert type(arguments) is str
+ _error = ToolArgumentDecodeError(get_tool_name(tool_def), arguments, e)
+ error = self._tool_exception_handler_manager.handle(_error)
  except Exception as e:
- error = f"{type(e).__name__}: {str(e)}"
+ _error = ToolExecutionError(tool_def, arguments, e)
+ error = self._tool_exception_handler_manager.handle(_error)
  return result, error

  def _resolve_tool_calls_sync(self, params: LlmRequestParams, assistant_message: AssistantMessage) -> Generator[ToolMessage]:
@@ -136,17 +144,14 @@ class LLM:
  := assistant_message.get_incomplete_tool_messages()) is None:
  return
  for incomplete_tool_message in incomplete_tool_messages:
- try:
- result, error = LLM.execute_tool_call_sync(params, incomplete_tool_message)
- except LlmRequestParams.ToolDoesNotExistError as e:
- logger.warning(f"{e.message} Skipping this tool call.")
+ tool = params.find_tool(incomplete_tool_message.name)
+ if tool is None:
+ _error = ToolDoesNotExistError(incomplete_tool_message.name)
+ error = self._tool_exception_handler_manager.handle(_error)
+ yield incomplete_tool_message.with_result(None, error)
  continue
- yield ToolMessage(
- tool_call_id=incomplete_tool_message.tool_call_id,
- name=incomplete_tool_message.name,
- arguments=incomplete_tool_message.arguments,
- result=result,
- error=error)
+ result, error = self.execute_tool_call_sync(tool, incomplete_tool_message.arguments)
+ yield incomplete_tool_message.with_result(result, error)

  async def _resolve_tool_calls(self, params: LlmRequestParams, assistant_message: AssistantMessage) -> AsyncGenerator[ToolMessage]:
  if not params.execute_tools: return
@@ -154,17 +159,14 @@ class LLM:
  assistant_message.get_incomplete_tool_messages()) is None:
  return
  for incomplete_tool_message in incomplete_tool_messages:
- try:
- result, error = await LLM.execute_tool_call(params, incomplete_tool_message)
- except LlmRequestParams.ToolDoesNotExistError as e:
- logger.warning(f"{e.message} Skipping this tool call.")
+ tool = params.find_tool(incomplete_tool_message.name)
+ if tool is None:
+ _error = ToolDoesNotExistError(incomplete_tool_message.name)
+ error = self._tool_exception_handler_manager.handle(_error)
+ yield incomplete_tool_message.with_result(None, error)
  continue
- yield ToolMessage(
- tool_call_id=incomplete_tool_message.tool_call_id,
- name=incomplete_tool_message.name,
- arguments=incomplete_tool_message.arguments,
- result=result,
- error=error)
+ result, error = await self.execute_tool_call(tool, incomplete_tool_message.arguments)
+ yield incomplete_tool_message.with_result(result, error)

  def list_models(self) -> list[str]:
  provider_config = ProviderConfigManager.get_provider_model_info(
@@ -208,8 +210,8 @@ class LLM:
  - stream: Generator yielding `MessageChunk` objects
  - full_message_queue: Queue containing complete `AssistantMessage`, `ToolMessage` (or `None` when done)
  """
- def stream(response: CustomStreamWrapper) -> Generator[MessageChunk]:
- nonlocal message_collector
+ def stream(response: CustomStreamWrapper, full_message_queue: FullMessageQueueSync) -> Generator[MessageChunk]:
+ message_collector = AssistantMessageCollector()
  for chunk in response:
  chunk = cast(LiteLlmModelResponseStream, chunk)
  yield from openai_chunk_normalizer(chunk)
@@ -223,9 +225,8 @@ class LLM:
  full_message_queue.put(None)

  response = completion(**self._param_parser.parse_stream(params))
- message_collector = AssistantMessageCollector()
- returned_stream = stream(cast(CustomStreamWrapper, response))
  full_message_queue = FullMessageQueueSync()
+ returned_stream = stream(cast(CustomStreamWrapper, response), full_message_queue)
  return returned_stream, full_message_queue

  async def stream_text(self, params: LlmRequestParams) -> StreamTextResponseAsync:
@@ -234,8 +235,8 @@ class LLM:
  - stream: Generator yielding `MessageChunk` objects
  - full_message_queue: Queue containing complete `AssistantMessage`, `ToolMessage` (or `None` when done)
  """
- async def stream(response: CustomStreamWrapper) -> AsyncGenerator[MessageChunk]:
- nonlocal message_collector
+ async def stream(response: CustomStreamWrapper, full_message_queue: FullMessageQueueAsync) -> AsyncGenerator[MessageChunk]:
+ message_collector = AssistantMessageCollector()
  async for chunk in response:
  chunk = cast(LiteLlmModelResponseStream, chunk)
  for normalized_chunk in openai_chunk_normalizer(chunk):
@@ -244,14 +245,14 @@ class LLM:

  message = message_collector.get_message()
  await full_message_queue.put(message)
+
  async for tool_message in self._resolve_tool_calls(params, message):
  await full_message_queue.put(tool_message)
  await full_message_queue.put(None)

  response = await acompletion(**self._param_parser.parse_stream(params))
- message_collector = AssistantMessageCollector()
- returned_stream = stream(cast(CustomStreamWrapper, response))
  full_message_queue = FullMessageQueueAsync()
+ returned_stream = stream(cast(CustomStreamWrapper, response), full_message_queue)
  return returned_stream, full_message_queue

  __all__ = [
@@ -317,4 +318,8 @@ __all__ = [
  "ContentPolicyViolationError",
  "APIError",
  "Timeout",
+ "LlmToolException",
+ "ToolDoesNotExistError",
+ "ToolArgumentDecodeError",
+ "ToolExecutionError",
  ]
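The 0.6.2 release replaces the ad-hoc `f"{type(e).__name__}: {e}"` error strings with a pluggable `ToolExceptionHandlerManager`, exposed on `LLM` through the new `tool_exception_handler_manager` property. The following is a minimal usage sketch, assuming `ToolExceptionHandlerManager` and the new exception types are importable from the package root as the `__init__.py` changes above suggest; the handler body and the `RuntimeError("boom")` payload are illustrative only.

```python
from dais_sdk import ToolExceptionHandlerManager, ToolExecutionError

manager = ToolExceptionHandlerManager()

@manager.register(ToolExecutionError)
def on_tool_failure(e: ToolExecutionError) -> str:
    # The returned string is what handle() reports back as the tool error.
    return f"Tool call failed: {type(e.raw_error).__name__}: {e.raw_error}"

# handle() walks the exception's MRO, so subclasses of a registered type
# fall back to the nearest registered base-class handler.
error = manager.handle(ToolExecutionError(print, {"x": 1}, RuntimeError("boom")))
print(error)  # -> "Tool call failed: RuntimeError: boom"
```

On an `LLM` instance the same registration would presumably go through `llm.tool_exception_handler_manager.register(...)`, since `execute_tool_call` and the tool-resolution generators route every failure through that manager.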
dais_sdk/mcp_client/local_mcp_client.py CHANGED
@@ -1,10 +1,10 @@
  from contextlib import AsyncExitStack
- from typing import Any
+ from typing import Any, override
  from mcp import ClientSession, StdioServerParameters as StdioServerParams
  from mcp.client.stdio import stdio_client
  from .base_mcp_client import McpClient, Tool, ToolResult, McpSessionNotEstablishedError

- LocalServerParams = StdioServerParams
+ type LocalServerParams = StdioServerParams

  class LocalMcpClient(McpClient):
  def __init__(self, name: str, params: LocalServerParams):
@@ -14,9 +14,11 @@ class LocalMcpClient(McpClient):
  self._exit_stack: AsyncExitStack | None = None

  @property
+ @override
  def name(self) -> str:
  return self._name

+ @override
  async def connect(self):
  self._exit_stack = AsyncExitStack()

@@ -32,6 +34,7 @@ class LocalMcpClient(McpClient):
  await self.disconnect()
  raise

+ @override
  async def list_tools(self) -> list[Tool]:
  if not self._session:
  raise McpSessionNotEstablishedError()
@@ -39,6 +42,7 @@ class LocalMcpClient(McpClient):
  result = await self._session.list_tools()
  return result.tools

+ @override
  async def call_tool(
  self, tool_name: str, arguments: dict[str, Any] | None = None
  ) -> ToolResult:
@@ -48,6 +52,7 @@ class LocalMcpClient(McpClient):
  response = await self._session.call_tool(tool_name, arguments)
  return ToolResult(response.isError, response.content)

+ @override
  async def disconnect(self) -> None:
  if self._exit_stack:
  await self._exit_stack.aclose()
dais_sdk/mcp_client/remote_mcp_client.py CHANGED
@@ -2,7 +2,7 @@ import httpx
  import webbrowser
  from dataclasses import dataclass
  from contextlib import AsyncExitStack
- from typing import Any, NamedTuple
+ from typing import Any, NamedTuple, override
  from mcp import ClientSession
  from mcp.client.auth import OAuthClientProvider
  from mcp.client.streamable_http import streamable_http_client
@@ -49,6 +49,7 @@ class RemoteMcpClient(McpClient):
  self._params.oauth_params.oauth_token_storage = storage

  @property
+ @override
  def name(self) -> str:
  return self._name

@@ -98,13 +99,14 @@ class RemoteMcpClient(McpClient):
  try:
  webbrowser.open(url)
  except Exception as e:
- logger.error(f"[OAuth] Not able to open browser: {e}")
+ logger.error(f"[OAuth] Not able to open browser", exc_info=e)

  async def _handle_oauth_callback(self) -> OAuthCode:
  if self._oauth_context is None:
  raise ValueError("OAuth context not initialized")
  return await self._oauth_context.server.wait_for_code()

+ @override
  async def connect(self):
  self._exit_stack = AsyncExitStack()
  if self._oauth_context:
@@ -126,6 +128,7 @@ class RemoteMcpClient(McpClient):
  await self.disconnect()
  raise

+ @override
  async def list_tools(self) -> list[Tool]:
  if not self._session:
  raise McpSessionNotEstablishedError()
@@ -133,6 +136,7 @@ class RemoteMcpClient(McpClient):
  result = await self._session.list_tools()
  return result.tools

+ @override
  async def call_tool(
  self, tool_name: str, arguments: dict[str, Any] | None = None
  ) -> ToolResult:
@@ -142,6 +146,7 @@ class RemoteMcpClient(McpClient):
  response = await self._session.call_tool(tool_name, arguments)
  return ToolResult(response.isError, response.content)

+ @override
  async def disconnect(self):
  try:
  if self._exit_stack:
dais_sdk/param_parser.py CHANGED
@@ -1,4 +1,3 @@
- from __future__ import annotations
  from typing import Any, TYPE_CHECKING
  from litellm.types.utils import LlmProviders
  from .tool.prepare import prepare_tools
dais_sdk/tool/execute.py CHANGED
@@ -1,12 +1,47 @@
  import asyncio
  import json
+ import inspect
  from functools import singledispatch
- from typing import Any, Awaitable, Callable, cast
- from types import FunctionType, MethodType, CoroutineType
+ from typing import Any, assert_never, Callable, cast
+ from types import FunctionType, MethodType
  from ..types.tool import ToolDef, ToolLike
+ from ..types.exceptions import LlmToolException
+ from ..logger import logger

- async def _coroutine_wrapper(awaitable: Awaitable[Any]) -> CoroutineType:
- return await awaitable
+ type ExceptionHandler[E: LlmToolException] = Callable[[E], str]
+
+ class ToolExceptionHandlerManager:
+ def __init__(self):
+ self._handlers: dict[type[LlmToolException], ExceptionHandler[Any]] = {}
+
+ def register[E: LlmToolException](self, exception_type: type[E]):
+ def decorator(handler: ExceptionHandler[E]) -> ExceptionHandler[E]:
+ self.set_handler(exception_type, handler)
+ return handler
+ return decorator
+
+ def set_handler[E: LlmToolException](self, exception_type: type[E], handler: ExceptionHandler[E]):
+ self._handlers[exception_type] = handler
+
+ def get_handler[E: LlmToolException](self, exception_type: type[E]) -> ExceptionHandler[E] | None:
+ return self._handlers.get(exception_type)
+
+ def handle(self, e: LlmToolException) -> str:
+ def find_best_handler[E: LlmToolException](exc_type: type[E]) -> ExceptionHandler[E] | None:
+ for cls in exc_type.__mro__:
+ if cls in self._handlers:
+ return self._handlers[cls]
+ return None
+
+ # Searches the MRO of the exception type to make sure the subclasses of
+ # the registered exception type can also be handled.
+ handler = find_best_handler(type(e))
+ if handler is None:
+ logger.warning(f"Unhandled tool exception: {type(e).__name__}", exc_info=e)
+ return f"Unhandled tool exception | {type(e).__name__}: {e}"
+ return handler(e)
+
+ # --- --- --- --- --- ---

  def _arguments_normalizer(arguments: str | dict) -> dict:
  if isinstance(arguments, str):
@@ -15,7 +50,7 @@ def _arguments_normalizer(arguments: str | dict) -> dict:
  elif isinstance(arguments, dict):
  return arguments
  else:
- raise ValueError(f"Invalid arguments type: {type(arguments)}")
+ assert_never(arguments)

  def _result_normalizer(result: Any) -> str:
  if isinstance(result, str):
@@ -24,27 +59,37 @@ def _result_normalizer(result: Any) -> str:

  @singledispatch
  def execute_tool_sync(tool: ToolLike, arguments: str | dict) -> str:
+ """
+ Raises:
+ ValueError: If the tool type is not supported.
+ JSONDecodeError: If the arguments is a string but not valid JSON.
+ """
  raise ValueError(f"Invalid tool type: {type(tool)}")

  @execute_tool_sync.register(FunctionType)
  @execute_tool_sync.register(MethodType)
  def _(toolfn: Callable, arguments: str | dict) -> str:
  arguments = _arguments_normalizer(arguments)
- result = (asyncio.run(_coroutine_wrapper(toolfn(**arguments)))
- if asyncio.iscoroutinefunction(toolfn)
+ result = (asyncio.run(toolfn(**arguments))
+ if inspect.iscoroutinefunction(toolfn)
  else toolfn(**arguments))
  return _result_normalizer(result)

  @execute_tool_sync.register(ToolDef)
  def _(tooldef: ToolDef, arguments: str | dict) -> str:
  arguments = _arguments_normalizer(arguments)
- result = (asyncio.run(_coroutine_wrapper(tooldef.execute(**arguments)))
- if asyncio.iscoroutinefunction(tooldef.execute)
+ result = (asyncio.run(tooldef.execute(**arguments))
+ if inspect.iscoroutinefunction(tooldef.execute)
  else tooldef.execute(**arguments))
  return _result_normalizer(result)

  @singledispatch
  async def execute_tool(tool: ToolLike, arguments: str | dict) -> str:
+ """
+ Raises:
+ ValueError: If the tool type is not supported.
+ JSONDecodeError: If the arguments is a string but not valid JSON.
+ """
  raise ValueError(f"Invalid tool type: {type(tool)}")

  @execute_tool.register(FunctionType)
@@ -52,7 +97,7 @@ async def execute_tool(tool: ToolLike, arguments: str | dict) -> str:
  async def _(toolfn: Callable, arguments: str | dict) -> str:
  arguments = _arguments_normalizer(arguments)
  result = (await toolfn(**arguments)
- if asyncio.iscoroutinefunction(toolfn)
+ if inspect.iscoroutinefunction(toolfn)
  else toolfn(**arguments))
  return _result_normalizer(result)

@@ -60,6 +105,6 @@ async def _(toolfn: Callable, arguments: str | dict) -> str:
  async def _(tooldef: ToolDef, arguments: str | dict) -> str:
  arguments = _arguments_normalizer(arguments)
  result = (await tooldef.execute(**arguments)
- if asyncio.iscoroutinefunction(tooldef.execute)
+ if inspect.iscoroutinefunction(tooldef.execute)
  else tooldef.execute(**arguments))
  return _result_normalizer(result)
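For reference, the dispatch above accepts a plain function, a bound method, or a `ToolDef`, and the arguments may be either a dict or the JSON string produced by the model. Below is a minimal sketch based only on the `execute_tool_sync` behaviour shown in this file; the `get_weather` tool and its values are illustrative.

```python
import json
from dais_sdk.tool.execute import execute_tool_sync

def get_weather(city: str) -> str:
    # Stand-in tool; async callables are also accepted and run via asyncio.run().
    return f"sunny in {city}"

# JSON-string arguments are decoded by _arguments_normalizer();
# invalid JSON raises json.JSONDecodeError, as the docstrings above note.
print(execute_tool_sync(get_weather, json.dumps({"city": "Berlin"})))
print(execute_tool_sync(get_weather, {"city": "Berlin"}))
```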
dais_sdk/tool/toolset/mcp_toolset.py CHANGED
@@ -1,4 +1,5 @@
  from dataclasses import replace
+ from typing import override
  from mcp.types import TextContent, ImageContent, AudioContent, ResourceLink, EmbeddedResource, TextResourceContents, BlobResourceContents
  from .toolset import Toolset
  from ...mcp_client.base_mcp_client import McpClient, Tool, ToolResult
@@ -72,9 +73,11 @@ class McpToolset(Toolset):
  for tool in mcp_tools]

  @property
+ @override
  def name(self) -> str:
  return self._client.name

+ @override
  def get_tools(self, namespaced_tool_name: bool = True) -> list[ToolDef]:
  if self._tools_cache is None:
  raise RuntimeError(f"Not connected to MCP server. Call await {self.__class__.__name__}(...).connect() first")
dais_sdk/tool/toolset/python_toolset.py CHANGED
@@ -1,17 +1,17 @@
  import inspect
- from typing import Any, Callable, TypeVar
+ from typing import Any, Callable, override
  from .toolset import Toolset
  from ...types.tool import ToolDef

- F = TypeVar("F", bound=Callable[..., Any])
  TOOL_FLAG = "__is_tool__"

- def python_tool(func: F) -> F:
+ def python_tool[F: Callable[..., Any]](func: F) -> F:
  setattr(func, TOOL_FLAG, True)
  return func

  class PythonToolset(Toolset):
  @property
+ @override
  def name(self) -> str:
  """
  Since the usage of PythonToolset is to inherit and define methods as tools,
@@ -19,6 +19,7 @@ class PythonToolset(Toolset):
  """
  return self.__class__.__name__

+ @override
  def get_tools(self, namespaced_tool_name: bool = True) -> list[ToolDef]:
  result = []
  for _, method in inspect.getmembers(self, predicate=inspect.ismethod):
dais_sdk/tool/utils.py CHANGED
@@ -1,4 +1,15 @@
- from ..types.tool import ToolFn, ToolDef, ToolLike
+ from typing import assert_never
+ from ..types.tool import ToolDef, ToolLike
+
+ def get_tool_name(tool: ToolLike) -> str:
+ if callable(tool):
+ return tool.__name__
+ elif isinstance(tool, ToolDef):
+ return tool.name
+ elif isinstance(tool, dict):
+ return tool.get("name", "")
+ else:
+ assert_never(tool)

  def find_tool_by_name(tools: list[ToolLike], name: str) -> ToolLike | None:
  for tool in tools:
dais_sdk/types/exceptions.py CHANGED
@@ -1,4 +1,6 @@
- from litellm.exceptions import (
+ import json
+ from typing import TYPE_CHECKING
+ from litellm.exceptions import (
  AuthenticationError,
  PermissionDeniedError,
  RateLimitError,
@@ -12,6 +14,27 @@ from litellm.exceptions import (
  Timeout,
  )

+ if TYPE_CHECKING:
+ from .tool import ToolLike
+
+ class LlmToolException(Exception): pass
+
+ class ToolDoesNotExistError(LlmToolException):
+ def __init__(self, tool_name: str):
+ self.tool_name = tool_name
+
+ class ToolArgumentDecodeError(LlmToolException):
+ def __init__(self, tool_name: str, arguments: str, raw_error: json.JSONDecodeError):
+ self.tool_name = tool_name
+ self.arguments = arguments
+ self.raw_error = raw_error
+
+ class ToolExecutionError(LlmToolException):
+ def __init__(self, tool: ToolLike, arguments: str | dict, raw_error: Exception):
+ self.tool = tool
+ self.arguments = arguments
+ self.raw_error = raw_error
+
  __all__ = [
  "AuthenticationError",
  "PermissionDeniedError",
@@ -24,4 +47,9 @@ __all__ = [
  "ContentPolicyViolationError",
  "APIError",
  "Timeout",
+
+ "LlmToolException",
+ "ToolDoesNotExistError",
+ "ToolArgumentDecodeError",
+ "ToolExecutionError",
  ]
dais_sdk/types/message.py CHANGED
@@ -2,7 +2,7 @@ import json
  import dataclasses
  import uuid
  from abc import ABC, abstractmethod
- from typing import Any, Literal, NamedTuple, cast
+ from typing import Any, Literal, cast
  from pydantic import BaseModel, ConfigDict, Field, field_validator
  from litellm.types.utils import (
  Message as LiteLlmMessage,
@@ -59,6 +59,14 @@ class ToolMessage(ChatMessage):
  if isinstance(v, str): return v
  return json.dumps(v, ensure_ascii=False)

+ def with_result(self, result: str | None, error: str | None) -> "ToolMessage":
+ return ToolMessage(
+ tool_call_id=self.tool_call_id,
+ name=self.name,
+ arguments=self.arguments,
+ result=result,
+ error=error)
+
  def to_litellm_message(self) -> ChatCompletionToolMessage:
  if self.result is None and self.error is None:
  raise ValueError(f"ToolMessage({self.id}, {self.name}) is incomplete, "
@@ -178,7 +186,7 @@ class ToolCallChunk:
  arguments: str
  index: int

- MessageChunk = TextChunk | UsageChunk | ReasoningChunk | AudioChunk | ImageChunk | ToolCallChunk
+ type MessageChunk = TextChunk | UsageChunk | ReasoningChunk | AudioChunk | ImageChunk | ToolCallChunk

  def openai_chunk_normalizer(
  chunk: LiteLlmModelResponseStream
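The new `with_result` helper copies an incomplete tool call into a completed `ToolMessage` without repeating the field-by-field construction that `__init__.py` previously did inline. A small sketch, assuming `ToolMessage` accepts the keyword fields shown in that removed constructor call; the IDs and values are illustrative.

```python
from dais_sdk import ToolMessage

# An incomplete tool message as collected from a streamed tool call...
incomplete = ToolMessage(
    tool_call_id="call_0",
    name="get_weather",
    arguments='{"city": "Berlin"}',
    result=None,
    error=None)

# ...and the completed copy produced once the tool has run.
completed = incomplete.with_result("sunny in Berlin", None)
```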
dais_sdk/types/request_params.py CHANGED
@@ -47,9 +47,3 @@ class LlmRequestParams:
  if (tools := self.extract_tools()) is None:
  return None
  return find_tool_by_name(tools, tool_name)
-
- class ToolDoesNotExistError(Exception):
- def __init__(self, tool_name: str):
- self.tool_name = tool_name
- self.message = f"Tool \"{tool_name}\" does not exist in the request params."
- super().__init__(self.message)
dais_sdk/types/tool.py CHANGED
@@ -3,7 +3,7 @@ from collections.abc import Callable
  from typing import Any, Awaitable
  from ..logger import logger

- ToolFn = Callable[..., Any] | Callable[..., Awaitable[Any]]
+ type ToolFn = Callable[..., Any] | Callable[..., Awaitable[Any]]

  """
  RawToolDef example:
@@ -23,7 +23,7 @@ RawToolDef example:
  }
  }
  """
- RawToolDef = dict[str, Any]
+ type RawToolDef = dict[str, Any]

  @dataclasses.dataclass
  class ToolDef:
@@ -44,4 +44,4 @@ class ToolDef:
  execute=tool_fn,
  )

- ToolLike = ToolDef | RawToolDef | ToolFn
+ type ToolLike = ToolDef | RawToolDef | ToolFn
dais_sdk-0.6.2.dist-info/METADATA CHANGED
@@ -1,17 +1,15 @@
  Metadata-Version: 2.4
  Name: dais-sdk
- Version: 0.6.0
+ Version: 0.6.2
  Summary: A wrapper of LiteLLM
  Author-email: BHznJNs <bhznjns@outlook.com>
- Requires-Python: >=3.10
+ Requires-Python: >=3.14
  Description-Content-Type: text/markdown
  Classifier: Development Status :: 3 - Alpha
  Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Programming Language :: Python :: 3 :: Only
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.14
  License-File: LICENSE
  Requires-Dist: litellm>=1.80.0
  Requires-Dist: pydantic>=2.0.0
@@ -19,16 +17,8 @@ Requires-Dist: httpx==0.28.1
  Requires-Dist: mcp==1.25.0
  Requires-Dist: starlette==0.50.0
  Requires-Dist: uvicorn==0.40.0
- Requires-Dist: python-dotenv>=1.2.1 ; extra == "dev"
- Requires-Dist: pytest-cov ; extra == "test"
- Requires-Dist: pytest-mock ; extra == "test"
- Requires-Dist: pytest-runner ; extra == "test"
- Requires-Dist: pytest ; extra == "test"
- Requires-Dist: pytest-github-actions-annotate-failures ; extra == "test"
  Project-URL: Source, https://github.com/Dais-Project/Dais-SDK
  Project-URL: Tracker, https://github.com/Dais-Project/Dais-SDK/issues
- Provides-Extra: dev
- Provides-Extra: test

  # Dais-SDK

@@ -98,3 +88,20 @@ for message in messages:
  print("Tool: ", message.result)
  ```

+ ## Development
+
+ Create virtual environment
+ ```
+ uv venv
+ ```
+
+ Install all dependencies
+ ```
+ uv sync --all-groups
+ ```
+
+ Run test
+ ```
+ uv run pytest
+ ```
+
dais_sdk-0.6.2.dist-info/RECORD ADDED
@@ -0,0 +1,27 @@
+ dais_sdk/__init__.py,sha256=5QAoL8GyyFuKfe4HZ-TiRVj9VIpHc45i2xMMYORoWXU,11805
+ dais_sdk/debug.py,sha256=T7qIy1BeeUGlF40l9JCMMVn8pvvMJAEQeG4adQbOydA,69
+ dais_sdk/logger.py,sha256=99vJAQRKcu4CuHgZYAJ2zDQtGea6Bn3vJJrS-mtza7c,677
+ dais_sdk/param_parser.py,sha256=QIxt1izv69r725pzU1qhq5bilcrGUgzlpiItHEWOrdc,1874
+ dais_sdk/stream.py,sha256=yu9Zvr3CUrPD9sGsjqwNXy_233Tw25Hd72kwjrraMAM,3170
+ dais_sdk/mcp_client/__init__.py,sha256=B86aC4nnGzwfjk7H0CZ38YlMDiEx3EIDEAgJKUnwqIU,405
+ dais_sdk/mcp_client/base_mcp_client.py,sha256=jWAfinzY00aL-qdNgyzYXKM-LhPHkmdqL24Uw439v-0,1055
+ dais_sdk/mcp_client/local_mcp_client.py,sha256=fhCb69IAtTpaSQlicPqUNuuy0wED8K06ndw05qjjUcM,1956
+ dais_sdk/mcp_client/oauth_server.py,sha256=pELKmAjE1QoNpy3_6BPNoORwWYu0j2BYOnnVfMd0iOA,3361
+ dais_sdk/mcp_client/remote_mcp_client.py,sha256=8iPX-lGMRCR0FzT8MFi2CvYJXUCWHI_jYGaI30NaBuc,6039
+ dais_sdk/tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dais_sdk/tool/execute.py,sha256=wqIlfVounvK-I4mJ48iDXXqalySNQUplPPauNqFfSls,4220
+ dais_sdk/tool/prepare.py,sha256=5UZiQc64Ao30Gh3aHqeJGeyUq7ud9A--GUU5QxYPC0M,11572
+ dais_sdk/tool/utils.py,sha256=jYv_U6BHvi5Hh7H21_9nDRB9gp_qM_UEle0xq2XwX4M,706
+ dais_sdk/tool/toolset/__init__.py,sha256=uh8hGSl1KSn0JI45fCPJnnk31hflOI0mYxC8cdbH-OQ,309
+ dais_sdk/tool/toolset/mcp_toolset.py,sha256=J4gOsy8aR61Ui8tUj58AOG51fhKvLPWIL_SWIs-kmrE,4112
+ dais_sdk/tool/toolset/python_toolset.py,sha256=p4QeBY_yZpzpnDT5w1S4EECtPOwt8dF8T_rTQv7CtME,1080
+ dais_sdk/tool/toolset/toolset.py,sha256=X1xqWiWov4fboWQowB_YgJ_Tc-fIDmxbP8GreTj_7ME,322
+ dais_sdk/types/__init__.py,sha256=-i1MYWIlUfjQIX0xZJta6phQNL44vXPSIx1eGyIYZXc,710
+ dais_sdk/types/exceptions.py,sha256=ZlQW6QfzPatbfHJi_1s_2XBAHK4bZaeSreI3SUAX_5c,1439
+ dais_sdk/types/message.py,sha256=a65Q6K3Hq1aVhsedUv2XjyJq2rQQJi9SkEAPPHcLtFw,7770
+ dais_sdk/types/request_params.py,sha256=8Jq-aTeK933YENE-9ay_8q88hEr-oeZZGC7l52tOKEM,1635
+ dais_sdk/types/tool.py,sha256=GT6gxjAGdnVzk6W84nPzZb24bLznzqilkHN6uvocf5M,1377
+ dais_sdk-0.6.2.dist-info/licenses/LICENSE,sha256=Qwd-hsctqFqJErH4OWNNttmd0jccbH0ZCsf-YtAFNdo,1064
+ dais_sdk-0.6.2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ dais_sdk-0.6.2.dist-info/METADATA,sha256=SxuWO-Ip4vrW4qw4h_c1sW9SqBDbsbYrO_Ty7LYd5nQ,2612
+ dais_sdk-0.6.2.dist-info/RECORD,,
dais_sdk-0.6.2.dist-info/licenses/LICENSE CHANGED
@@ -1,6 +1,6 @@
  MIT License

- Copyright (c) 2025 BHznJNs
+ Copyright (c) 2026 BHznJNs

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
dais_sdk-0.6.0.dist-info/RECORD REMOVED
@@ -1,27 +0,0 @@
- dais_sdk/__init__.py,sha256=84ALTOJxrC_SPKPrB5d1eUhBWBoOzRD3hn-sPuk3MUc,11245
- dais_sdk/debug.py,sha256=T7qIy1BeeUGlF40l9JCMMVn8pvvMJAEQeG4adQbOydA,69
- dais_sdk/logger.py,sha256=99vJAQRKcu4CuHgZYAJ2zDQtGea6Bn3vJJrS-mtza7c,677
- dais_sdk/param_parser.py,sha256=gXRFoCi74ZA9xdisqMPgQmWR2i6aTlPEeot78y2vyhM,1909
- dais_sdk/stream.py,sha256=yu9Zvr3CUrPD9sGsjqwNXy_233Tw25Hd72kwjrraMAM,3170
- dais_sdk/mcp_client/__init__.py,sha256=B86aC4nnGzwfjk7H0CZ38YlMDiEx3EIDEAgJKUnwqIU,405
- dais_sdk/mcp_client/base_mcp_client.py,sha256=jWAfinzY00aL-qdNgyzYXKM-LhPHkmdqL24Uw439v-0,1055
- dais_sdk/mcp_client/local_mcp_client.py,sha256=unuS-cp4zi0A2x2EYnDFzSpJUzOgVQbnEK0mLBFudy8,1871
- dais_sdk/mcp_client/oauth_server.py,sha256=pELKmAjE1QoNpy3_6BPNoORwWYu0j2BYOnnVfMd0iOA,3361
- dais_sdk/mcp_client/remote_mcp_client.py,sha256=JAy5zszMQeHdxsgK2seQ6kaN40kxhvnFTKSLT9uWDXU,5952
- dais_sdk/tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- dais_sdk/tool/execute.py,sha256=Yj4GHty0aHCsB38SI8WJ5ZA_FmqaKRRUqhfsQJR9l04,2507
- dais_sdk/tool/prepare.py,sha256=5UZiQc64Ao30Gh3aHqeJGeyUq7ud9A--GUU5QxYPC0M,11572
- dais_sdk/tool/utils.py,sha256=A_4Jx1BacRX1KmK3t_9rDXrmSXj6v4fzNtqLsN12S0I,420
- dais_sdk/tool/toolset/__init__.py,sha256=uh8hGSl1KSn0JI45fCPJnnk31hflOI0mYxC8cdbH-OQ,309
- dais_sdk/tool/toolset/mcp_toolset.py,sha256=-sivM6EUiC3V3gcISnh4u5dosf-lnwVjd7YM3L0U3Ik,4056
- dais_sdk/tool/toolset/python_toolset.py,sha256=JlYw49LH9xDL6tk_82EogqxW2U71hhsygamrb-lNvcE,1071
- dais_sdk/tool/toolset/toolset.py,sha256=X1xqWiWov4fboWQowB_YgJ_Tc-fIDmxbP8GreTj_7ME,322
- dais_sdk/types/__init__.py,sha256=-i1MYWIlUfjQIX0xZJta6phQNL44vXPSIx1eGyIYZXc,710
- dais_sdk/types/exceptions.py,sha256=hIGu06htOJxfEBAHx7KTvLQr0Y8GYnBLFJFlr_IGpDs,602
- dais_sdk/types/message.py,sha256=M5ZOkpF3QDtHsAduDFqO_-8NLv5z9PJvEBvnaXUz4us,7503
- dais_sdk/types/request_params.py,sha256=fWo6gF_DvaThvUEIGUkJ3O7BpoJXF5Oe9WYD0Ky9iws,1895
- dais_sdk/types/tool.py,sha256=s0sPwXPl-BeijWgRxgXkXguz_quzmP92sVS2aT7n_nA,1362
- dais_sdk-0.6.0.dist-info/licenses/LICENSE,sha256=cTeVgQVJJcRdm1boa2P1FBnOeXfA_egV6s4PouyrCxg,1064
- dais_sdk-0.6.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- dais_sdk-0.6.0.dist-info/METADATA,sha256=FeP-GU4pFHaGpHedW95e-VN9FrF5SoqksH9MdaSciFo,2910
- dais_sdk-0.6.0.dist-info/RECORD,,