dao-ai 0.0.22__py3-none-any.whl → 0.0.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dao_ai/config.py CHANGED
@@ -666,6 +666,10 @@ class ConnectionModel(BaseModel, HasFullName, IsDatabricksResource):
         return [
             "catalog.connections",
             "serving.serving-endpoints",
+            "mcp.genie",
+            "mcp.functions",
+            "mcp.vectorsearch",
+            "mcp.external",
         ]
 
     def as_resources(self) -> Sequence[DatabricksResource]:
@@ -988,6 +992,7 @@ class McpFunctionModel(BaseFunctionModel, HasFullName):
     transport: TransportType = TransportType.STREAMABLE_HTTP
     command: Optional[str] = "python"
     url: Optional[AnyVariable] = None
+    connection: Optional[ConnectionModel] = None
     headers: dict[str, AnyVariable] = Field(default_factory=dict)
     args: list[str] = Field(default_factory=list)
     pat: Optional[AnyVariable] = None
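
For orientation, a minimal, hypothetical sketch of how the new `connection` field might be populated (class and field names are taken from the hunks above; the URL and connection name are placeholders, `TransportType` is assumed to be importable from `dao_ai.config`, and both models may require additional fields in a real configuration):

```python
# Hypothetical usage sketch; not part of the package diff.
from dao_ai.config import ConnectionModel, McpFunctionModel, TransportType

mcp_function = McpFunctionModel(
    name="github_mcp",
    transport=TransportType.STREAMABLE_HTTP,
    # Placeholder workspace host; the path follows the external MCP endpoint pattern.
    url="https://workspace.databricks.com/api/2.0/mcp/external/github_u2m_connection",
    # New in 0.0.23: a UC Connection that supplies OAuth credentials for the MCP endpoint.
    connection=ConnectionModel(name="github_u2m_connection"),
)
```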
dao_ai/tools/__init__.py CHANGED
@@ -7,6 +7,7 @@ from dao_ai.tools.core import (
 from dao_ai.tools.genie import create_genie_tool
 from dao_ai.tools.mcp import create_mcp_tools
 from dao_ai.tools.python import create_factory_tool, create_python_tool
+from dao_ai.tools.slack import create_send_slack_message_tool
 from dao_ai.tools.time import (
     add_time_tool,
     current_time_tool,
@@ -27,6 +28,7 @@ __all__ = [
     "create_hooks",
     "create_mcp_tools",
     "create_python_tool",
+    "create_send_slack_message_tool",
     "create_tools",
     "create_uc_tools",
     "create_vector_search_tool",
dao_ai/tools/mcp.py CHANGED
@@ -1,10 +1,14 @@
 import asyncio
 from typing import Any, Sequence
 
+from databricks_mcp import DatabricksOAuthClientProvider
 from langchain_core.runnables.base import RunnableLike
 from langchain_core.tools import tool as create_tool
 from langchain_mcp_adapters.client import MultiServerMCPClient
+from langchain_mcp_adapters.tools import load_mcp_tools
 from loguru import logger
+from mcp import ClientSession
+from mcp.client.streamable_http import streamablehttp_client
 from mcp.types import ListToolsResult, Tool
 
 from dao_ai.config import (
@@ -20,98 +24,141 @@ def create_mcp_tools(
     """
     Create tools for invoking Databricks MCP functions.
 
+    Supports both direct MCP connections and UC Connection-based MCP access.
     Uses session-based approach to handle authentication token expiration properly.
+
+    Based on: https://docs.databricks.com/aws/en/generative-ai/mcp/external-mcp
     """
     logger.debug(f"create_mcp_tools: {function}")
 
-    def _create_fresh_connection() -> dict[str, Any]:
-        logger.debug("Creating fresh connection...")
-        """Create connection config with fresh authentication headers."""
-        if function.transport == TransportType.STDIO:
-            return {
-                "command": function.command,
-                "args": function.args,
-                "transport": function.transport,
-            }
-
-        # For HTTP transport, generate fresh headers
-        headers = function.headers.copy() if function.headers else {}
-
-        if "Authorization" not in headers:
-            logger.debug("Generating fresh authentication token for MCP function")
-
-            from dao_ai.config import value_of
-            from dao_ai.providers.databricks import DatabricksProvider
-
-            try:
-                provider = DatabricksProvider(
-                    workspace_host=value_of(function.workspace_host),
-                    client_id=value_of(function.client_id),
-                    client_secret=value_of(function.client_secret),
-                    pat=value_of(function.pat),
-                )
-                headers["Authorization"] = f"Bearer {provider.create_token()}"
-                logger.debug("Generated fresh authentication token")
-            except Exception as e:
-                logger.error(f"Failed to create fresh token: {e}")
-        else:
-            logger.debug("Using existing authentication token")
+    # Check if using UC Connection or direct MCP connection
+    if function.connection:
+        # Use UC Connection approach with DatabricksOAuthClientProvider
+        logger.debug(f"Using UC Connection for MCP: {function.connection.name}")
+        logger.debug(f"MCP URL: {function.url}")
+
+        async def _get_tools_with_connection():
+            """Get tools using DatabricksOAuthClientProvider."""
+            workspace_client = function.connection.workspace_client
+
+            async with streamablehttp_client(
+                function.url, auth=DatabricksOAuthClientProvider(workspace_client)
+            ) as (read_stream, write_stream, _):
+                async with ClientSession(read_stream, write_stream) as session:
+                    # Initialize and list tools
+                    await session.initialize()
+                    tools = await load_mcp_tools(session)
+                    return tools
 
-    response = {
-        "url": function.url,
-        "transport": function.transport,
-        "headers": headers,
-    }
-
-    return response
+        try:
+            langchain_tools = asyncio.run(_get_tools_with_connection())
+            logger.debug(
+                f"Retrieved {len(langchain_tools)} MCP tools via UC Connection"
+            )
 
-    # Get available tools from MCP server
-    async def _list_mcp_tools():
-        connection = _create_fresh_connection()
-        client = MultiServerMCPClient({function.name: connection})
+            # Wrap tools with human-in-the-loop if needed
+            wrapped_tools = [
+                as_human_in_the_loop(tool, function) for tool in langchain_tools
+            ]
+            return wrapped_tools
 
-        try:
-            async with client.session(function.name) as session:
-                return await session.list_tools()
         except Exception as e:
-            logger.error(f"Failed to list MCP tools: {e}")
-            return []
-
-    # Note: This still needs to run sync during tool creation/registration
-    # The actual tool execution will be async
-    try:
-        mcp_tools: list | ListToolsResult = asyncio.run(_list_mcp_tools())
-        if isinstance(mcp_tools, ListToolsResult):
-            mcp_tools = mcp_tools.tools
-
-        logger.debug(f"Retrieved {len(mcp_tools)} MCP tools")
-    except Exception as e:
-        logger.error(f"Failed to get tools from MCP server: {e}")
-        raise RuntimeError(
-            f"Failed to list MCP tools for function '{function.name}' with transport '{function.transport}' and URL '{function.url}': {e}"
-        )
-
-    # Create wrapper tools with fresh session per invocation
-    def _create_tool_wrapper(mcp_tool: Tool) -> RunnableLike:
-        @create_tool(
-            mcp_tool.name,
-            description=mcp_tool.description or f"MCP tool: {mcp_tool.name}",
-            args_schema=mcp_tool.inputSchema,
-        )
-        async def tool_wrapper(**kwargs):
-            """Execute MCP tool with fresh session and authentication."""
-            logger.debug(f"Invoking MCP tool {mcp_tool.name} with fresh session")
+            logger.error(f"Failed to get tools from MCP server via UC Connection: {e}")
+            raise RuntimeError(
+                f"Failed to list MCP tools for function '{function.name}' via UC Connection '{function.connection.name}': {e}"
+            )
+
+    else:
+        # Use direct MCP connection with MultiServerMCPClient
+        logger.debug("Using direct MCP connection with MultiServerMCPClient")
+
+        def _create_fresh_connection() -> dict[str, Any]:
+            """Create connection config with fresh authentication headers."""
+            logger.debug("Creating fresh connection...")
+
+            if function.transport == TransportType.STDIO:
+                return {
+                    "command": function.command,
+                    "args": function.args,
+                    "transport": function.transport,
+                }
+
+            # For HTTP transport, generate fresh headers
+            headers = function.headers.copy() if function.headers else {}
+
+            if "Authorization" not in headers:
+                logger.debug("Generating fresh authentication token for MCP function")
+
+                from dao_ai.config import value_of
+                from dao_ai.providers.databricks import DatabricksProvider
+
+                try:
+                    provider = DatabricksProvider(
+                        workspace_host=value_of(function.workspace_host),
+                        client_id=value_of(function.client_id),
+                        client_secret=value_of(function.client_secret),
+                        pat=value_of(function.pat),
+                    )
+                    headers["Authorization"] = f"Bearer {provider.create_token()}"
+                    logger.debug("Generated fresh authentication token")
+                except Exception as e:
+                    logger.error(f"Failed to create fresh token: {e}")
+            else:
+                logger.debug("Using existing authentication token")
 
+            return {
+                "url": function.url,
+                "transport": function.transport,
+                "headers": headers,
+            }
+
+        # Get available tools from MCP server
+        async def _list_mcp_tools():
            connection = _create_fresh_connection()
            client = MultiServerMCPClient({function.name: connection})
 
            try:
                async with client.session(function.name) as session:
-                return await session.call_tool(mcp_tool.name, kwargs)
+                    return await session.list_tools()
            except Exception as e:
-                logger.error(f"MCP tool {mcp_tool.name} failed: {e}")
-                raise
+                logger.error(f"Failed to list MCP tools: {e}")
+                return []
 
-        return as_human_in_the_loop(tool_wrapper, function)
+        # Note: This still needs to run sync during tool creation/registration
+        # The actual tool execution will be async
+        try:
+            mcp_tools: list[Tool] | ListToolsResult = asyncio.run(_list_mcp_tools())
+            if isinstance(mcp_tools, ListToolsResult):
+                mcp_tools = mcp_tools.tools
 
-    return [_create_tool_wrapper(tool) for tool in mcp_tools]
+            logger.debug(f"Retrieved {len(mcp_tools)} MCP tools")
+        except Exception as e:
+            logger.error(f"Failed to get tools from MCP server: {e}")
+            raise RuntimeError(
+                f"Failed to list MCP tools for function '{function.name}' with transport '{function.transport}' and URL '{function.url}': {e}"
+            )
+
+        # Create wrapper tools with fresh session per invocation
+        def _create_tool_wrapper(mcp_tool: Tool) -> RunnableLike:
+            @create_tool(
+                mcp_tool.name,
+                description=mcp_tool.description or f"MCP tool: {mcp_tool.name}",
+                args_schema=mcp_tool.inputSchema,
+            )
+            async def tool_wrapper(**kwargs):
+                """Execute MCP tool with fresh session and authentication."""
+                logger.debug(f"Invoking MCP tool {mcp_tool.name} with fresh session")
+
+                connection = _create_fresh_connection()
+                client = MultiServerMCPClient({function.name: connection})
+
+                try:
+                    async with client.session(function.name) as session:
+                        return await session.call_tool(mcp_tool.name, kwargs)
+                except Exception as e:
+                    logger.error(f"MCP tool {mcp_tool.name} failed: {e}")
+                    raise
+
+            return as_human_in_the_loop(tool_wrapper, function)
+
+        return [_create_tool_wrapper(tool) for tool in mcp_tools]
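
For readers unfamiliar with the `databricks_mcp` / `mcp` client APIs used in the new UC Connection branch, here is the same session flow as a self-contained sketch (the imports mirror the hunk above; the MCP URL is a placeholder and `WorkspaceClient()` assumes default-profile Databricks authentication):

```python
# Standalone sketch of the OAuth-backed MCP session flow used above; illustrative only.
import asyncio

from databricks.sdk import WorkspaceClient
from databricks_mcp import DatabricksOAuthClientProvider
from langchain_mcp_adapters.tools import load_mcp_tools
from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

# Placeholder endpoint; in dao-ai this value comes from McpFunctionModel.url.
MCP_URL = "https://workspace.databricks.com/api/2.0/mcp/external/github_u2m_connection"


async def list_connection_tools():
    workspace_client = WorkspaceClient()  # default-profile auth assumed
    async with streamablehttp_client(
        MCP_URL, auth=DatabricksOAuthClientProvider(workspace_client)
    ) as (read_stream, write_stream, _):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()
            return await load_mcp_tools(session)  # LangChain-compatible tools


tools = asyncio.run(list_connection_tools())
print([tool.name for tool in tools])
```

The objects returned by `load_mcp_tools` are LangChain tools, which is why `create_mcp_tools` can wrap them with `as_human_in_the_loop` and hand them straight to an agent.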
dao_ai/tools/slack.py ADDED
@@ -0,0 +1,136 @@
+from typing import Any, Callable, Optional
+
+from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
+from langchain_core.tools import tool
+from loguru import logger
+from requests import Response
+
+from dao_ai.config import ConnectionModel
+
+
+def _find_channel_id_by_name(
+    connection: ConnectionModel, channel_name: str
+) -> Optional[str]:
+    """
+    Find a Slack channel ID by channel name using the conversations.list API.
+
+    Based on: https://docs.databricks.com/aws/en/generative-ai/agent-framework/slack-agent
+
+    Args:
+        connection: ConnectionModel with workspace_client
+        channel_name: Name of the Slack channel (with or without '#' prefix)
+
+    Returns:
+        Channel ID if found, None otherwise
+    """
+    # Remove '#' prefix if present
+    clean_name = channel_name.lstrip("#")
+
+    logger.debug(f"Looking up Slack channel ID for channel name: {clean_name}")
+
+    try:
+        # Call Slack API to list conversations
+        response: Response = connection.workspace_client.serving_endpoints.http_request(
+            conn=connection.name,
+            method=ExternalFunctionRequestHttpMethod.GET,
+            path="/api/conversations.list",
+        )
+
+        if response.status_code != 200:
+            logger.error(f"Failed to list Slack channels: {response.text}")
+            return None
+
+        # Parse response
+        data = response.json()
+
+        if not data.get("ok"):
+            logger.error(f"Slack API returned error: {data.get('error')}")
+            return None
+
+        # Search for channel by name
+        channels = data.get("channels", [])
+        for channel in channels:
+            if channel.get("name") == clean_name:
+                channel_id = channel.get("id")
+                logger.debug(
+                    f"Found channel ID '{channel_id}' for channel name '{clean_name}'"
+                )
+                return channel_id
+
+        logger.warning(f"Channel '{clean_name}' not found in Slack workspace")
+        return None
+
+    except Exception as e:
+        logger.error(f"Error looking up Slack channel: {e}")
+        return None
+
+
+def create_send_slack_message_tool(
+    connection: ConnectionModel | dict[str, Any],
+    channel_id: Optional[str] = None,
+    channel_name: Optional[str] = None,
+    name: Optional[str] = None,
+    description: Optional[str] = None,
+) -> Callable[[str], Any]:
+    """
+    Create a tool that sends a message to a Slack channel.
+
+    Args:
+        connection: Unity Catalog connection to Slack (ConnectionModel or dict)
+        channel_id: Slack channel ID (e.g., 'C1234567890'). If not provided, channel_name is used.
+        channel_name: Slack channel name (e.g., 'general' or '#general'). Used to lookup channel_id if not provided.
+        name: Custom tool name (default: 'send_slack_message')
+        description: Custom tool description
+
+    Returns:
+        A tool function that sends messages to the specified Slack channel
+
+    Based on: https://docs.databricks.com/aws/en/generative-ai/agent-framework/slack-agent
+    """
+    logger.debug("create_send_slack_message_tool")
+
+    # Validate inputs
+    if channel_id is None and channel_name is None:
+        raise ValueError("Either channel_id or channel_name must be provided")
+
+    # Convert connection dict to ConnectionModel if needed
+    if isinstance(connection, dict):
+        connection = ConnectionModel(**connection)
+
+    # Look up channel_id from channel_name if needed
+    if channel_id is None and channel_name is not None:
+        logger.debug(f"Looking up channel_id for channel_name: {channel_name}")
+        channel_id = _find_channel_id_by_name(connection, channel_name)
+        if channel_id is None:
+            raise ValueError(f"Could not find Slack channel with name '{channel_name}'")
+        logger.debug(
+            f"Resolved channel_name '{channel_name}' to channel_id '{channel_id}'"
+        )
+
+    if name is None:
+        name = "send_slack_message"
+
+    if description is None:
+        description = "Send a message to a Slack channel"
+
+    @tool(
+        name_or_callable=name,
+        description=description,
+    )
+    def send_slack_message(text: str) -> str:
+        response: Response = connection.workspace_client.serving_endpoints.http_request(
+            conn=connection.name,
+            method=ExternalFunctionRequestHttpMethod.POST,
+            path="/api/chat.postMessage",
+            json={"channel": channel_id, "text": text},
+        )
+
+        if response.status_code == 200:
+            return "Successful request sent to Slack: " + response.text
+        else:
+            return (
+                "Encountered failure when executing request. Message from Call: "
+                + response.text
+            )
+
+    return send_slack_message
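
A hypothetical usage sketch of the new factory (the connection and channel names are placeholders; `ConnectionModel` may need more configuration than a bare `name`, and the underlying Unity Catalog connection to Slack must already exist as described in the linked Databricks guide):

```python
# Hypothetical usage sketch; not part of the package diff.
from dao_ai.config import ConnectionModel
from dao_ai.tools.slack import create_send_slack_message_tool

slack_tool = create_send_slack_message_tool(
    connection=ConnectionModel(name="slack_connection"),  # placeholder UC connection
    channel_name="general",  # resolved to a channel ID via conversations.list
    description="Post status updates to #general",
)

# The factory returns a LangChain tool; it can be invoked directly or handed to an agent.
print(slack_tool.invoke({"text": "Deployment finished."}))
```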
dao_ai/utils.py CHANGED
@@ -43,6 +43,7 @@ def get_installed_packages() -> dict[str, str]:
     packages: Sequence[str] = [
         f"databricks-agents=={version('databricks-agents')}",
         f"databricks-langchain=={version('databricks-langchain')}",
+        f"databricks-mcp=={version('databricks-mcp')}",
         f"databricks-sdk[openai]=={version('databricks-sdk')}",
         f"duckduckgo-search=={version('duckduckgo-search')}",
         f"langchain=={version('langchain')}",
@@ -56,11 +57,14 @@ def get_installed_packages() -> dict[str, str]:
         f"langgraph-swarm=={version('langgraph-swarm')}",
         f"langmem=={version('langmem')}",
         f"loguru=={version('loguru')}",
+        f"mcp=={version('mcp')}",
         f"mlflow=={version('mlflow')}",
+        f"nest-asyncio=={version('nest-asyncio')}",
         f"openevals=={version('openevals')}",
         f"openpyxl=={version('openpyxl')}",
         f"psycopg[binary,pool]=={version('psycopg')}",
         f"pydantic=={version('pydantic')}",
+        f"pyyaml=={version('pyyaml')}",
         f"unitycatalog-ai[databricks]=={version('unitycatalog-ai')}",
         f"unitycatalog-langchain[databricks]=={version('unitycatalog-langchain')}",
     ]
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.0.22
+Version: 0.0.23
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
@@ -26,6 +26,7 @@ Classifier: Topic :: System :: Distributed Computing
 Requires-Python: >=3.12
 Requires-Dist: databricks-agents>=1.6.1
 Requires-Dist: databricks-langchain>=0.8.0
+Requires-Dist: databricks-mcp>=0.3.0
 Requires-Dist: databricks-sdk[openai]>=0.67.0
 Requires-Dist: duckduckgo-search>=8.0.2
 Requires-Dist: grandalf>=0.8
@@ -653,7 +654,7 @@ test:
 #### 4. MCP (Model Context Protocol) Tools (`type: mcp`)
 MCP tools allow interaction with external services that implement the Model Context Protocol, supporting both HTTP and stdio transports.
 
-**Configuration Example:**
+**Configuration Example (Direct URL):**
 ```yaml
 tools:
   weather_tool_mcp:
@@ -664,8 +665,30 @@ test:
       transport: streamable_http
       url: http://localhost:8000/mcp
 ```
+
+**Configuration Example (Unity Catalog Connection):**
+MCP tools can also use Unity Catalog Connections for secure, governed access with on-behalf-of-user capabilities. The connection provides OAuth authentication, while the URL specifies the endpoint:
+```yaml
+resources:
+  connections:
+    github_connection:
+      name: github_u2m_connection # UC Connection name
+
+tools:
+  github_mcp:
+    name: github_mcp
+    function:
+      type: mcp
+      name: github_mcp
+      transport: streamable_http
+      url: https://workspace.databricks.com/api/2.0/mcp/external/github_u2m_connection # MCP endpoint URL
+      connection: *github_connection # UC Connection provides OAuth authentication
+```
+
 **Development:**
-- Ensure the MCP service is running and accessible at the specified URL or command. The framework will handle the MCP protocol communication automatically.
+- **For direct URL connections**: Ensure the MCP service is running and accessible at the specified URL or command. Provide OAuth credentials (client_id, client_secret) or PAT for authentication.
+- **For UC Connection**: URL is required to specify the endpoint. The connection provides OAuth authentication via the workspace client. Ensure the connection is configured in Unity Catalog with appropriate MCP scopes (`mcp.genie`, `mcp.functions`, `mcp.vectorsearch`, `mcp.external`).
+- The framework will handle the MCP protocol communication automatically, including session management and authentication.
 
 ### Configuring New Agents
 
@@ -3,7 +3,7 @@ dao_ai/agent_as_code.py,sha256=kPSeDz2-1jRaed1TMs4LA3VECoyqe9_Ed2beRLB9gXQ,472
 dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
 dao_ai/chat_models.py,sha256=uhwwOTeLyHWqoTTgHrs4n5iSyTwe4EQcLKnh3jRxPWI,8626
 dao_ai/cli.py,sha256=Aez2TQW3Q8Ho1IaIkRggt0NevDxAAVPjXkePC5GPJF0,20429
-dao_ai/config.py,sha256=Kh0oJwWr2dhqrOriOEgjMs2CGhOdMYh1hTGO7sAPdw8,55268
+dao_ai/config.py,sha256=XHU6xkRAoTeiZYH5ns_fLwcR6EaxRAGeMwRSoW3n0S8,55431
 dao_ai/graph.py,sha256=APYc2y3cig4P52X4sOHSFSZNK8j5EtEPJLFwWeJ3KQQ,7956
 dao_ai/guardrails.py,sha256=4TKArDONRy8RwHzOT1plZ1rhy3x9GF_aeGpPCRl6wYA,4016
 dao_ai/messages.py,sha256=xl_3-WcFqZKCFCiov8sZOPljTdM3gX3fCHhxq-xFg2U,7005
@@ -12,7 +12,7 @@ dao_ai/nodes.py,sha256=SSuFNTXOdFaKg_aX-yUkQO7fM9wvNGu14lPXKDapU1U,8461
 dao_ai/prompts.py,sha256=vpmIbWs_szXUgNNDs5Gh2LcxKZti5pHDKSfoClUcgX0,1289
 dao_ai/state.py,sha256=_lF9krAYYjvFDMUwZzVKOn0ZnXKcOrbjWKdre0C5B54,1137
 dao_ai/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dao_ai/utils.py,sha256=A7BR28Rv7tH9B_lLVKlgBnWtcPeQ-Bafm2f57oTrUlQ,4337
+dao_ai/utils.py,sha256=yXgqHrYdO5qDxgxUs2G5XJeLFgwg8D0BIJvbFkqSbhs,4519
 dao_ai/vector_search.py,sha256=jlaFS_iizJ55wblgzZmswMM3UOL-qOp2BGJc0JqXYSg,2839
 dao_ai/hooks/__init__.py,sha256=LlHGIuiZt6vGW8K5AQo1XJEkBP5vDVtMhq0IdjcLrD4,417
 dao_ai/hooks/core.py,sha256=ZShHctUSoauhBgdf1cecy9-D7J6-sGn-pKjuRMumW5U,6663
@@ -23,18 +23,19 @@ dao_ai/memory/postgres.py,sha256=vvI3osjx1EoU5GBA6SCUstTBKillcmLl12hVgDMjfJY,153
 dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/providers/base.py,sha256=-fjKypCOk28h6vioPfMj9YZSw_3Kcbi2nMuAyY7vX9k,1383
 dao_ai/providers/databricks.py,sha256=CFZ2RojcTjiJ1aGwNI3_0qCGf339w2o5h9CRDKNesLs,39825
-dao_ai/tools/__init__.py,sha256=ye6MHaJY7tUnJ8336YJiLxuZr55zDPNdOw6gm7j5jlc,1103
+dao_ai/tools/__init__.py,sha256=G5-5Yi6zpQOH53b5IzLdtsC6g0Ep6leI5GxgxOmgw7Q,1203
 dao_ai/tools/agent.py,sha256=WbQnyziiT12TLMrA7xK0VuOU029tdmUBXbUl-R1VZ0Q,1886
 dao_ai/tools/core.py,sha256=Kei33S8vrmvPOAyrFNekaWmV2jqZ-IPS1QDSvU7RZF0,1984
 dao_ai/tools/genie.py,sha256=8HSOCzSg6PlBzBYXMmNfUnl-LO03p3Ki3fxLPm_dhPg,15051
 dao_ai/tools/human_in_the_loop.py,sha256=yk35MO9eNETnYFH-sqlgR-G24TrEgXpJlnZUustsLkI,3681
-dao_ai/tools/mcp.py,sha256=auEt_dwv4J26fr5AgLmwmnAsI894-cyuvkvjItzAUxs,4419
+dao_ai/tools/mcp.py,sha256=RAAG97boEDJKlX7X_XUz-l-nH5DdqtHUG_I2zw1lWNk,6844
 dao_ai/tools/python.py,sha256=XcQiTMshZyLUTVR5peB3vqsoUoAAy8gol9_pcrhddfI,1831
+dao_ai/tools/slack.py,sha256=SCvyVcD9Pv_XXPXePE_fSU1Pd8VLTEkKDLvoGTZWy2Y,4775
 dao_ai/tools/time.py,sha256=Y-23qdnNHzwjvnfkWvYsE7PoWS1hfeKy44tA7sCnNac,8759
 dao_ai/tools/unity_catalog.py,sha256=uX_h52BuBAr4c9UeqSMI7DNz3BPRLeai5tBVW4sJqRI,13113
 dao_ai/tools/vector_search.py,sha256=EDYQs51zIPaAP0ma1D81wJT77GQ-v-cjb2XrFVWfWdg,2621
-dao_ai-0.0.22.dist-info/METADATA,sha256=kqyr-YBFC_fs-PHknnvm4Ahhad8Pfac0gTb8vKydHMw,41380
-dao_ai-0.0.22.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-dao_ai-0.0.22.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
-dao_ai-0.0.22.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
-dao_ai-0.0.22.dist-info/RECORD,,
+dao_ai-0.0.23.dist-info/METADATA,sha256=6GfCnhhQN9t4x1LX8mUHsOTfr4mgdGR1Xx070pjIm_g,42638
+dao_ai-0.0.23.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dao_ai-0.0.23.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.0.23.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.0.23.dist-info/RECORD,,