dao-ai 0.0.25__py3-none-any.whl → 0.0.27__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dao_ai/tools/mcp.py CHANGED
@@ -5,7 +5,6 @@ from databricks_mcp import DatabricksOAuthClientProvider
 from langchain_core.runnables.base import RunnableLike
 from langchain_core.tools import tool as create_tool
 from langchain_mcp_adapters.client import MultiServerMCPClient
-from langchain_mcp_adapters.tools import load_mcp_tools
 from loguru import logger
 from mcp import ClientSession
 from mcp.client.streamable_http import streamablehttp_client
@@ -31,26 +30,17 @@ def create_mcp_tools(
     """
     logger.debug(f"create_mcp_tools: {function}")

+    # Get MCP URL - handles all convenience objects (connection, genie_room, warehouse, etc.)
+    mcp_url = function.mcp_url
+    logger.debug(f"Using MCP URL: {mcp_url}")
+
     # Check if using UC Connection or direct MCP connection
     if function.connection:
         # Use UC Connection approach with DatabricksOAuthClientProvider
         logger.debug(f"Using UC Connection for MCP: {function.connection.name}")

-        # Construct URL if not provided
-        if function.url:
-            mcp_url = function.url
-            logger.debug(f"Using provided MCP URL: {mcp_url}")
-        else:
-            # Construct URL from workspace host and connection name
-            # Pattern: https://{workspace_host}/api/2.0/mcp/external/{connection_name}
-            workspace_client = function.connection.workspace_client
-            workspace_host = workspace_client.config.host
-            connection_name = function.connection.name
-            mcp_url = f"{workspace_host}/api/2.0/mcp/external/{connection_name}"
-            logger.debug(f"Constructed MCP URL from connection: {mcp_url}")
-
-        async def _get_tools_with_connection():
-            """Get tools using DatabricksOAuthClientProvider."""
+        async def _list_tools_with_connection():
+            """List available tools using DatabricksOAuthClientProvider."""
             workspace_client = function.connection.workspace_client

             async with streamablehttp_client(
@@ -59,20 +49,16 @@ def create_mcp_tools(
                 async with ClientSession(read_stream, write_stream) as session:
                     # Initialize and list tools
                     await session.initialize()
-                    tools = await load_mcp_tools(session)
-                    return tools
+                    return await session.list_tools()

         try:
-            langchain_tools = asyncio.run(_get_tools_with_connection())
-            logger.debug(
-                f"Retrieved {len(langchain_tools)} MCP tools via UC Connection"
+            mcp_tools: list[Tool] | ListToolsResult = asyncio.run(
+                _list_tools_with_connection()
             )
+            if isinstance(mcp_tools, ListToolsResult):
+                mcp_tools = mcp_tools.tools

-            # Wrap tools with human-in-the-loop if needed
-            wrapped_tools = [
-                as_human_in_the_loop(tool, function) for tool in langchain_tools
-            ]
-            return wrapped_tools
+            logger.debug(f"Retrieved {len(mcp_tools)} MCP tools via UC Connection")

         except Exception as e:
             logger.error(f"Failed to get tools from MCP server via UC Connection: {e}")
@@ -80,6 +66,39 @@ def create_mcp_tools(
                 f"Failed to list MCP tools for function '{function.name}' via UC Connection '{function.connection.name}': {e}"
             )

+        # Create wrapper tools with fresh session per invocation
+        def _create_tool_wrapper_with_connection(mcp_tool: Tool) -> RunnableLike:
+            @create_tool(
+                mcp_tool.name,
+                description=mcp_tool.description or f"MCP tool: {mcp_tool.name}",
+                args_schema=mcp_tool.inputSchema,
+            )
+            async def tool_wrapper(**kwargs):
+                """Execute MCP tool with fresh UC Connection session."""
+                logger.debug(
+                    f"Invoking MCP tool {mcp_tool.name} with fresh UC Connection session"
+                )
+                workspace_client = function.connection.workspace_client
+
+                try:
+                    async with streamablehttp_client(
+                        mcp_url, auth=DatabricksOAuthClientProvider(workspace_client)
+                    ) as (read_stream, write_stream, _):
+                        async with ClientSession(read_stream, write_stream) as session:
+                            await session.initialize()
+                            result = await session.call_tool(mcp_tool.name, kwargs)
+                            logger.debug(
+                                f"MCP tool {mcp_tool.name} completed successfully"
+                            )
+                            return result
+                except Exception as e:
+                    logger.error(f"MCP tool {mcp_tool.name} failed: {e}")
+                    raise
+
+            return as_human_in_the_loop(tool_wrapper, function)
+
+        return [_create_tool_wrapper_with_connection(tool) for tool in mcp_tools]
+
     else:
         # Use direct MCP connection with MultiServerMCPClient
         logger.debug("Using direct MCP connection with MultiServerMCPClient")
@@ -119,7 +138,7 @@ def create_mcp_tools(
            logger.debug("Using existing authentication token")

            return {
-                "url": function.url,
+                "url": mcp_url,  # Use the resolved MCP URL
                "transport": function.transport,
                "headers": headers,
            }
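
Note on the change above: the UC Connection path now lists the server's tools once when the tools are created, then opens a brand-new streamable-HTTP session for every tool invocation instead of reusing LangChain adapters bound to a single session. A minimal standalone sketch of that pattern, using only the mcp client library (placeholder URL, no Databricks OAuth, names chosen for illustration):

# Sketch of the fresh-session-per-call pattern; MCP_URL is a placeholder,
# in dao-ai it is resolved from function.mcp_url.
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

MCP_URL = "https://example.com/api/2.0/mcp/external/my_connection"  # placeholder


async def list_tool_names() -> list[str]:
    # Open a transport and session once, initialize, then list the server's tools.
    async with streamablehttp_client(MCP_URL) as (read_stream, write_stream, _):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()
            result = await session.list_tools()
            return [tool.name for tool in result.tools]


async def call_tool(name: str, arguments: dict) -> object:
    # A brand-new session is created for each call, mirroring tool_wrapper above.
    async with streamablehttp_client(MCP_URL) as (read_stream, write_stream, _):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()
            return await session.call_tool(name, arguments)


if __name__ == "__main__":
    print(asyncio.run(list_tool_names()))

Opening a fresh session per call avoids holding a long-lived session (and its auth state) across graph invocations, at the cost of an extra initialize handshake on each tool call.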
dao_ai/utils.py CHANGED
@@ -3,7 +3,8 @@ import importlib.metadata
 import os
 import re
 import site
-from importlib.metadata import version
+from importlib.metadata import PackageNotFoundError, version
+from pathlib import Path
 from typing import Any, Callable, Sequence

 from loguru import logger
@@ -37,6 +38,59 @@ def normalize_name(name: str) -> str:
     return normalized.strip("_")


+def dao_ai_version() -> str:
+    """
+    Get the dao-ai package version, with fallback for source installations.
+
+    Tries to get the version from installed package metadata first. If the package
+    is not installed (e.g., running from source), falls back to reading from
+    pyproject.toml. Returns "dev" if neither method works.
+
+    Returns:
+        str: The version string, or "dev" if version cannot be determined
+    """
+    try:
+        # Try to get version from installed package metadata
+        return version("dao-ai")
+    except PackageNotFoundError:
+        # Package not installed, try reading from pyproject.toml
+        logger.debug(
+            "dao-ai package not installed, attempting to read version from pyproject.toml"
+        )
+        try:
+            import tomllib  # Python 3.11+
+        except ImportError:
+            try:
+                import tomli as tomllib  # Fallback for Python < 3.11
+            except ImportError:
+                logger.warning(
+                    "Cannot determine dao-ai version: package not installed and tomllib/tomli not available"
+                )
+                return "dev"
+
+        try:
+            # Find pyproject.toml relative to this file
+            project_root = Path(__file__).parents[2]
+            pyproject_path = project_root / "pyproject.toml"
+
+            if not pyproject_path.exists():
+                logger.warning(
+                    f"Cannot determine dao-ai version: pyproject.toml not found at {pyproject_path}"
+                )
+                return "dev"
+
+            with open(pyproject_path, "rb") as f:
+                pyproject_data = tomllib.load(f)
+            pkg_version = pyproject_data.get("project", {}).get("version", "dev")
+            logger.debug(
+                f"Read version {pkg_version} from pyproject.toml at {pyproject_path}"
+            )
+            return pkg_version
+        except Exception as e:
+            logger.warning(f"Cannot determine dao-ai version from pyproject.toml: {e}")
+            return "dev"
+
+
 def get_installed_packages() -> dict[str, str]:
     """Get all installed packages with versions"""

@@ -65,6 +119,7 @@ def get_installed_packages() -> dict[str, str]:
         f"psycopg[binary,pool]=={version('psycopg')}",
         f"pydantic=={version('pydantic')}",
         f"pyyaml=={version('pyyaml')}",
+        f"tomli=={version('tomli')}",
         f"unitycatalog-ai[databricks]=={version('unitycatalog-ai')}",
         f"unitycatalog-langchain[databricks]=={version('unitycatalog-langchain')}",
     ]
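
The new dao_ai_version() helper resolves the version in three steps: installed package metadata, then pyproject.toml (via tomllib on Python 3.11+ or the newly pinned tomli backport), then the literal "dev". A hypothetical usage sketch, for example pinning the running version into a pip requirement:

# Hypothetical caller; dao_ai_version() is the helper added in dao_ai/utils.py above.
from dao_ai.utils import dao_ai_version

pkg_version = dao_ai_version()

# Fall back to an unpinned requirement when only "dev" can be determined.
requirement = "dao-ai" if pkg_version == "dev" else f"dao-ai=={pkg_version}"
print(requirement)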
dao_ai-0.0.25.dist-info/METADATA → dao_ai-0.0.27.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.0.25
+Version: 0.0.27
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
@@ -29,6 +29,7 @@ Requires-Dist: databricks-langchain>=0.8.1
 Requires-Dist: databricks-mcp>=0.3.0
 Requires-Dist: databricks-sdk[openai]>=0.67.0
 Requires-Dist: duckduckgo-search>=8.0.2
+Requires-Dist: gepa>=0.0.17
 Requires-Dist: grandalf>=0.8
 Requires-Dist: langchain-mcp-adapters>=0.1.10
 Requires-Dist: langchain-tavily>=0.2.11
@@ -40,7 +41,7 @@ Requires-Dist: langgraph>=0.6.10
 Requires-Dist: langmem>=0.0.29
 Requires-Dist: loguru>=0.7.3
 Requires-Dist: mcp>=1.17.0
-Requires-Dist: mlflow>=3.4.0
+Requires-Dist: mlflow>=3.5.1
 Requires-Dist: nest-asyncio>=1.6.0
 Requires-Dist: openevals>=0.0.19
 Requires-Dist: openpyxl>=3.1.5
@@ -51,6 +52,7 @@ Requires-Dist: pyyaml>=6.0.2
 Requires-Dist: rich>=14.0.0
 Requires-Dist: scipy<=1.15
 Requires-Dist: sqlparse>=0.5.3
+Requires-Dist: tomli>=2.3.0
 Requires-Dist: unitycatalog-ai[databricks]>=0.3.0
 Provides-Extra: databricks
 Requires-Dist: databricks-connect>=15.0.0; extra == 'databricks'
dao_ai-0.0.25.dist-info/RECORD → dao_ai-0.0.27.dist-info/RECORD
@@ -3,16 +3,16 @@ dao_ai/agent_as_code.py,sha256=kPSeDz2-1jRaed1TMs4LA3VECoyqe9_Ed2beRLB9gXQ,472
 dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
 dao_ai/chat_models.py,sha256=uhwwOTeLyHWqoTTgHrs4n5iSyTwe4EQcLKnh3jRxPWI,8626
 dao_ai/cli.py,sha256=gq-nsapWxDA1M6Jua3vajBvIwf0Oa6YLcB58lEtMKUo,22503
-dao_ai/config.py,sha256=_4OyJ1x7DH1S-5-FTJp7geeOf2H096PQHVFUBtALKsU,56795
-dao_ai/graph.py,sha256=APYc2y3cig4P52X4sOHSFSZNK8j5EtEPJLFwWeJ3KQQ,7956
+dao_ai/config.py,sha256=DRFj_1W5sfGH5f2tGQaeC733pTIDTqvbyAt14v8FQYs,70296
+dao_ai/graph.py,sha256=9kjJx0oFZKq5J9-Kpri4-0VCJILHYdYyhqQnj0_noxQ,8913
 dao_ai/guardrails.py,sha256=4TKArDONRy8RwHzOT1plZ1rhy3x9GF_aeGpPCRl6wYA,4016
 dao_ai/messages.py,sha256=xl_3-WcFqZKCFCiov8sZOPljTdM3gX3fCHhxq-xFg2U,7005
 dao_ai/models.py,sha256=8r8GIG3EGxtVyWsRNI56lVaBjiNrPkzh4HdwMZRq8iw,31689
-dao_ai/nodes.py,sha256=SSuFNTXOdFaKg_aX-yUkQO7fM9wvNGu14lPXKDapU1U,8461
+dao_ai/nodes.py,sha256=iQ_5vL6mt1UcRnhwgz-l1D8Ww4CMQrSMVnP_Lu7fFjU,8781
 dao_ai/prompts.py,sha256=7Hcstmv514P0s9s-TVoIlbkDV2XXOphGCW6gcPeyUYE,1628
 dao_ai/state.py,sha256=_lF9krAYYjvFDMUwZzVKOn0ZnXKcOrbjWKdre0C5B54,1137
 dao_ai/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dao_ai/utils.py,sha256=yXgqHrYdO5qDxgxUs2G5XJeLFgwg8D0BIJvbFkqSbhs,4519
+dao_ai/utils.py,sha256=xfBo9-6k9ss1c7QlC26QDCwS2sY3mgUYgWzTTV6443M,6662
 dao_ai/vector_search.py,sha256=jlaFS_iizJ55wblgzZmswMM3UOL-qOp2BGJc0JqXYSg,2839
 dao_ai/hooks/__init__.py,sha256=LlHGIuiZt6vGW8K5AQo1XJEkBP5vDVtMhq0IdjcLrD4,417
 dao_ai/hooks/core.py,sha256=ZShHctUSoauhBgdf1cecy9-D7J6-sGn-pKjuRMumW5U,6663
@@ -22,20 +22,20 @@ dao_ai/memory/core.py,sha256=DnEjQO3S7hXr3CDDd7C2eE7fQUmcCS_8q9BXEgjPH3U,4271
 dao_ai/memory/postgres.py,sha256=vvI3osjx1EoU5GBA6SCUstTBKillcmLl12hVgDMjfJY,15346
 dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/providers/base.py,sha256=-fjKypCOk28h6vioPfMj9YZSw_3Kcbi2nMuAyY7vX9k,1383
-dao_ai/providers/databricks.py,sha256=1BPYQxi4-Z4I1ygZYlKV8ycdxZTtWNXplySToayHCEI,43096
+dao_ai/providers/databricks.py,sha256=W_lXSMbPTULMAx-KW7zBJfP7LtkcPGRnEfGcSYuu708,65824
 dao_ai/tools/__init__.py,sha256=G5-5Yi6zpQOH53b5IzLdtsC6g0Ep6leI5GxgxOmgw7Q,1203
 dao_ai/tools/agent.py,sha256=WbQnyziiT12TLMrA7xK0VuOU029tdmUBXbUl-R1VZ0Q,1886
 dao_ai/tools/core.py,sha256=Kei33S8vrmvPOAyrFNekaWmV2jqZ-IPS1QDSvU7RZF0,1984
 dao_ai/tools/genie.py,sha256=8HSOCzSg6PlBzBYXMmNfUnl-LO03p3Ki3fxLPm_dhPg,15051
 dao_ai/tools/human_in_the_loop.py,sha256=yk35MO9eNETnYFH-sqlgR-G24TrEgXpJlnZUustsLkI,3681
-dao_ai/tools/mcp.py,sha256=CYv59yn-LIY11atUgNtN2W6vR7C6Qyo7-rvPcVJnXVk,7461
+dao_ai/tools/mcp.py,sha256=5aQoRtx2z4xm6zgRslc78rSfEQe-mfhqov2NsiybYfc,8416
 dao_ai/tools/python.py,sha256=XcQiTMshZyLUTVR5peB3vqsoUoAAy8gol9_pcrhddfI,1831
 dao_ai/tools/slack.py,sha256=SCvyVcD9Pv_XXPXePE_fSU1Pd8VLTEkKDLvoGTZWy2Y,4775
 dao_ai/tools/time.py,sha256=Y-23qdnNHzwjvnfkWvYsE7PoWS1hfeKy44tA7sCnNac,8759
 dao_ai/tools/unity_catalog.py,sha256=uX_h52BuBAr4c9UeqSMI7DNz3BPRLeai5tBVW4sJqRI,13113
 dao_ai/tools/vector_search.py,sha256=EDYQs51zIPaAP0ma1D81wJT77GQ-v-cjb2XrFVWfWdg,2621
-dao_ai-0.0.25.dist-info/METADATA,sha256=ahAblBSty81iw_mlf9blqOF4-AKN5Asak9SWH0H4FIs,42639
-dao_ai-0.0.25.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-dao_ai-0.0.25.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
-dao_ai-0.0.25.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
-dao_ai-0.0.25.dist-info/RECORD,,
+dao_ai-0.0.27.dist-info/METADATA,sha256=iKbZTl0tFi0S2XUir9uIGh6WyOTvM9yH-4FHCHOzsuE,42695
+dao_ai-0.0.27.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dao_ai-0.0.27.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.0.27.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.0.27.dist-info/RECORD,,