dao-ai 0.0.21-py3-none-any.whl → 0.0.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dao_ai/config.py CHANGED
@@ -23,6 +23,7 @@ from databricks.sdk.credentials_provider import (
     ModelServingUserCredentials,
 )
 from databricks.sdk.service.catalog import FunctionInfo, TableInfo
+from databricks.sdk.service.database import DatabaseInstance
 from databricks.vector_search.client import VectorSearchClient
 from databricks.vector_search.index import VectorSearchIndex
 from databricks_langchain import (
@@ -703,15 +704,18 @@ class WarehouseModel(BaseModel, IsDatabricksResource):
 
 
 class DatabaseModel(BaseModel, IsDatabricksResource):
-    model_config = ConfigDict(frozen=True)
+    model_config = ConfigDict(use_enum_values=True, extra="forbid")
     name: str
+    instance_name: Optional[str] = None
     description: Optional[str] = None
-    host: Optional[AnyVariable]
+    host: Optional[AnyVariable] = None
     database: Optional[AnyVariable] = "databricks_postgres"
     port: Optional[AnyVariable] = 5432
     connection_kwargs: Optional[dict[str, Any]] = Field(default_factory=dict)
     max_pool_size: Optional[int] = 10
-    timeout_seconds: Optional[int] = 5
+    timeout_seconds: Optional[int] = 10
+    capacity: Optional[Literal["CU_1", "CU_2"]] = "CU_2"
+    node_count: Optional[int] = None
     user: Optional[AnyVariable] = None
     password: Optional[AnyVariable] = None
     client_id: Optional[AnyVariable] = None
@@ -725,11 +729,44 @@ class DatabaseModel(BaseModel, IsDatabricksResource):
     def as_resources(self) -> Sequence[DatabricksResource]:
         return [
             DatabricksLakebase(
-                database_instance_name=self.name,
+                database_instance_name=self.instance_name,
                 on_behalf_of_user=self.on_behalf_of_user,
             )
         ]
 
+    @model_validator(mode="after")
+    def update_instance_name(self):
+        if self.instance_name is None:
+            self.instance_name = self.name
+
+        return self
+
+    @model_validator(mode="after")
+    def update_user(self):
+        if self.client_id or self.user:
+            return self
+
+        self.user = self.workspace_client.current_user.me().user_name
+        if not self.user:
+            raise ValueError(
+                "Unable to determine current user. Please provide a user name or OAuth credentials."
+            )
+
+        return self
+
+    @model_validator(mode="after")
+    def update_host(self):
+        if self.host is not None:
+            return self
+
+        existing_instance: DatabaseInstance = (
+            self.workspace_client.database.get_database_instance(
+                name=self.instance_name
+            )
+        )
+        self.host = existing_instance.read_write_dns
+        return self
+
     @model_validator(mode="after")
     def validate_auth_methods(self):
         oauth_fields: Sequence[Any] = [
@@ -739,7 +776,7 @@ class DatabaseModel(BaseModel, IsDatabricksResource):
         ]
         has_oauth: bool = all(field is not None for field in oauth_fields)
 
-        pat_fields: Sequence[Any] = [self.user, self.password]
+        pat_fields: Sequence[Any] = [self.user]
         has_user_auth: bool = all(field is not None for field in pat_fields)
 
         if has_oauth and has_user_auth:
@@ -758,7 +795,14 @@ class DatabaseModel(BaseModel, IsDatabricksResource):
         return self
 
     @property
-    def connection_url(self) -> str:
+    def connection_params(self) -> dict[str, Any]:
+        """
+        Get database connection parameters as a dictionary.
+
+        Returns a dict with connection parameters suitable for psycopg ConnectionPool.
+        If username is configured, it will be included; otherwise it will be omitted
+        to allow Lakebase to authenticate using the token's identity.
+        """
         from dao_ai.providers.base import ServiceProvider
         from dao_ai.providers.databricks import DatabricksProvider
 
@@ -766,7 +810,7 @@ class DatabaseModel(BaseModel, IsDatabricksResource):
 
         if self.client_id and self.client_secret and self.workspace_host:
             username = value_of(self.client_id)
-        else:
+        elif self.user:
             username = value_of(self.user)
 
         host: str = value_of(self.host)
@@ -779,11 +823,48 @@ class DatabaseModel(BaseModel, IsDatabricksResource):
                 workspace_host=value_of(self.workspace_host),
                 pat=value_of(self.password),
             )
-        token: str = provider.create_token()
 
-        return (
-            f"postgresql://{username}:{token}@{host}:{port}/{database}?sslmode=require"
-        )
+        token: str = provider.lakebase_password_provider(self.instance_name)
+
+        # Build connection parameters dictionary
+        params: dict[str, Any] = {
+            "dbname": database,
+            "host": host,
+            "port": port,
+            "password": token,
+            "sslmode": "require",
+        }
+
+        # Only include user if explicitly configured
+        if username:
+            params["user"] = username
+            logger.debug(
+                f"Connection params: dbname={database} user={username} host={host} port={port} password=******** sslmode=require"
+            )
+        else:
+            logger.debug(
+                f"Connection params: dbname={database} host={host} port={port} password=******** sslmode=require (using token identity)"
+            )
+
+        return params
+
+    @property
+    def connection_url(self) -> str:
+        """
+        Get database connection URL as a string (for backwards compatibility).
+
+        Note: It's recommended to use connection_params instead for better flexibility.
+        """
+        params = self.connection_params
+        parts = [f"{k}={v}" for k, v in params.items()]
+        return " ".join(parts)
+
+    def create(self, w: WorkspaceClient | None = None) -> None:
+        from dao_ai.providers.databricks import DatabricksProvider
+
+        provider: DatabricksProvider = DatabricksProvider()
+        provider.create_lakebase(self)
+        provider.create_lakebase_instance_role(self)
 
 
 class SearchParametersModel(BaseModel):
@@ -1093,6 +1174,7 @@ class AgentModel(BaseModel):
 class SupervisorModel(BaseModel):
     model_config = ConfigDict(use_enum_values=True, extra="forbid")
     model: LLMModel
+    tools: list[ToolModel] = Field(default_factory=list)
     prompt: Optional[str] = None
 
 
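Taken together, the DatabaseModel changes above make most connection settings derivable: instance_name falls back to name, update_user() resolves the current workspace user, and update_host() looks up the instance's read_write_dns. A minimal usage sketch, not from the package (field and method names are taken from the diff; actually running it requires a real Databricks workspace, and "my-lakebase" is a placeholder):

# Sketch only: exercising the new DatabaseModel surface from the diff above.
from dao_ai.config import DatabaseModel

db = DatabaseModel(name="my-lakebase")  # instance_name defaults to "my-lakebase"

# connection_params yields a psycopg-style dict, roughly:
# {"dbname": "databricks_postgres", "host": <read_write_dns>, "port": 5432,
#  "password": <short-lived token>, "sslmode": "require", "user": <resolved user>}
params = db.connection_params

# connection_url renders the same dict as space-separated key=value pairs.
print(db.connection_url)

# create() provisions the instance and its role via DatabricksProvider.
db.create()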
dao_ai/graph.py CHANGED
@@ -27,6 +27,7 @@ from dao_ai.nodes import (
 )
 from dao_ai.prompts import make_prompt
 from dao_ai.state import Context, IncomingState, OutgoingState, SharedState
+from dao_ai.tools import create_tools
 
 
 def route_message(state: SharedState) -> str:
@@ -91,6 +92,8 @@ def _create_supervisor_graph(config: AppConfig) -> CompiledStateGraph:
     orchestration: OrchestrationModel = config.app.orchestration
     supervisor: SupervisorModel = orchestration.supervisor
 
+    tools += create_tools(orchestration.supervisor.tools)
+
     store: BaseStore = None
     if orchestration.memory and orchestration.memory.store:
         store = orchestration.memory.store.as_store()
dao_ai/memory/core.py CHANGED
@@ -70,10 +70,14 @@ class StoreManager:
             case StorageType.POSTGRES:
                 from dao_ai.memory.postgres import PostgresStoreManager
 
-                store_manager = cls.store_managers.get(store_model.database.name)
+                store_manager = cls.store_managers.get(
+                    store_model.database.instance_name
+                )
                 if store_manager is None:
                     store_manager = PostgresStoreManager(store_model)
-                    cls.store_managers[store_model.database.name] = store_manager
+                    cls.store_managers[store_model.database.instance_name] = (
+                        store_manager
+                    )
             case _:
                 raise ValueError(f"Unknown store type: {store_model.type}")
 
@@ -102,15 +106,15 @@ class CheckpointManager:
                 from dao_ai.memory.postgres import AsyncPostgresCheckpointerManager
 
                 checkpointer_manager = cls.checkpoint_managers.get(
-                    checkpointer_model.database.name
+                    checkpointer_model.database.instance_name
                 )
                 if checkpointer_manager is None:
                     checkpointer_manager = AsyncPostgresCheckpointerManager(
                         checkpointer_model
                     )
-                    cls.checkpoint_managers[checkpointer_model.database.name] = (
-                        checkpointer_manager
-                    )
+                    cls.checkpoint_managers[
+                        checkpointer_model.database.instance_name
+                    ] = checkpointer_manager
             case _:
                 raise ValueError(f"Unknown store type: {checkpointer_model.type}")
 
dao_ai/memory/postgres.py CHANGED
@@ -20,6 +20,59 @@ from dao_ai.memory.base import (
 )
 
 
+def _create_pool(
+    connection_params: dict[str, Any],
+    database_name: str,
+    max_pool_size: int,
+    timeout_seconds: int,
+    kwargs: dict,
+) -> ConnectionPool:
+    """Create a connection pool using the provided connection parameters."""
+    logger.debug(
+        f"Connection params for {database_name}: {', '.join(k + '=' + (str(v) if k != 'password' else '***') for k, v in connection_params.items())}"
+    )
+
+    # Merge connection_params into kwargs for psycopg
+    connection_kwargs = kwargs | connection_params
+    pool = ConnectionPool(
+        conninfo="",  # Empty conninfo, params come from kwargs
+        min_size=1,
+        max_size=max_pool_size,
+        open=False,
+        timeout=timeout_seconds,
+        kwargs=connection_kwargs,
+    )
+    pool.open(wait=True, timeout=timeout_seconds)
+    logger.info(f"Successfully connected to {database_name}")
+    return pool
+
+
+async def _create_async_pool(
+    connection_params: dict[str, Any],
+    database_name: str,
+    max_pool_size: int,
+    timeout_seconds: int,
+    kwargs: dict,
+) -> AsyncConnectionPool:
+    """Create an async connection pool using the provided connection parameters."""
+    logger.debug(
+        f"Connection params for {database_name}: {', '.join(k + '=' + (str(v) if k != 'password' else '***') for k, v in connection_params.items())}"
+    )
+
+    # Merge connection_params into kwargs for psycopg
+    connection_kwargs = kwargs | connection_params
+    pool = AsyncConnectionPool(
+        conninfo="",  # Empty conninfo, params come from kwargs
+        max_size=max_pool_size,
+        open=False,
+        timeout=timeout_seconds,
+        kwargs=connection_kwargs,
+    )
+    await pool.open(wait=True, timeout=timeout_seconds)
+    logger.info(f"Successfully connected to {database_name}")
+    return pool
+
+
 class AsyncPostgresPoolManager:
     _pools: dict[str, AsyncConnectionPool] = {}
     _lock: asyncio.Lock = asyncio.Lock()
@@ -27,7 +80,7 @@ class AsyncPostgresPoolManager:
     @classmethod
     async def get_pool(cls, database: DatabaseModel) -> AsyncConnectionPool:
         connection_key: str = database.name
-        connection_url: str = database.connection_url
+        connection_params: dict[str, Any] = database.connection_params
 
         async with cls._lock:
             if connection_key in cls._pools:
@@ -41,23 +94,17 @@ class AsyncPostgresPoolManager:
                 "autocommit": True,
             } | database.connection_kwargs or {}
 
-            pool: AsyncConnectionPool = AsyncConnectionPool(
-                conninfo=connection_url,
-                max_size=database.max_pool_size,
-                open=False,
-                timeout=database.timeout_seconds,
+            # Create connection pool
+            pool: AsyncConnectionPool = await _create_async_pool(
+                connection_params=connection_params,
+                database_name=database.name,
+                max_pool_size=database.max_pool_size,
+                timeout_seconds=database.timeout_seconds,
                 kwargs=kwargs,
             )
 
-            try:
-                await pool.open(wait=True, timeout=database.timeout_seconds)
-                cls._pools[connection_key] = pool
-                return pool
-            except Exception as e:
-                logger.error(
-                    f"Failed to create PostgreSQL pool for {database.name}: {e}"
-                )
-                raise e
+            cls._pools[connection_key] = pool
+            return pool
 
     @classmethod
     async def close_pool(cls, database: DatabaseModel):
@@ -218,7 +265,7 @@ class PostgresPoolManager:
     @classmethod
     def get_pool(cls, database: DatabaseModel) -> ConnectionPool:
         connection_key: str = str(database.name)
-        connection_url: str = database.connection_url
+        connection_params: dict[str, Any] = database.connection_params
 
         with cls._lock:
             if connection_key in cls._pools:
@@ -232,23 +279,17 @@ class PostgresPoolManager:
                 "autocommit": True,
             } | database.connection_kwargs or {}
 
-            pool: ConnectionPool = ConnectionPool(
-                conninfo=connection_url,
-                max_size=database.max_pool_size,
-                open=False,
-                timeout=database.timeout_seconds,
+            # Create connection pool
+            pool: ConnectionPool = _create_pool(
+                connection_params=connection_params,
+                database_name=database.name,
+                max_pool_size=database.max_pool_size,
+                timeout_seconds=database.timeout_seconds,
                 kwargs=kwargs,
             )
 
-            try:
-                pool.open(wait=True, timeout=database.timeout_seconds)
-                cls._pools[connection_key] = pool
-                return pool
-            except Exception as e:
-                logger.error(
-                    f"Failed to create PostgreSQL pool for {database.name}: {e}"
-                )
-                raise e
+            cls._pools[connection_key] = pool
+            return pool
 
     @classmethod
     def close_pool(cls, database: DatabaseModel):
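The pool refactor above stops building a conninfo URL and instead passes parameters through psycopg_pool's kwargs with an empty conninfo. A self-contained sketch of that pattern, independent of dao-ai (host and token values are placeholders):

# Generic psycopg_pool sketch of the kwargs-based connection style used above.
from psycopg_pool import ConnectionPool

connection_params = {
    "dbname": "databricks_postgres",
    "host": "instance.database.cloud.databricks.com",  # placeholder host
    "port": 5432,
    "password": "<short-lived-token>",  # placeholder credential
    "sslmode": "require",
}

pool = ConnectionPool(
    conninfo="",  # empty conninfo; parameters are supplied via kwargs
    min_size=1,
    max_size=10,
    open=False,
    timeout=10,
    kwargs={"autocommit": True, **connection_params},
)
pool.open(wait=True, timeout=10)
with pool.connection() as conn:
    conn.execute("SELECT 1")  # smoke test the pool
pool.close()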
dao_ai/providers/databricks.py CHANGED
@@ -1,4 +1,5 @@
 import base64
+import uuid
 from importlib.metadata import version
 from pathlib import Path
 from typing import Any, Callable, Final, Sequence
@@ -21,6 +22,7 @@ from databricks.sdk.service.catalog import (
     VolumeInfo,
     VolumeType,
 )
+from databricks.sdk.service.database import DatabaseCredential
 from databricks.sdk.service.iam import User
 from databricks.sdk.service.workspace import GetSecretResponse
 from databricks.vector_search.client import VectorSearchClient
@@ -743,3 +745,281 @@ class DatabricksProvider(ServiceProvider):
                 break
         logger.debug(f"Vector search index found: {found_endpoint_name}")
         return found_endpoint_name
+
+    def create_lakebase(self, database: DatabaseModel) -> None:
+        """
+        Create a Lakebase database instance using the Databricks workspace client.
+
+        This method handles idempotent database creation, gracefully handling cases where:
+        - The database instance already exists
+        - The database is in an intermediate state (STARTING, UPDATING, etc.)
+
+        Args:
+            database: DatabaseModel containing the database configuration
+
+        Returns:
+            None
+
+        Raises:
+            Exception: If an unexpected error occurs during database creation
+        """
+        import time
+        from typing import Any
+
+        workspace_client: WorkspaceClient = database.workspace_client
+
+        try:
+            # First, check if the database instance already exists
+            existing_instance: Any = workspace_client.database.get_database_instance(
+                name=database.instance_name
+            )
+
+            if existing_instance:
+                logger.debug(
+                    f"Database instance {database.instance_name} already exists with state: {existing_instance.state}"
+                )
+
+                # Check if database is in an intermediate state
+                if existing_instance.state in ["STARTING", "UPDATING"]:
+                    logger.info(
+                        f"Database instance {database.instance_name} is in {existing_instance.state} state, waiting for it to become AVAILABLE..."
+                    )
+
+                    # Wait for database to reach a stable state
+                    max_wait_time: int = 600  # 10 minutes
+                    wait_interval: int = 10  # 10 seconds
+                    elapsed: int = 0
+
+                    while elapsed < max_wait_time:
+                        try:
+                            current_instance: Any = (
+                                workspace_client.database.get_database_instance(
+                                    name=database.instance_name
+                                )
+                            )
+                            current_state: str = current_instance.state
+                            logger.debug(f"Database instance state: {current_state}")
+
+                            if current_state == "AVAILABLE":
+                                logger.info(
+                                    f"Database instance {database.instance_name} is now AVAILABLE"
+                                )
+                                break
+                            elif current_state in ["STARTING", "UPDATING"]:
+                                logger.debug(
+                                    f"Database instance still in {current_state} state, waiting {wait_interval} seconds..."
+                                )
+                                time.sleep(wait_interval)
+                                elapsed += wait_interval
+                            elif current_state in ["STOPPED", "DELETING"]:
+                                logger.warning(
+                                    f"Database instance {database.instance_name} is in unexpected state: {current_state}"
+                                )
+                                break
+                            else:
+                                logger.warning(
+                                    f"Unknown database state: {current_state}, proceeding anyway"
+                                )
+                                break
+                        except NotFound:
+                            logger.warning(
+                                f"Database instance {database.instance_name} no longer exists, will attempt to recreate"
+                            )
+                            break
+                        except Exception as state_error:
+                            logger.warning(
+                                f"Could not check database state: {state_error}, proceeding anyway"
+                            )
+                            break
+
+                    if elapsed >= max_wait_time:
+                        logger.warning(
+                            f"Timed out waiting for database instance {database.instance_name} to become AVAILABLE after {max_wait_time} seconds"
+                        )
+
+                elif existing_instance.state == "AVAILABLE":
+                    logger.info(
+                        f"Database instance {database.instance_name} already exists and is AVAILABLE"
+                    )
+                    return
+                elif existing_instance.state in ["STOPPED", "DELETING"]:
+                    logger.warning(
+                        f"Database instance {database.instance_name} is in {existing_instance.state} state"
+                    )
+                    return
+                else:
+                    logger.info(
+                        f"Database instance {database.instance_name} already exists with state: {existing_instance.state}"
+                    )
+                    return
+
+        except NotFound:
+            # Database doesn't exist, proceed with creation
+            logger.debug(
+                f"Database instance {database.instance_name} not found, creating new instance..."
+            )
+
+            try:
+                # Resolve variable values for database parameters
+                from databricks.sdk.service.database import DatabaseInstance
+
+                capacity: str = database.capacity if database.capacity else "CU_2"
+
+                # Create the database instance object
+                database_instance: DatabaseInstance = DatabaseInstance(
+                    name=database.instance_name,
+                    capacity=capacity,
+                    node_count=database.node_count,
+                )
+
+                # Create the database instance via API
+                workspace_client.database.create_database_instance(
+                    database_instance=database_instance
+                )
+                logger.info(
+                    f"Successfully created database instance: {database.instance_name}"
+                )
+
+            except Exception as create_error:
+                error_msg: str = str(create_error)
+
+                # Handle case where database was created by another process concurrently
+                if (
+                    "already exists" in error_msg.lower()
+                    or "RESOURCE_ALREADY_EXISTS" in error_msg
+                ):
+                    logger.info(
+                        f"Database instance {database.instance_name} was created concurrently by another process"
+                    )
+                    return
+                else:
+                    # Re-raise unexpected errors
+                    logger.error(
+                        f"Error creating database instance {database.instance_name}: {create_error}"
+                    )
+                    raise
+
+        except Exception as e:
+            # Handle other unexpected errors
+            error_msg: str = str(e)
+
+            # Check if this is actually a "resource already exists" type error
+            if (
+                "already exists" in error_msg.lower()
+                or "RESOURCE_ALREADY_EXISTS" in error_msg
+            ):
+                logger.info(
+                    f"Database instance {database.instance_name} already exists (detected via exception)"
+                )
+                return
+            else:
+                logger.error(
+                    f"Unexpected error while handling database {database.instance_name}: {e}"
+                )
+                raise
+
+    def lakebase_password_provider(self, instance_name: str) -> str:
+        """
+        Ask Databricks to mint a fresh DB credential for this instance.
+        """
+        logger.debug(f"Generating password for lakebase instance: {instance_name}")
+        w: WorkspaceClient = self.w
+        cred: DatabaseCredential = w.database.generate_database_credential(
+            request_id=str(uuid.uuid4()),
+            instance_names=[instance_name],
+        )
+        return cred.token
+
+    def create_lakebase_instance_role(self, database: DatabaseModel) -> None:
+        """
+        Create a database instance role for a Lakebase instance.
+
+        This method creates a role with DATABRICKS_SUPERUSER membership for the
+        service principal specified in the database configuration.
+
+        Args:
+            database: DatabaseModel containing the database and service principal configuration
+
+        Returns:
+            None
+
+        Raises:
+            ValueError: If client_id is not provided in the database configuration
+            Exception: If an unexpected error occurs during role creation
+        """
+        from databricks.sdk.service.database import (
+            DatabaseInstanceRole,
+            DatabaseInstanceRoleIdentityType,
+            DatabaseInstanceRoleMembershipRole,
+        )
+
+        from dao_ai.config import value_of
+
+        # Validate that client_id is provided
+        if not database.client_id:
+            logger.warning(
+                f"client_id is required to create instance role for database {database.instance_name}"
+            )
+            return
+
+        # Resolve the client_id value
+        client_id: str = value_of(database.client_id)
+        role_name: str = client_id
+        instance_name: str = database.instance_name
+
+        logger.debug(
+            f"Creating instance role '{role_name}' for database {instance_name} with principal {client_id}"
+        )
+
+        try:
+            # Check if role already exists
+            try:
+                _ = self.w.database.get_database_instance_role(
+                    instance_name=instance_name,
+                    name=role_name,
+                )
+                logger.info(
+                    f"Instance role '{role_name}' already exists for database {instance_name}"
+                )
+                return
+            except NotFound:
+                # Role doesn't exist, proceed with creation
+                logger.debug(
+                    f"Instance role '{role_name}' not found, creating new role..."
+                )
+
+            # Create the database instance role
+            role: DatabaseInstanceRole = DatabaseInstanceRole(
+                name=role_name,
+                identity_type=DatabaseInstanceRoleIdentityType.SERVICE_PRINCIPAL,
+                membership_role=DatabaseInstanceRoleMembershipRole.DATABRICKS_SUPERUSER,
+            )
+
+            # Create the role using the API
+            self.w.database.create_database_instance_role(
+                instance_name=instance_name,
+                database_instance_role=role,
+            )
+
+            logger.info(
+                f"Successfully created instance role '{role_name}' for database {instance_name}"
+            )
+
+        except Exception as e:
+            error_msg: str = str(e)
+
+            # Handle case where role was created concurrently
+            if (
+                "already exists" in error_msg.lower()
+                or "RESOURCE_ALREADY_EXISTS" in error_msg
+            ):
+                logger.info(
+                    f"Instance role '{role_name}' was created concurrently for database {instance_name}"
+                )
+                return
+
+            # Re-raise unexpected errors
+            logger.error(
+                f"Error creating instance role '{role_name}' for database {instance_name}: {e}"
+            )
+            raise
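The provider methods above boil down to a handful of Databricks SDK database calls. A condensed happy-path sketch (the SDK calls mirror those in the diff; "my-lakebase" is a placeholder, and create_lakebase()'s state polling and error handling are omitted):

# Condensed sketch of the Lakebase SDK calls used by the new provider methods.
import uuid

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstance

w = WorkspaceClient()

# create_lakebase(): create the instance (the real method first checks
# get_database_instance() and waits out STARTING/UPDATING states).
w.database.create_database_instance(
    database_instance=DatabaseInstance(name="my-lakebase", capacity="CU_2")
)

# lakebase_password_provider(): mint a short-lived credential whose token
# becomes the Postgres password in DatabaseModel.connection_params.
cred = w.database.generate_database_credential(
    request_id=str(uuid.uuid4()),
    instance_names=["my-lakebase"],
)
token = cred.token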
dao_ai/tools/genie.py CHANGED
@@ -276,7 +276,7 @@ def create_genie_tool(
     genie_room: GenieRoomModel | dict[str, Any],
     name: Optional[str] = None,
     description: Optional[str] = None,
-    persist_conversation: bool = True,
+    persist_conversation: bool = False,
     truncate_results: bool = False,
     poll_interval: int = DEFAULT_POLLING_INTERVAL_SECS,
 ) -> Callable[[str], GenieResponse]:
dao_ai/utils.py CHANGED
@@ -112,3 +112,7 @@ def load_function(function_name: str) -> Callable[..., Any]:
     except (ImportError, AttributeError, TypeError) as e:
         # Provide a detailed error message that includes the original exception
         raise ImportError(f"Failed to import {function_name}: {e}")
+
+
+def is_in_model_serving() -> bool:
+    return os.environ.get("IS_IN_DB_MODEL_SERVING_ENV", "false").lower() == "true"
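The new is_in_model_serving() helper simply reads the IS_IN_DB_MODEL_SERVING_ENV flag. A usage sketch (the call site below is hypothetical, not from the package):

from dao_ai.utils import is_in_model_serving

# Hypothetical call site: pick an auth strategy based on the runtime env.
auth_mode = "serving-token" if is_in_model_serving() else "local-profile"
print(auth_mode)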
{dao_ai-0.0.21.dist-info → dao_ai-0.0.22.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.0.21
+Version: 0.0.22
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
@@ -24,9 +24,9 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: System :: Distributed Computing
 Requires-Python: >=3.12
-Requires-Dist: databricks-agents>=1.6.0
+Requires-Dist: databricks-agents>=1.6.1
 Requires-Dist: databricks-langchain>=0.8.0
-Requires-Dist: databricks-sdk[openai]>=0.66.0
+Requires-Dist: databricks-sdk[openai]>=0.67.0
 Requires-Dist: duckduckgo-search>=8.0.2
 Requires-Dist: grandalf>=0.8
 Requires-Dist: langchain-mcp-adapters>=0.1.10
{dao_ai-0.0.21.dist-info → dao_ai-0.0.22.dist-info}/RECORD RENAMED
@@ -3,8 +3,8 @@ dao_ai/agent_as_code.py,sha256=kPSeDz2-1jRaed1TMs4LA3VECoyqe9_Ed2beRLB9gXQ,472
 dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
 dao_ai/chat_models.py,sha256=uhwwOTeLyHWqoTTgHrs4n5iSyTwe4EQcLKnh3jRxPWI,8626
 dao_ai/cli.py,sha256=Aez2TQW3Q8Ho1IaIkRggt0NevDxAAVPjXkePC5GPJF0,20429
-dao_ai/config.py,sha256=GeaM00wNlYecwe3HhqeG88Hprt0SvGg4HtC7g_m-v98,52386
-dao_ai/graph.py,sha256=gmD9mxODfXuvn9xWeBfewm1FiuVAWMLEdnZz7DNmSH0,7859
+dao_ai/config.py,sha256=Kh0oJwWr2dhqrOriOEgjMs2CGhOdMYh1hTGO7sAPdw8,55268
+dao_ai/graph.py,sha256=APYc2y3cig4P52X4sOHSFSZNK8j5EtEPJLFwWeJ3KQQ,7956
 dao_ai/guardrails.py,sha256=4TKArDONRy8RwHzOT1plZ1rhy3x9GF_aeGpPCRl6wYA,4016
 dao_ai/messages.py,sha256=xl_3-WcFqZKCFCiov8sZOPljTdM3gX3fCHhxq-xFg2U,7005
 dao_ai/models.py,sha256=8r8GIG3EGxtVyWsRNI56lVaBjiNrPkzh4HdwMZRq8iw,31689
@@ -12,29 +12,29 @@ dao_ai/nodes.py,sha256=SSuFNTXOdFaKg_aX-yUkQO7fM9wvNGu14lPXKDapU1U,8461
 dao_ai/prompts.py,sha256=vpmIbWs_szXUgNNDs5Gh2LcxKZti5pHDKSfoClUcgX0,1289
 dao_ai/state.py,sha256=_lF9krAYYjvFDMUwZzVKOn0ZnXKcOrbjWKdre0C5B54,1137
 dao_ai/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dao_ai/utils.py,sha256=dkZTXNN6q0xwkrvSWdNq8937W2xGuLCRWRb6hRQM6kA,4217
+dao_ai/utils.py,sha256=A7BR28Rv7tH9B_lLVKlgBnWtcPeQ-Bafm2f57oTrUlQ,4337
 dao_ai/vector_search.py,sha256=jlaFS_iizJ55wblgzZmswMM3UOL-qOp2BGJc0JqXYSg,2839
 dao_ai/hooks/__init__.py,sha256=LlHGIuiZt6vGW8K5AQo1XJEkBP5vDVtMhq0IdjcLrD4,417
 dao_ai/hooks/core.py,sha256=ZShHctUSoauhBgdf1cecy9-D7J6-sGn-pKjuRMumW5U,6663
 dao_ai/memory/__init__.py,sha256=1kHx_p9abKYFQ6EYD05nuc1GS5HXVEpufmjBGw_7Uho,260
 dao_ai/memory/base.py,sha256=99nfr2UZJ4jmfTL_KrqUlRSCoRxzkZyWyx5WqeUoMdQ,338
-dao_ai/memory/core.py,sha256=g7chjBgVgx3iKjR2hghl0QL1j3802uIM_e7mgszur9M,4151
-dao_ai/memory/postgres.py,sha256=aWHRLhPm-9ywjlQe2B4XSdLbeaiuVV88p4PiQJFNEWo,13924
+dao_ai/memory/core.py,sha256=DnEjQO3S7hXr3CDDd7C2eE7fQUmcCS_8q9BXEgjPH3U,4271
+dao_ai/memory/postgres.py,sha256=vvI3osjx1EoU5GBA6SCUstTBKillcmLl12hVgDMjfJY,15346
 dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/providers/base.py,sha256=-fjKypCOk28h6vioPfMj9YZSw_3Kcbi2nMuAyY7vX9k,1383
-dao_ai/providers/databricks.py,sha256=PX5mBvZaIxSJIAHWVnPXsho1XvxcoR3Qs3I9UavFRsY,28306
+dao_ai/providers/databricks.py,sha256=CFZ2RojcTjiJ1aGwNI3_0qCGf339w2o5h9CRDKNesLs,39825
 dao_ai/tools/__init__.py,sha256=ye6MHaJY7tUnJ8336YJiLxuZr55zDPNdOw6gm7j5jlc,1103
 dao_ai/tools/agent.py,sha256=WbQnyziiT12TLMrA7xK0VuOU029tdmUBXbUl-R1VZ0Q,1886
 dao_ai/tools/core.py,sha256=Kei33S8vrmvPOAyrFNekaWmV2jqZ-IPS1QDSvU7RZF0,1984
-dao_ai/tools/genie.py,sha256=1CbLViNQ3KnmDtHXuwqCPug7rEhCGvuHP1NgsY-AJZ0,15050
+dao_ai/tools/genie.py,sha256=8HSOCzSg6PlBzBYXMmNfUnl-LO03p3Ki3fxLPm_dhPg,15051
 dao_ai/tools/human_in_the_loop.py,sha256=yk35MO9eNETnYFH-sqlgR-G24TrEgXpJlnZUustsLkI,3681
 dao_ai/tools/mcp.py,sha256=auEt_dwv4J26fr5AgLmwmnAsI894-cyuvkvjItzAUxs,4419
 dao_ai/tools/python.py,sha256=XcQiTMshZyLUTVR5peB3vqsoUoAAy8gol9_pcrhddfI,1831
 dao_ai/tools/time.py,sha256=Y-23qdnNHzwjvnfkWvYsE7PoWS1hfeKy44tA7sCnNac,8759
 dao_ai/tools/unity_catalog.py,sha256=uX_h52BuBAr4c9UeqSMI7DNz3BPRLeai5tBVW4sJqRI,13113
 dao_ai/tools/vector_search.py,sha256=EDYQs51zIPaAP0ma1D81wJT77GQ-v-cjb2XrFVWfWdg,2621
-dao_ai-0.0.21.dist-info/METADATA,sha256=PG-eOltuUpaJf4lYEw-DoVy5BFT9LbMCfe8GanIV7zQ,41380
-dao_ai-0.0.21.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-dao_ai-0.0.21.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
-dao_ai-0.0.21.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
-dao_ai-0.0.21.dist-info/RECORD,,
+dao_ai-0.0.22.dist-info/METADATA,sha256=kqyr-YBFC_fs-PHknnvm4Ahhad8Pfac0gTb8vKydHMw,41380
+dao_ai-0.0.22.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dao_ai-0.0.22.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.0.22.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.0.22.dist-info/RECORD,,