dao-ai 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_ai/apps/resources.py +45 -2
- dao_ai/providers/databricks.py +12 -2
- {dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/METADATA +2 -2
- {dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/RECORD +7 -7
- {dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/WHEEL +0 -0
- {dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/entry_points.txt +0 -0
- {dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/licenses/LICENSE +0 -0
dao_ai/apps/resources.py
CHANGED
@@ -38,6 +38,8 @@ from databricks.sdk.service.apps import (
     AppResource,
     AppResourceDatabase,
     AppResourceDatabaseDatabasePermission,
+    AppResourceExperiment,
+    AppResourceExperimentExperimentPermission,
     AppResourceGenieSpace,
     AppResourceGenieSpaceGenieSpacePermission,
     AppResourceSecret,
@@ -518,7 +520,10 @@ def _sanitize_resource_name(name: str) -> str:
     return sanitized


-def generate_sdk_resources(config: AppConfig) -> list[AppResource]:
+def generate_sdk_resources(
+    config: AppConfig,
+    experiment_id: str | None = None,
+) -> list[AppResource]:
     """
     Generate Databricks SDK AppResource objects from an AppConfig.

@@ -528,6 +533,9 @@ def generate_sdk_resources(config: AppConfig) -> list[AppResource]:

     Args:
         config: The AppConfig containing resource definitions
+        experiment_id: Optional MLflow experiment ID to add as a resource.
+            When provided, the experiment is added with CAN_EDIT permission,
+            allowing the app to log traces and runs.

     Returns:
         A list of AppResource objects for the Databricks SDK
@@ -536,13 +544,17 @@ def generate_sdk_resources(config: AppConfig) -> list[AppResource]:
     >>> from databricks.sdk import WorkspaceClient
     >>> from databricks.sdk.service.apps import App
     >>> config = AppConfig.from_file("model_config.yaml")
-    >>> resources = generate_sdk_resources(config)
+    >>> resources = generate_sdk_resources(config, experiment_id="12345")
     >>> w = WorkspaceClient()
     >>> app = App(name="my-app", resources=resources)
     >>> w.apps.create_and_wait(app=app)
     """
     resources: list[AppResource] = []

+    # Add experiment resource if provided
+    if experiment_id:
+        resources.append(_extract_sdk_experiment_resource(experiment_id))
+
     if config.resources is None:
         logger.debug("No resources defined in config")
         return resources
@@ -685,6 +697,36 @@ def _extract_sdk_volume_resources(
     return resources


+def _extract_sdk_experiment_resource(
+    experiment_id: str,
+    resource_name: str = "experiment",
+) -> AppResource:
+    """Create SDK AppResource for MLflow experiment.
+
+    This allows the Databricks App to log traces and runs to the specified
+    MLflow experiment. The experiment ID is exposed via the MLFLOW_EXPERIMENT_ID
+    environment variable using valueFrom: experiment in app.yaml.
+
+    Args:
+        experiment_id: The MLflow experiment ID
+        resource_name: The resource key name (default: "experiment")
+
+    Returns:
+        An AppResource for the MLflow experiment
+    """
+    resource = AppResource(
+        name=resource_name,
+        experiment=AppResourceExperiment(
+            experiment_id=experiment_id,
+            permission=AppResourceExperimentExperimentPermission.CAN_EDIT,
+        ),
+    )
+    logger.debug(
+        f"Extracted SDK experiment resource: {resource_name} -> {experiment_id}"
+    )
+    return resource
+
+
 def _extract_sdk_secrets_from_config(config: AppConfig) -> list[AppResource]:
     """
     Extract SDK AppResource objects for all secrets referenced in the config.
@@ -926,6 +968,7 @@ def generate_app_yaml(
     env_vars: list[dict[str, str]] = [
         {"name": "MLFLOW_TRACKING_URI", "value": "databricks"},
         {"name": "MLFLOW_REGISTRY_URI", "value": "databricks-uc"},
+        {"name": "MLFLOW_EXPERIMENT_ID", "valueFrom": "experiment"},
         {"name": "DAO_AI_CONFIG_PATH", "value": "model_config.yaml"},
     ]

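Taken together, the resources.py changes let a deployment attach an MLflow experiment to a Databricks App with CAN_EDIT permission and surface its ID to the running app as MLFLOW_EXPERIMENT_ID (via valueFrom: experiment in the generated app.yaml). A minimal sketch of the new call path, following the doctest in the diff; the AppConfig import path, the experiment ID "12345", and the app name are placeholders rather than values taken from the package:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

from dao_ai.apps.resources import generate_sdk_resources
from dao_ai.config import AppConfig  # import path assumed; adjust to where AppConfig lives

# Build SDK resources from the app config; passing experiment_id also attaches
# the MLflow experiment with CAN_EDIT so the app can log traces and runs.
config = AppConfig.from_file("model_config.yaml")
resources = generate_sdk_resources(config, experiment_id="12345")

# Create the Databricks App with the generated resources attached.
w = WorkspaceClient()
app = App(name="my-app", resources=resources)
w.apps.create_and_wait(app=app)
```

Calling generate_sdk_resources(config) without an experiment_id keeps the 0.1.10 behavior, since the new parameter defaults to None.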
dao_ai/providers/databricks.py
CHANGED
@@ -558,6 +558,16 @@ class DatabricksProvider(ServiceProvider):

         logger.info("Using workspace source path", source_path=source_path)

+        # Get or create experiment for this app (for tracing and tracking)
+        from mlflow.entities import Experiment
+
+        experiment: Experiment = self.get_or_create_experiment(config)
+        logger.info(
+            "Using MLflow experiment for app",
+            experiment_name=experiment.name,
+            experiment_id=experiment.experiment_id,
+        )
+
         # Upload the configuration file to the workspace
         source_config_path: str | None = config.source_config_path
         if source_config_path:
@@ -618,13 +628,13 @@ class DatabricksProvider(ServiceProvider):
         )
         logger.info("app.yaml with resources uploaded", path=app_yaml_path)

-        # Generate SDK resources from the config
+        # Generate SDK resources from the config (including experiment)
         from dao_ai.apps.resources import (
             generate_sdk_resources,
             generate_user_api_scopes,
         )

-        sdk_resources = generate_sdk_resources(config)
+        sdk_resources = generate_sdk_resources(config, experiment_id=experiment.experiment_id)
         if sdk_resources:
             logger.info(
                 "Discovered app resources from config",
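On the provider side, the deployment path now resolves (or creates) the app's MLflow experiment via self.get_or_create_experiment(config) and threads its ID into generate_sdk_resources, so the experiment is granted to the app as a resource. Inside the deployed app, stock MLflow reads MLFLOW_TRACKING_URI and MLFLOW_EXPERIMENT_ID from the environment, so runs and traces should land in that experiment without extra setup. A rough sketch of what the runtime sees; the printed values are illustrative, and running this outside a Databricks App additionally requires Databricks credentials:

```python
import os

import mlflow

# In the deployed app these come from the generated app.yaml:
# MLFLOW_TRACKING_URI / MLFLOW_REGISTRY_URI are literal values, while
# MLFLOW_EXPERIMENT_ID is injected from the "experiment" app resource.
print(os.environ.get("MLFLOW_TRACKING_URI"))   # expected: "databricks"
print(os.environ.get("MLFLOW_EXPERIMENT_ID"))  # e.g. "12345"

# MLflow honors MLFLOW_EXPERIMENT_ID when no experiment is set explicitly,
# so this run is recorded in the experiment attached by the provider.
with mlflow.start_run() as run:
    mlflow.log_param("deployed_via", "dao-ai")  # illustrative parameter only
    print(run.info.experiment_id)
```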
{dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.1.10
+Version: 0.1.11
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
@@ -28,7 +28,7 @@ Requires-Python: >=3.11
 Requires-Dist: databricks-agents>=1.9.0
 Requires-Dist: databricks-langchain[memory]>=0.12.1
 Requires-Dist: databricks-mcp>=0.5.0
-Requires-Dist: databricks-sdk[openai]>=0.
+Requires-Dist: databricks-sdk[openai]>=0.77.0
 Requires-Dist: ddgs>=9.10.0
 Requires-Dist: dspy>=2.6.27
 Requires-Dist: flashrank>=0.2.10
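The dependency change lines up with the new imports in resources.py: the floor is raised to databricks-sdk[openai]>=0.77.0, presumably so that the experiment resource classes are guaranteed to be importable (only the constraint itself comes from the diff; the exact SDK version that introduced these classes is an assumption). A quick sanity check:

```python
# Assumes databricks-sdk>=0.77.0 is installed, matching the new constraint in METADATA.
from databricks.sdk.service.apps import (
    AppResourceExperiment,
    AppResourceExperimentExperimentPermission,
)

# CAN_EDIT is the permission generate_sdk_resources grants the app on the experiment.
print(AppResourceExperimentExperimentPermission.CAN_EDIT)
```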
{dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/RECORD
CHANGED
@@ -16,7 +16,7 @@ dao_ai/vector_search.py,sha256=8d3xROg9zSIYNXjRRl6rSexsJTlufjRl5Fy1ZA8daKA,4019
 dao_ai/apps/__init__.py,sha256=RLuhZf4gQ4pemwKDz1183aXib8UfaRhwfKvRx68GRlM,661
 dao_ai/apps/handlers.py,sha256=NN81uDV2hy83zT-kY36mxyBgCQIHBJApX4bnUceGB8k,2614
 dao_ai/apps/model_serving.py,sha256=XLt3_0pGSRceMK6YtOrND9Jnh7mKLPCtwjVDLIaptQU,847
-dao_ai/apps/resources.py,sha256=
+dao_ai/apps/resources.py,sha256=kAXqYx-Xwba1SaltPZIAtB2xoIHxH98Q41b8YGPLfI0,38029
 dao_ai/apps/server.py,sha256=D3R3J1svtpxnpDjoM-oxg66dMDI8USgiQHPRvRHc7oQ,1276
 dao_ai/genie/__init__.py,sha256=vdEyGhrt6L8GlK75SyYvTnl8QpHKDCJC5hJKLg4DesQ,1063
 dao_ai/genie/core.py,sha256=HPKbocvhnnw_PkQwfoq5bpgQmL9lZyyS6_goTJL8yiY,1073
@@ -53,7 +53,7 @@ dao_ai/orchestration/supervisor.py,sha256=alKMEEo9G5LhdpMvTVdAMel234cZj5_MguWl4w
 dao_ai/orchestration/swarm.py,sha256=8tp1eGmsQqqWpaDcjPoJckddPWohZdmmN0RGRJ_xzOA,9198
 dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/providers/base.py,sha256=cJGo3UjUTPgS91dv38ePOHwQQtYhIa84ebb167CBXjk,2111
-dao_ai/providers/databricks.py,sha256=
+dao_ai/providers/databricks.py,sha256=EZokRfbScW5K2h8EOCQ95XjoXt981ySslQk0d0DJmeI,70687
 dao_ai/tools/__init__.py,sha256=NfRpAKds_taHbx6gzLPWgtPXve-YpwzkoOAUflwxceM,1734
 dao_ai/tools/agent.py,sha256=plIWALywRjaDSnot13nYehBsrHRpBUpsVZakoGeajOE,1858
 dao_ai/tools/core.py,sha256=bRIN3BZhRQX8-Kpu3HPomliodyskCqjxynQmYbk6Vjs,3783
@@ -68,8 +68,8 @@ dao_ai/tools/sql.py,sha256=tKd1gjpLuKdQDyfmyYYtMiNRHDW6MGRbdEVaeqyB8Ok,7632
 dao_ai/tools/time.py,sha256=tufJniwivq29y0LIffbgeBTIDE6VgrLpmVf8Qr90qjw,9224
 dao_ai/tools/unity_catalog.py,sha256=AjQfW7bvV8NurqDLIyntYRv2eJuTwNdbvex1L5CRjOk,15534
 dao_ai/tools/vector_search.py,sha256=oe2uBwl2TfeJIXPpwiS6Rmz7wcHczSxNyqS9P3hE6co,14542
-dao_ai-0.1.
-dao_ai-0.1.
-dao_ai-0.1.
-dao_ai-0.1.
-dao_ai-0.1.
+dao_ai-0.1.11.dist-info/METADATA,sha256=uSlkpK84xgQlBOkLAzdOCycWxUC1ovzrrrU_uCVMOd0,16698
+dao_ai-0.1.11.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+dao_ai-0.1.11.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.1.11.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.1.11.dist-info/RECORD,,
{dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/WHEEL
File without changes
{dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/entry_points.txt
File without changes
{dao_ai-0.1.10.dist-info → dao_ai-0.1.11.dist-info}/licenses/LICENSE
File without changes