dao-ai 0.0.23__py3-none-any.whl → 0.0.25__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_ai/cli.py +77 -18
- dao_ai/config.py +43 -4
- dao_ai/prompts.py +13 -5
- dao_ai/providers/databricks.py +78 -0
- dao_ai/tools/mcp.py +14 -2
- {dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/METADATA +7 -7
- {dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/RECORD +10 -10
- {dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/WHEEL +0 -0
- {dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/entry_points.txt +0 -0
- {dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/licenses/LICENSE +0 -0
dao_ai/cli.py
CHANGED

@@ -460,6 +460,49 @@ def setup_logging(verbosity: int) -> None:
     logger.add(sys.stderr, level=level)
 
 
+def generate_bundle_from_template(config_path: Path, app_name: str) -> Path:
+    """
+    Generate an app-specific databricks.yaml from databricks.yaml.template.
+
+    This function:
+    1. Reads databricks.yaml.template (permanent template file)
+    2. Replaces __APP_NAME__ with the actual app name
+    3. Writes to databricks.yaml (overwrites if exists)
+    4. Returns the path to the generated file
+
+    The generated databricks.yaml is overwritten on each deployment and is not tracked in git.
+    Schema reference remains pointing to ./schemas/bundle_config_schema.json.
+
+    Args:
+        config_path: Path to the app config file
+        app_name: Normalized app name
+
+    Returns:
+        Path to the generated databricks.yaml file
+    """
+    cwd = Path.cwd()
+    template_path = cwd / "databricks.yaml.template"
+    output_path = cwd / "databricks.yaml"
+
+    if not template_path.exists():
+        logger.error(f"Template file {template_path} does not exist.")
+        sys.exit(1)
+
+    # Read template
+    with open(template_path, "r") as f:
+        template_content = f.read()
+
+    # Replace template variables
+    bundle_content = template_content.replace("__APP_NAME__", app_name)
+
+    # Write generated databricks.yaml (overwrite if exists)
+    with open(output_path, "w") as f:
+        f.write(bundle_content)
+
+    logger.info(f"Generated bundle configuration at {output_path} from template")
+    return output_path
+
+
 def run_databricks_command(
     command: list[str],
     profile: Optional[str] = None,
@@ -467,44 +510,55 @@ def run_databricks_command(
     target: Optional[str] = None,
     dry_run: bool = False,
 ) -> None:
-    """Execute a databricks CLI command with optional profile."""
+    """Execute a databricks CLI command with optional profile and target."""
+    config_path = Path(config) if config else None
+
+    if config_path and not config_path.exists():
+        logger.error(f"Configuration file {config_path} does not exist.")
+        sys.exit(1)
+
+    # Load app config and generate bundle from template
+    app_config: AppConfig = AppConfig.from_file(config_path) if config_path else None
+    normalized_name: str = normalize_name(app_config.app.name) if app_config else None
+
+    # Generate app-specific bundle from template (overwrites databricks.yaml temporarily)
+    if config_path and app_config:
+        generate_bundle_from_template(config_path, normalized_name)
+
+        # Use app name as target if not explicitly provided
+        # This ensures each app gets its own Terraform state in .databricks/bundle/<app-name>/
+        if not target and normalized_name:
+            target = normalized_name
+            logger.debug(f"Using app-specific target: {target}")
+
+    # Build databricks command (no -c flag needed, uses databricks.yaml in current dir)
     cmd = ["databricks"]
     if profile:
         cmd.extend(["--profile", profile])
+
     if target:
         cmd.extend(["--target", target])
-    cmd.extend(command)
-    if config:
-        config_path = Path(config)
 
-
-            logger.error(f"Configuration file {config_path} does not exist.")
-            sys.exit(1)
-
-        app_config: AppConfig = AppConfig.from_file(config_path)
+    cmd.extend(command)
 
-
-
+    # Add config_path variable for notebooks
+    if config_path and app_config:
+        # Calculate relative path from notebooks directory to config file
        config_abs = config_path.resolve()
        cwd = Path.cwd()
        notebooks_dir = cwd / "notebooks"
 
-        # Calculate relative path from notebooks directory to config file
        try:
            relative_config = config_abs.relative_to(notebooks_dir)
        except ValueError:
-            # Config file is outside notebooks directory, calculate relative path
-            # Use os.path.relpath to get the relative path from notebooks_dir to config file
            relative_config = Path(os.path.relpath(config_abs, notebooks_dir))
 
        cmd.append(f'--var="config_path={relative_config}"')
 
-        normalized_name: str = normalize_name(app_config.app.name)
-        cmd.append(f'--var="app_name={normalized_name}"')
-
     logger.debug(f"Executing command: {' '.join(cmd)}")
 
     if dry_run:
+        logger.info(f"[DRY RUN] Would execute: {' '.join(cmd)}")
        return
 
    try:
@@ -531,6 +585,9 @@ def run_databricks_command(
    except FileNotFoundError:
        logger.error("databricks CLI not found. Please install the Databricks CLI.")
        sys.exit(1)
+    except Exception as e:
+        logger.error(f"Command execution failed: {e}")
+        sys.exit(1)
 
 
 def handle_bundle_command(options: Namespace) -> None:
@@ -539,6 +596,7 @@ def handle_bundle_command(options: Namespace) -> None:
    config: Optional[str] = options.config
    target: Optional[str] = options.target
    dry_run: bool = options.dry_run
+
    if options.deploy:
        logger.info("Deploying DAO AI asset bundle...")
        run_databricks_command(
@@ -546,8 +604,9 @@ def handle_bundle_command(options: Namespace) -> None:
        )
    if options.run:
        logger.info("Running DAO AI system with current configuration...")
+        # Use static job resource key that matches databricks.yaml (resources.jobs.deploy_job)
        run_databricks_command(
-            ["bundle", "run", "
+            ["bundle", "run", "deploy_job"],
            profile,
            config,
            target,
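The bundle generation is a plain string substitution on the `__APP_NAME__` placeholder. A minimal sketch of that behaviour, with hypothetical template content (the real databricks.yaml.template ships with the project and is not part of this diff):

```python
from pathlib import Path

# Hypothetical template content for illustration only; the actual
# databricks.yaml.template lives in the project root.
template = (
    "bundle:\n"
    "  name: __APP_NAME__\n"
    "targets:\n"
    "  __APP_NAME__:\n"
    "    mode: development\n"
)

app_name = "my_retail_agent"  # produced by normalize_name(app_config.app.name)
rendered = template.replace("__APP_NAME__", app_name)

# Equivalent to what generate_bundle_from_template writes on every deployment.
Path("databricks.yaml").write_text(rendered)
```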
dao_ai/config.py
CHANGED

@@ -1012,8 +1012,12 @@ class McpFunctionModel(BaseFunctionModel, HasFullName):
 
     @model_validator(mode="after")
     def validate_mutually_exclusive(self):
-        if self.transport == TransportType.STREAMABLE_HTTP and not
-
+        if self.transport == TransportType.STREAMABLE_HTTP and not (
+            self.url or self.connection
+        ):
+            raise ValueError(
+                "url or connection must be provided for STREAMABLE_HTTP transport"
+            )
         if self.transport == TransportType.STDIO and not self.command:
             raise ValueError("command must not be provided for STDIO transport")
         if self.transport == TransportType.STDIO and not self.args:
@@ -1162,6 +1166,40 @@ class MemoryModel(BaseModel):
 FunctionHook: TypeAlias = PythonFunctionModel | FactoryFunctionModel | str
 
 
+class PromptModel(BaseModel, HasFullName):
+    model_config = ConfigDict(use_enum_values=True, extra="forbid")
+    schema_model: Optional[SchemaModel] = Field(default=None, alias="schema")
+    name: str
+    description: Optional[str] = None
+    default_template: Optional[str] = None
+    alias: Optional[str] = None
+    version: Optional[int] = None
+    tags: Optional[dict[str, Any]] = Field(default_factory=dict)
+
+    @property
+    def template(self) -> str:
+        from dao_ai.providers.databricks import DatabricksProvider
+
+        provider: DatabricksProvider = DatabricksProvider()
+        prompt: str = provider.get_prompt(self)
+        return prompt
+
+    @property
+    def full_name(self) -> str:
+        if self.schema_model:
+            name: str = ""
+            if self.name:
+                name = f".{self.name}"
+            return f"{self.schema_model.catalog_name}.{self.schema_model.schema_name}{name}"
+        return self.name
+
+    @model_validator(mode="after")
+    def validate_mutually_exclusive(self):
+        if self.alias and self.version:
+            raise ValueError("Cannot specify both alias and version")
+        return self
+
+
 class AgentModel(BaseModel):
     model_config = ConfigDict(use_enum_values=True, extra="forbid")
     name: str
@@ -1169,7 +1207,7 @@ class AgentModel(BaseModel):
     model: LLMModel
     tools: list[ToolModel] = Field(default_factory=list)
     guardrails: list[GuardrailModel] = Field(default_factory=list)
-    prompt: Optional[str] = None
+    prompt: Optional[str | PromptModel] = None
     handoff_prompt: Optional[str] = None
     create_agent_hook: Optional[FunctionHook] = None
     pre_agent_hook: Optional[FunctionHook] = None
@@ -1263,7 +1301,7 @@ class ChatPayload(BaseModel):
     model_config = ConfigDict(use_enum_values=True, extra="forbid")
     input: Optional[list[Message]] = None
     messages: Optional[list[Message]] = None
-    custom_inputs: dict
+    custom_inputs: Optional[dict] = Field(default_factory=dict)
 
     @model_validator(mode="after")
     def validate_mutual_exclusion_and_alias(self) -> "ChatPayload":
@@ -1495,6 +1533,7 @@ class AppConfig(BaseModel):
     tools: dict[str, ToolModel] = Field(default_factory=dict)
     guardrails: dict[str, GuardrailModel] = Field(default_factory=dict)
     memory: Optional[MemoryModel] = None
+    prompts: dict[str, PromptModel] = Field(default_factory=dict)
     agents: dict[str, AgentModel] = Field(default_factory=dict)
     app: Optional[AppModel] = None
     evaluation: Optional[EvaluationModel] = None
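A rough sketch of how the new PromptModel resolves its Unity Catalog-style full name and enforces alias/version exclusivity. The catalog and schema names are illustrative, and the SchemaModel constructor fields are assumed from how full_name reads them:

```python
from dao_ai.config import PromptModel, SchemaModel

# Illustrative names; "schema" is the declared alias for schema_model.
# SchemaModel(catalog_name=..., schema_name=...) is an assumption based on the
# attributes full_name dereferences.
prompt = PromptModel(
    name="router_prompt",
    schema=SchemaModel(catalog_name="main", schema_name="agents"),
    default_template="You are a routing agent. Question: {question}",
)

print(prompt.full_name)  # -> "main.agents.router_prompt"

# prompt.template delegates to DatabricksProvider().get_prompt(prompt),
# which reads the MLflow Prompt Registry (see providers/databricks.py below).

# The new validator rejects ambiguous references:
# PromptModel(name="router_prompt", alias="prod", version=3)  # -> ValueError
```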
dao_ai/prompts.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import Any, Callable, Sequence
+from typing import Any, Callable, Optional, Sequence
 
 from langchain.prompts import PromptTemplate
 from langchain_core.messages import (
@@ -8,18 +8,26 @@ from langchain_core.messages import (
 from langchain_core.runnables import RunnableConfig
 from loguru import logger
 
+from dao_ai.config import PromptModel
 from dao_ai.state import SharedState
 
 
-def make_prompt(
+def make_prompt(
+    base_system_prompt: Optional[str | PromptModel],
+) -> Callable[[dict, RunnableConfig], list]:
     logger.debug(f"make_prompt: {base_system_prompt}")
 
     def prompt(state: SharedState, config: RunnableConfig) -> list:
         system_prompt: str = ""
         if base_system_prompt:
-
-
-            )
+            # Extract template string from PromptModel or use string directly
+            template_str: str
+            if isinstance(base_system_prompt, PromptModel):
+                template_str = base_system_prompt.template
+            else:
+                template_str = base_system_prompt
+
+            prompt_template: PromptTemplate = PromptTemplate.from_template(template_str)
 
         params: dict[str, Any] = {
             input_variable: "" for input_variable in prompt_template.input_variables
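In practice this means make_prompt accepts either form. A short sketch, assuming an AppConfig with a prompts section as introduced in config.py above:

```python
from dao_ai.prompts import make_prompt

# Previous behaviour: a raw template string still works.
prompt_fn = make_prompt("You are a concise assistant. Question: {question}")

# New behaviour: a PromptModel is resolved through its .template property
# (hypothetical config lookup; the key name is illustrative).
# prompt_fn = make_prompt(app_config.prompts["router_prompt"])
```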
dao_ai/providers/databricks.py
CHANGED

@@ -30,6 +30,7 @@ from databricks.vector_search.index import VectorSearchIndex
 from loguru import logger
 from mlflow import MlflowClient
 from mlflow.entities import Experiment
+from mlflow.entities.model_registry import PromptVersion
 from mlflow.entities.model_registry.model_version import ModelVersion
 from mlflow.models.auth_policy import AuthPolicy, SystemAuthPolicy, UserAuthPolicy
 from mlflow.models.model import ModelInfo
@@ -52,6 +53,7 @@ from dao_ai.config import (
     IndexModel,
     IsDatabricksResource,
     LLMModel,
+    PromptModel,
     SchemaModel,
     TableModel,
     UnityCatalogFunctionSqlModel,
@@ -1023,3 +1025,79 @@ class DatabricksProvider(ServiceProvider):
                 f"Error creating instance role '{role_name}' for database {instance_name}: {e}"
             )
             raise
+
+    def get_prompt(self, prompt_model: PromptModel) -> str:
+        """Load prompt from MLflow Prompt Registry or fall back to default_template."""
+        prompt_name: str = prompt_model.full_name
+
+        # Build prompt URI based on alias, version, or default to latest
+        if prompt_model.alias:
+            prompt_uri = f"prompts:/{prompt_name}@{prompt_model.alias}"
+        elif prompt_model.version:
+            prompt_uri = f"prompts:/{prompt_name}/{prompt_model.version}"
+        else:
+            prompt_uri = f"prompts:/{prompt_name}@latest"
+
+        try:
+            from mlflow.genai.prompts import Prompt
+
+            prompt_obj: Prompt = mlflow.genai.load_prompt(prompt_uri)
+            return prompt_obj.to_single_brace_format()
+
+        except Exception as e:
+            logger.warning(f"Failed to load prompt '{prompt_name}' from registry: {e}")
+
+            if prompt_model.default_template:
+                logger.info(f"Using default_template for '{prompt_name}'")
+                self._sync_default_template_to_registry(
+                    prompt_name, prompt_model.default_template, prompt_model.description
+                )
+                return prompt_model.default_template
+
+            raise ValueError(
+                f"Prompt '{prompt_name}' not found in registry and no default_template provided"
+            ) from e
+
+    def _sync_default_template_to_registry(
+        self, prompt_name: str, default_template: str, description: str | None = None
+    ) -> None:
+        """Register default_template to prompt registry under 'default' alias if changed."""
+        try:
+            # Check if default alias already has the same template
+            try:
+                logger.debug(f"Loading prompt '{prompt_name}' from registry...")
+                existing: PromptVersion = mlflow.genai.load_prompt(
+                    f"prompts:/{prompt_name}@default"
+                )
+                if (
+                    existing.to_single_brace_format().strip()
+                    == default_template.strip()
+                ):
+                    logger.debug(f"Prompt '{prompt_name}' is already up-to-date")
+                    return  # Already up-to-date
+            except Exception:
+                logger.debug(
+                    f"Default alias for prompt '{prompt_name}' doesn't exist yet"
+                )
+
+            # Register new version and set as default alias
+            commit_message = description or "Auto-synced from default_template"
+            prompt_version: PromptVersion = mlflow.genai.register_prompt(
+                name=prompt_name,
+                template=default_template,
+                commit_message=commit_message,
+            )
+
+            logger.debug(f"Setting default alias for prompt '{prompt_name}'")
+            mlflow.genai.set_prompt_alias(
+                name=prompt_name,
+                alias="default",
+                version=prompt_version.version,
+            )
+
+            logger.info(
+                f"Synced prompt '{prompt_name}' v{prompt_version.version} to registry"
+            )
+
+        except Exception as e:
+            logger.warning(f"Failed to sync '{prompt_name}' to registry: {e}")
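The registry lookup is keyed by URIs of the form prompts:/<full_name>@<alias> or prompts:/<full_name>/<version>. A hedged sketch of the three cases get_prompt builds, using an illustrative prompt name and assuming the prompt has already been registered in the workspace:

```python
import mlflow

# alias set    -> "prompts:/main.agents.router_prompt@prod"
# version set  -> "prompts:/main.agents.router_prompt/3"
# neither set  -> "prompts:/main.agents.router_prompt@latest"
prompt = mlflow.genai.load_prompt("prompts:/main.agents.router_prompt@prod")
template = prompt.to_single_brace_format()

# If the lookup raises, get_prompt falls back to default_template and syncs it
# to the registry under the "default" alias via _sync_default_template_to_registry.
```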
dao_ai/tools/mcp.py
CHANGED

@@ -35,14 +35,26 @@ def create_mcp_tools(
     if function.connection:
         # Use UC Connection approach with DatabricksOAuthClientProvider
         logger.debug(f"Using UC Connection for MCP: {function.connection.name}")
-
+
+        # Construct URL if not provided
+        if function.url:
+            mcp_url = function.url
+            logger.debug(f"Using provided MCP URL: {mcp_url}")
+        else:
+            # Construct URL from workspace host and connection name
+            # Pattern: https://{workspace_host}/api/2.0/mcp/external/{connection_name}
+            workspace_client = function.connection.workspace_client
+            workspace_host = workspace_client.config.host
+            connection_name = function.connection.name
+            mcp_url = f"{workspace_host}/api/2.0/mcp/external/{connection_name}"
+            logger.debug(f"Constructed MCP URL from connection: {mcp_url}")
 
         async def _get_tools_with_connection():
             """Get tools using DatabricksOAuthClientProvider."""
             workspace_client = function.connection.workspace_client
 
             async with streamablehttp_client(
-
+                mcp_url, auth=DatabricksOAuthClientProvider(workspace_client)
             ) as (read_stream, write_stream, _):
                 async with ClientSession(read_stream, write_stream) as session:
                     # Initialize and list tools
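When no explicit url is configured, the MCP endpoint is now derived from the workspace host and the Unity Catalog connection name. A minimal sketch with illustrative values:

```python
# Illustrative workspace host and connection name.
workspace_host = "https://adb-1234567890123456.7.azuredatabricks.net"
connection_name = "github_mcp"

mcp_url = f"{workspace_host}/api/2.0/mcp/external/{connection_name}"
# -> https://adb-1234567890123456.7.azuredatabricks.net/api/2.0/mcp/external/github_mcp
```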
{dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.0.23
+Version: 0.0.25
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
@@ -24,8 +24,8 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: System :: Distributed Computing
 Requires-Python: >=3.12
-Requires-Dist: databricks-agents>=1.
-Requires-Dist: databricks-langchain>=0.8.
+Requires-Dist: databricks-agents>=1.7.0
+Requires-Dist: databricks-langchain>=0.8.1
 Requires-Dist: databricks-mcp>=0.3.0
 Requires-Dist: databricks-sdk[openai]>=0.67.0
 Requires-Dist: duckduckgo-search>=8.0.2
@@ -33,19 +33,19 @@ Requires-Dist: grandalf>=0.8
 Requires-Dist: langchain-mcp-adapters>=0.1.10
 Requires-Dist: langchain-tavily>=0.2.11
 Requires-Dist: langchain>=0.3.27
-Requires-Dist: langgraph-checkpoint-postgres>=2.0.
+Requires-Dist: langgraph-checkpoint-postgres>=2.0.25
 Requires-Dist: langgraph-supervisor>=0.0.29
 Requires-Dist: langgraph-swarm>=0.0.14
-Requires-Dist: langgraph>=0.6.
+Requires-Dist: langgraph>=0.6.10
 Requires-Dist: langmem>=0.0.29
 Requires-Dist: loguru>=0.7.3
-Requires-Dist: mcp>=1.
+Requires-Dist: mcp>=1.17.0
 Requires-Dist: mlflow>=3.4.0
 Requires-Dist: nest-asyncio>=1.6.0
 Requires-Dist: openevals>=0.0.19
 Requires-Dist: openpyxl>=3.1.5
 Requires-Dist: psycopg[binary,pool]>=3.2.9
-Requires-Dist: pydantic>=2.
+Requires-Dist: pydantic>=2.12.0
 Requires-Dist: python-dotenv>=1.1.0
 Requires-Dist: pyyaml>=6.0.2
 Requires-Dist: rich>=14.0.0

{dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/RECORD
CHANGED

@@ -2,14 +2,14 @@ dao_ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/agent_as_code.py,sha256=kPSeDz2-1jRaed1TMs4LA3VECoyqe9_Ed2beRLB9gXQ,472
 dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
 dao_ai/chat_models.py,sha256=uhwwOTeLyHWqoTTgHrs4n5iSyTwe4EQcLKnh3jRxPWI,8626
-dao_ai/cli.py,sha256=
-dao_ai/config.py,sha256=
+dao_ai/cli.py,sha256=gq-nsapWxDA1M6Jua3vajBvIwf0Oa6YLcB58lEtMKUo,22503
+dao_ai/config.py,sha256=_4OyJ1x7DH1S-5-FTJp7geeOf2H096PQHVFUBtALKsU,56795
 dao_ai/graph.py,sha256=APYc2y3cig4P52X4sOHSFSZNK8j5EtEPJLFwWeJ3KQQ,7956
 dao_ai/guardrails.py,sha256=4TKArDONRy8RwHzOT1plZ1rhy3x9GF_aeGpPCRl6wYA,4016
 dao_ai/messages.py,sha256=xl_3-WcFqZKCFCiov8sZOPljTdM3gX3fCHhxq-xFg2U,7005
 dao_ai/models.py,sha256=8r8GIG3EGxtVyWsRNI56lVaBjiNrPkzh4HdwMZRq8iw,31689
 dao_ai/nodes.py,sha256=SSuFNTXOdFaKg_aX-yUkQO7fM9wvNGu14lPXKDapU1U,8461
-dao_ai/prompts.py,sha256=
+dao_ai/prompts.py,sha256=7Hcstmv514P0s9s-TVoIlbkDV2XXOphGCW6gcPeyUYE,1628
 dao_ai/state.py,sha256=_lF9krAYYjvFDMUwZzVKOn0ZnXKcOrbjWKdre0C5B54,1137
 dao_ai/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/utils.py,sha256=yXgqHrYdO5qDxgxUs2G5XJeLFgwg8D0BIJvbFkqSbhs,4519
@@ -22,20 +22,20 @@ dao_ai/memory/core.py,sha256=DnEjQO3S7hXr3CDDd7C2eE7fQUmcCS_8q9BXEgjPH3U,4271
 dao_ai/memory/postgres.py,sha256=vvI3osjx1EoU5GBA6SCUstTBKillcmLl12hVgDMjfJY,15346
 dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/providers/base.py,sha256=-fjKypCOk28h6vioPfMj9YZSw_3Kcbi2nMuAyY7vX9k,1383
-dao_ai/providers/databricks.py,sha256=
+dao_ai/providers/databricks.py,sha256=1BPYQxi4-Z4I1ygZYlKV8ycdxZTtWNXplySToayHCEI,43096
 dao_ai/tools/__init__.py,sha256=G5-5Yi6zpQOH53b5IzLdtsC6g0Ep6leI5GxgxOmgw7Q,1203
 dao_ai/tools/agent.py,sha256=WbQnyziiT12TLMrA7xK0VuOU029tdmUBXbUl-R1VZ0Q,1886
 dao_ai/tools/core.py,sha256=Kei33S8vrmvPOAyrFNekaWmV2jqZ-IPS1QDSvU7RZF0,1984
 dao_ai/tools/genie.py,sha256=8HSOCzSg6PlBzBYXMmNfUnl-LO03p3Ki3fxLPm_dhPg,15051
 dao_ai/tools/human_in_the_loop.py,sha256=yk35MO9eNETnYFH-sqlgR-G24TrEgXpJlnZUustsLkI,3681
-dao_ai/tools/mcp.py,sha256=
+dao_ai/tools/mcp.py,sha256=CYv59yn-LIY11atUgNtN2W6vR7C6Qyo7-rvPcVJnXVk,7461
 dao_ai/tools/python.py,sha256=XcQiTMshZyLUTVR5peB3vqsoUoAAy8gol9_pcrhddfI,1831
 dao_ai/tools/slack.py,sha256=SCvyVcD9Pv_XXPXePE_fSU1Pd8VLTEkKDLvoGTZWy2Y,4775
 dao_ai/tools/time.py,sha256=Y-23qdnNHzwjvnfkWvYsE7PoWS1hfeKy44tA7sCnNac,8759
 dao_ai/tools/unity_catalog.py,sha256=uX_h52BuBAr4c9UeqSMI7DNz3BPRLeai5tBVW4sJqRI,13113
 dao_ai/tools/vector_search.py,sha256=EDYQs51zIPaAP0ma1D81wJT77GQ-v-cjb2XrFVWfWdg,2621
-dao_ai-0.0.
-dao_ai-0.0.
-dao_ai-0.0.
-dao_ai-0.0.
-dao_ai-0.0.
+dao_ai-0.0.25.dist-info/METADATA,sha256=ahAblBSty81iw_mlf9blqOF4-AKN5Asak9SWH0H4FIs,42639
+dao_ai-0.0.25.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dao_ai-0.0.25.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.0.25.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.0.25.dist-info/RECORD,,

{dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/WHEEL
File without changes

{dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/entry_points.txt
File without changes

{dao_ai-0.0.23.dist-info → dao_ai-0.0.25.dist-info}/licenses/LICENSE
File without changes