dao-ai 0.0.24__py3-none-any.whl → 0.0.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dao_ai/cli.py CHANGED
@@ -460,6 +460,49 @@ def setup_logging(verbosity: int) -> None:
     logger.add(sys.stderr, level=level)
 
 
+def generate_bundle_from_template(config_path: Path, app_name: str) -> Path:
+    """
+    Generate an app-specific databricks.yaml from databricks.yaml.template.
+
+    This function:
+    1. Reads databricks.yaml.template (permanent template file)
+    2. Replaces __APP_NAME__ with the actual app name
+    3. Writes to databricks.yaml (overwrites if exists)
+    4. Returns the path to the generated file
+
+    The generated databricks.yaml is overwritten on each deployment and is not tracked in git.
+    Schema reference remains pointing to ./schemas/bundle_config_schema.json.
+
+    Args:
+        config_path: Path to the app config file
+        app_name: Normalized app name
+
+    Returns:
+        Path to the generated databricks.yaml file
+    """
+    cwd = Path.cwd()
+    template_path = cwd / "databricks.yaml.template"
+    output_path = cwd / "databricks.yaml"
+
+    if not template_path.exists():
+        logger.error(f"Template file {template_path} does not exist.")
+        sys.exit(1)
+
+    # Read template
+    with open(template_path, "r") as f:
+        template_content = f.read()
+
+    # Replace template variables
+    bundle_content = template_content.replace("__APP_NAME__", app_name)
+
+    # Write generated databricks.yaml (overwrite if exists)
+    with open(output_path, "w") as f:
+        f.write(bundle_content)
+
+    logger.info(f"Generated bundle configuration at {output_path} from template")
+    return output_path
+
+
 def run_databricks_command(
     command: list[str],
     profile: Optional[str] = None,
@@ -467,44 +510,55 @@ def run_databricks_command(
     target: Optional[str] = None,
     dry_run: bool = False,
 ) -> None:
-    """Execute a databricks CLI command with optional profile."""
+    """Execute a databricks CLI command with optional profile and target."""
+    config_path = Path(config) if config else None
+
+    if config_path and not config_path.exists():
+        logger.error(f"Configuration file {config_path} does not exist.")
+        sys.exit(1)
+
+    # Load app config and generate bundle from template
+    app_config: AppConfig = AppConfig.from_file(config_path) if config_path else None
+    normalized_name: str = normalize_name(app_config.app.name) if app_config else None
+
+    # Generate app-specific bundle from template (overwrites databricks.yaml temporarily)
+    if config_path and app_config:
+        generate_bundle_from_template(config_path, normalized_name)
+
+    # Use app name as target if not explicitly provided
+    # This ensures each app gets its own Terraform state in .databricks/bundle/<app-name>/
+    if not target and normalized_name:
+        target = normalized_name
+        logger.debug(f"Using app-specific target: {target}")
+
+    # Build databricks command (no -c flag needed, uses databricks.yaml in current dir)
     cmd = ["databricks"]
     if profile:
         cmd.extend(["--profile", profile])
+
     if target:
         cmd.extend(["--target", target])
-    cmd.extend(command)
-    if config:
-        config_path = Path(config)
 
-        if not config_path.exists():
-            logger.error(f"Configuration file {config_path} does not exist.")
-            sys.exit(1)
-
-        app_config: AppConfig = AppConfig.from_file(config_path)
+    cmd.extend(command)
 
-        # Always convert to path relative to notebooks directory
-        # Get absolute path of config file and current working directory
+    # Add config_path variable for notebooks
+    if config_path and app_config:
+        # Calculate relative path from notebooks directory to config file
         config_abs = config_path.resolve()
         cwd = Path.cwd()
         notebooks_dir = cwd / "notebooks"
 
-        # Calculate relative path from notebooks directory to config file
         try:
            relative_config = config_abs.relative_to(notebooks_dir)
        except ValueError:
-            # Config file is outside notebooks directory, calculate relative path
-            # Use os.path.relpath to get the relative path from notebooks_dir to config file
            relative_config = Path(os.path.relpath(config_abs, notebooks_dir))
 
        cmd.append(f'--var="config_path={relative_config}"')
 
-        normalized_name: str = normalize_name(app_config.app.name)
-        cmd.append(f'--var="app_name={normalized_name}"')
-
     logger.debug(f"Executing command: {' '.join(cmd)}")
 
     if dry_run:
+        logger.info(f"[DRY RUN] Would execute: {' '.join(cmd)}")
         return
 
     try:
@@ -531,6 +585,9 @@ def run_databricks_command(
     except FileNotFoundError:
         logger.error("databricks CLI not found. Please install the Databricks CLI.")
         sys.exit(1)
+    except Exception as e:
+        logger.error(f"Command execution failed: {e}")
+        sys.exit(1)
 
 
 def handle_bundle_command(options: Namespace) -> None:
@@ -539,6 +596,7 @@ def handle_bundle_command(options: Namespace) -> None:
     config: Optional[str] = options.config
     target: Optional[str] = options.target
     dry_run: bool = options.dry_run
+
     if options.deploy:
         logger.info("Deploying DAO AI asset bundle...")
         run_databricks_command(
@@ -546,8 +604,9 @@ def handle_bundle_command(options: Namespace) -> None:
         )
     if options.run:
         logger.info("Running DAO AI system with current configuration...")
+        # Use static job resource key that matches databricks.yaml (resources.jobs.deploy_job)
         run_databricks_command(
-            ["bundle", "run", "deploy-end-to-end"],
+            ["bundle", "run", "deploy_job"],
             profile,
             config,
             target,
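
The net effect of the cli.py changes is that `databricks.yaml` is now regenerated from `databricks.yaml.template` before each bundle command, and the normalized app name doubles as the bundle target and job lookup key. A minimal sketch of that flow, using a hypothetical template fragment, app name, and config path (the real template ships with the project and declares the `deploy_job` resource):

```python
from pathlib import Path

# Hypothetical template fragment; the real databricks.yaml.template is larger
# and defines resources.jobs.deploy_job, which `bundle run deploy_job` refers to.
template = "bundle:\n  name: __APP_NAME__\ntargets:\n  __APP_NAME__:\n    default: true\n"

app_name = "retail_agent"  # hypothetical normalized app name
bundle = template.replace("__APP_NAME__", app_name)
Path("databricks.yaml").write_text(bundle)  # regenerated (overwritten) before each bundle command

# run_databricks_command then assembles a command along these lines
# (the config path shown is hypothetical):
#   databricks --target retail_agent bundle run deploy_job --var="config_path=../config/app.yaml"
print(bundle)
```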
dao_ai/config.py CHANGED
@@ -1301,7 +1301,7 @@ class ChatPayload(BaseModel):
     model_config = ConfigDict(use_enum_values=True, extra="forbid")
     input: Optional[list[Message]] = None
     messages: Optional[list[Message]] = None
-    custom_inputs: dict
+    custom_inputs: Optional[dict] = Field(default_factory=dict)
 
     @model_validator(mode="after")
     def validate_mutual_exclusion_and_alias(self) -> "ChatPayload":
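
The practical effect of this change is that callers no longer have to supply `custom_inputs`. A minimal sketch, assuming `ChatPayload` is imported from `dao_ai.config` and that `Message` validates plain role/content dicts:

```python
from dao_ai.config import ChatPayload

# In 0.0.24 this raised a validation error because custom_inputs was required;
# in 0.0.25 it defaults to an empty dict.
payload = ChatPayload(messages=[{"role": "user", "content": "What is DAO AI?"}])
assert payload.custom_inputs == {}

# Passing custom_inputs explicitly still works as before (keys shown are hypothetical).
payload = ChatPayload(
    messages=[{"role": "user", "content": "What is DAO AI?"}],
    custom_inputs={"configurable": {"thread_id": "1"}},
)
```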
{dao_ai-0.0.24.dist-info → dao_ai-0.0.25.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.0.24
+Version: 0.0.25
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
{dao_ai-0.0.24.dist-info → dao_ai-0.0.25.dist-info}/RECORD RENAMED
@@ -2,8 +2,8 @@ dao_ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/agent_as_code.py,sha256=kPSeDz2-1jRaed1TMs4LA3VECoyqe9_Ed2beRLB9gXQ,472
 dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
 dao_ai/chat_models.py,sha256=uhwwOTeLyHWqoTTgHrs4n5iSyTwe4EQcLKnh3jRxPWI,8626
-dao_ai/cli.py,sha256=Aez2TQW3Q8Ho1IaIkRggt0NevDxAAVPjXkePC5GPJF0,20429
-dao_ai/config.py,sha256=j9SAdf7UHSoS2pLos-oypJNDPS48A2rRq55OEW1wsMI,56755
+dao_ai/cli.py,sha256=gq-nsapWxDA1M6Jua3vajBvIwf0Oa6YLcB58lEtMKUo,22503
+dao_ai/config.py,sha256=_4OyJ1x7DH1S-5-FTJp7geeOf2H096PQHVFUBtALKsU,56795
 dao_ai/graph.py,sha256=APYc2y3cig4P52X4sOHSFSZNK8j5EtEPJLFwWeJ3KQQ,7956
 dao_ai/guardrails.py,sha256=4TKArDONRy8RwHzOT1plZ1rhy3x9GF_aeGpPCRl6wYA,4016
 dao_ai/messages.py,sha256=xl_3-WcFqZKCFCiov8sZOPljTdM3gX3fCHhxq-xFg2U,7005
@@ -34,8 +34,8 @@ dao_ai/tools/slack.py,sha256=SCvyVcD9Pv_XXPXePE_fSU1Pd8VLTEkKDLvoGTZWy2Y,4775
 dao_ai/tools/time.py,sha256=Y-23qdnNHzwjvnfkWvYsE7PoWS1hfeKy44tA7sCnNac,8759
 dao_ai/tools/unity_catalog.py,sha256=uX_h52BuBAr4c9UeqSMI7DNz3BPRLeai5tBVW4sJqRI,13113
 dao_ai/tools/vector_search.py,sha256=EDYQs51zIPaAP0ma1D81wJT77GQ-v-cjb2XrFVWfWdg,2621
-dao_ai-0.0.24.dist-info/METADATA,sha256=69qYBqdZg8tZF1ni90RBrSG9OAmE4jXdf5lec6U0TL8,42639
-dao_ai-0.0.24.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-dao_ai-0.0.24.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
-dao_ai-0.0.24.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
-dao_ai-0.0.24.dist-info/RECORD,,
+dao_ai-0.0.25.dist-info/METADATA,sha256=ahAblBSty81iw_mlf9blqOF4-AKN5Asak9SWH0H4FIs,42639
+dao_ai-0.0.25.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dao_ai-0.0.25.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.0.25.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.0.25.dist-info/RECORD,,