databricks-bundles 0.267.0.tar.gz → 0.269.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/PKG-INFO +2 -2
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/README.md +1 -1
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/build.py +48 -7
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/__init__.py +2 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_diagnostics.py +11 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resource_mutator.py +33 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resource_type.py +7 -1
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resources.py +44 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/__init__.py +8 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job.py +11 -0
- databricks_bundles-0.269.0/databricks/bundles/jobs/_models/lifecycle.py +38 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/__init__.py +8 -0
- databricks_bundles-0.269.0/databricks/bundles/pipelines/_models/lifecycle.py +38 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline.py +14 -0
- databricks_bundles-0.269.0/databricks/bundles/schemas/__init__.py +30 -0
- databricks_bundles-0.269.0/databricks/bundles/schemas/_models/lifecycle.py +38 -0
- databricks_bundles-0.269.0/databricks/bundles/schemas/_models/schema.py +97 -0
- databricks_bundles-0.269.0/databricks/bundles/schemas/_models/schema_grant.py +40 -0
- databricks_bundles-0.269.0/databricks/bundles/schemas/_models/schema_grant_privilege.py +38 -0
- databricks_bundles-0.269.0/databricks/bundles/version.py +1 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/volumes/__init__.py +8 -0
- databricks_bundles-0.269.0/databricks/bundles/volumes/_models/lifecycle.py +38 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/volumes/_models/volume.py +11 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/pyproject.toml +1 -1
- databricks_bundles-0.267.0/databricks/bundles/version.py +0 -1
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/LICENSE +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/__init__.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/__init__.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_bundle.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_load.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_location.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resource.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_transform.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_transform_to_json.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_variable.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/adlsgen2_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/authentication_method.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/auto_scale.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/aws_attributes.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/aws_availability.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/azure_attributes.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/azure_availability.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/clients_types.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/cluster_log_conf.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/cluster_spec.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/compute_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/condition.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/condition_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/condition_task_op.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/continuous.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/cron_schedule.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/dashboard_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/data_security_mode.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/dbfs_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/dbt_platform_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/dbt_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/docker_basic_auth.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/docker_image.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/ebs_volume_type.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/environment.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/for_each_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/gcp_attributes.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/gcp_availability.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/gcs_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/gen_ai_compute_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/git_provider.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/git_source.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/init_script_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_cluster.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_email_notifications.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_environment.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_notification_settings.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_parameter_definition.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_permission.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_permission_level.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job_run_as.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/jobs_health_metric.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/jobs_health_operator.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/jobs_health_rule.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/jobs_health_rules.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/local_file_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/log_analytics_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/maven_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/notebook_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/pause_status.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/performance_target.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/pipeline_params.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/pipeline_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/power_bi_model.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/power_bi_table.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/power_bi_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/python_py_pi_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/python_wheel_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/queue_settings.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/r_cran_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/run_if.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/run_job_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/runtime_engine.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/s3_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/source.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/spark_jar_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/spark_python_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/spark_submit_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/sql_task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/sql_task_alert.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/sql_task_dashboard.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/sql_task_file.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/sql_task_query.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/sql_task_subscription.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/storage_mode.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/subscription.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/subscription_subscriber.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/table_update_trigger_configuration.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/task.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/task_dependency.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/task_email_notifications.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/task_notification_settings.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/task_retry_mode.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/trigger_settings.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/volumes_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/webhook.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/webhook_notifications.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/workload_type.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/workspace_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/adlsgen2_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/aws_attributes.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/aws_availability.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/azure_attributes.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/azure_availability.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/cluster_log_conf.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/day_of_week.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/dbfs_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/ebs_volume_type.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/event_log_spec.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/file_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/filters.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/gcp_attributes.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/gcp_availability.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/gcs_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/ingestion_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/ingestion_source_type.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/init_script_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/local_file_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/log_analytics_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/maven_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/notebook_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/notifications.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/path_pattern.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline_cluster.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline_library.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline_permission.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline_permission_level.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipelines_environment.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/postgres_catalog_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/postgres_slot_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/report_spec.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/restart_window.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/run_as.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/s3_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/schema_spec.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/source_catalog_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/source_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/table_spec.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/table_specific_config.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/volumes_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/workspace_storage_info.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/py.typed +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/volumes/_models/volume_grant.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/volumes/_models/volume_grant_privilege.py +0 -0
- {databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/volumes/_models/volume_type.py +0 -0
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.267.0
+Version: 0.269.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.267.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.269.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/README.md
RENAMED
@@ -13,7 +13,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.267.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.269.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/build.py
RENAMED
@@ -166,16 +166,12 @@ def _apply_mutators_for_type(
     return resources, Diagnostics()
 
 
-def python_mutator(
-    args: _Args,
-) -> tuple[dict, dict[tuple[str, ...], Location], Diagnostics]:
-    input = json.load(open(args.input, encoding="utf-8"))
+def _read_conf(input: dict) -> tuple[_Conf, Diagnostics]:
     experimental = input.get("experimental", {})
 
     if experimental.get("pydabs", {}) != {}:
         return (
-
-            {},
+            _Conf(),
             Diagnostics.create_error(
                 "'experimental/pydabs' is not supported by 'databricks-bundles', use 'experimental/python' instead",
                 detail="",
@@ -184,8 +180,53 @@ def python_mutator(
             ),
         )
 
-
+    experimental_conf_dict = experimental.get("python", {})
+    experimental_conf = _transform(_Conf, experimental_conf_dict)
+
+    conf_dict = input.get("python", {})
     conf = _transform(_Conf, conf_dict)
+
+    has_conf = conf != _Conf()
+    has_experimental_conf = experimental_conf != _Conf()
+
+    if has_conf and not has_experimental_conf:
+        return conf, Diagnostics()
+    elif not has_conf and has_experimental_conf:
+        # do not generate warning in Python code, if CLI supports non-experimental 'python',
+        # it should generate a warning
+        return experimental_conf, Diagnostics()
+    elif has_conf and has_experimental_conf:
+        # for backward-compatibility, CLI can copy contents of 'python' into 'experimental/python'
+        # if configs are equal, it isn't a problem
+        if conf != experimental_conf:
+            return (
+                _Conf(),
+                Diagnostics.create_error(
+                    "Both 'python' and 'experimental/python' sections are present, use 'python' section only",
+                    detail="",
+                    location=None,
+                    path=(
+                        "experimental",
+                        "python",
+                    ),
+                ),
+            )
+        else:
+            return conf, Diagnostics()
+    else:
+        return _Conf(), Diagnostics()
+
+
+def python_mutator(
+    args: _Args,
+) -> tuple[dict, dict[tuple[str, ...], Location], Diagnostics]:
+    input = json.load(open(args.input, encoding="utf-8"))
+    diagnostics = Diagnostics()
+
+    conf, diagnostics = diagnostics.extend_tuple(_read_conf(input))
+    if diagnostics.has_error():
+        return input, {}, diagnostics
+
     bundle = _parse_bundle_info(input)
 
     if args.phase == "load_resources":
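Note on the `build.py` change: config loading moves out of `python_mutator` into the new `_read_conf` helper, which prefers the top-level `python` section, falls back to `experimental/python`, and errors when both sections are present but differ. A minimal standalone sketch of that precedence (the `resolve_python_section` helper and the `venv_path` key are hypothetical, for illustration only):

```python
# Hypothetical sketch of the precedence implemented by _read_conf above:
# 'python' wins, 'experimental/python' is the fallback, and two differing
# copies are rejected.
def resolve_python_section(input: dict) -> dict:
    conf = input.get("python", {})
    experimental_conf = input.get("experimental", {}).get("python", {})

    if conf and experimental_conf and conf != experimental_conf:
        raise ValueError(
            "Both 'python' and 'experimental/python' sections are present, "
            "use 'python' section only"
        )
    return conf or experimental_conf


assert resolve_python_section({"python": {"venv_path": ".venv"}}) == {"venv_path": ".venv"}
assert resolve_python_section({"experimental": {"python": {"venv_path": ".venv"}}}) == {"venv_path": ".venv"}
```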
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/__init__.py
RENAMED
@@ -18,6 +18,7 @@ __all__ = [
     "load_resources_from_modules",
     "load_resources_from_package_module",
     "pipeline_mutator",
+    "schema_mutator",
     "variables",
     "volume_mutator",
 ]
@@ -40,6 +41,7 @@ from databricks.bundles.core._resource_mutator import (
     ResourceMutator,
     job_mutator,
     pipeline_mutator,
+    schema_mutator,
     volume_mutator,
 )
 from databricks.bundles.core._resources import Resources
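With this export in place, `schema_mutator` can be imported next to the existing mutators:

```python
from databricks.bundles.core import (
    job_mutator,
    pipeline_mutator,
    schema_mutator,  # newly exported
    volume_mutator,
)
```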
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_diagnostics.py
RENAMED
@@ -134,6 +134,17 @@ class Diagnostics:
 
         return False
 
+    def has_warning(self) -> bool:
+        """
+        Returns True if there is at least one warning in diagnostics.
+        """
+
+        for item in self.items:
+            if item.severity == Severity.WARNING:
+                return True
+
+        return False
+
     @classmethod
     def create_error(
         cls,
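A sketch of what the new accessor does, shown on a minimal mimic of the `Diagnostics`/`Severity` shapes (the real classes live in `databricks.bundles.core._diagnostics`; the names below are simplified):

```python
from dataclasses import dataclass
from enum import Enum


class Severity(Enum):
    WARNING = "warning"
    ERROR = "error"


@dataclass
class Diagnostic:
    severity: Severity
    msg: str


@dataclass
class Diagnostics:
    items: tuple = ()

    def has_warning(self) -> bool:
        # Same linear scan as the real method above.
        return any(item.severity == Severity.WARNING for item in self.items)


d = Diagnostics(items=(Diagnostic(Severity.WARNING, "deprecated field"),))
assert d.has_warning()
```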
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resource_mutator.py
RENAMED
@@ -8,6 +8,7 @@ from databricks.bundles.core._resource import Resource
 if TYPE_CHECKING:
     from databricks.bundles.jobs._models.job import Job
     from databricks.bundles.pipelines._models.pipeline import Pipeline
+    from databricks.bundles.schemas._models.schema import Schema
     from databricks.bundles.volumes._models.volume import Volume
 
 _T = TypeVar("_T", bound=Resource)
@@ -130,6 +131,38 @@ def pipeline_mutator(function: Callable) -> ResourceMutator["Pipeline"]:
     return ResourceMutator(resource_type=Pipeline, function=function)
 
 
+@overload
+def schema_mutator(
+    function: Callable[[Bundle, "Schema"], "Schema"],
+) -> ResourceMutator["Schema"]: ...
+
+
+@overload
+def schema_mutator(
+    function: Callable[["Schema"], "Schema"],
+) -> ResourceMutator["Schema"]: ...
+
+
+def schema_mutator(function: Callable) -> ResourceMutator["Schema"]:
+    """
+    Decorator for defining a schema mutator. Function should return a new instance of the schema with the desired changes,
+    instead of mutating the input schema.
+
+    Example:
+
+    .. code-block:: python
+
+        @schema_mutator
+        def my_schema_mutator(bundle: Bundle, schema: Schema) -> Schema:
+            return replace(schema, name="my_schema")
+
+    :param function: Function that mutates a schema.
+    """
+    from databricks.bundles.schemas._models.schema import Schema
+
+    return ResourceMutator(resource_type=Schema, function=function)
+
+
 @overload
 def volume_mutator(
     function: Callable[[Bundle, "Volume"], "Volume"],
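A usage sketch based on the docstring example above; the decorated function returns a modified copy of the schema rather than mutating it:

```python
from dataclasses import replace

from databricks.bundles.core import Bundle, schema_mutator
from databricks.bundles.schemas import Schema


# Illustrative mutator: force every schema in the bundle into the 'main' catalog.
@schema_mutator
def enforce_main_catalog(bundle: Bundle, schema: Schema) -> Schema:
    return replace(schema, catalog_name="main")
```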
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resource_type.py
RENAMED
@@ -2,7 +2,6 @@ from dataclasses import dataclass
 from typing import Type
 
 from databricks.bundles.core._resource import Resource
-from databricks.bundles.volumes._models.volume import Volume
 
 
 @dataclass(kw_only=True, frozen=True)
@@ -34,6 +33,8 @@ class _ResourceType:
 
         from databricks.bundles.jobs._models.job import Job
         from databricks.bundles.pipelines._models.pipeline import Pipeline
+        from databricks.bundles.schemas._models.schema import Schema
+        from databricks.bundles.volumes._models.volume import Volume
 
         return (
             _ResourceType(
@@ -51,4 +52,9 @@ class _ResourceType:
                 plural_name="volumes",
                 singular_name="volume",
             ),
+            _ResourceType(
+                resource_type=Schema,
+                plural_name="schemas",
+                singular_name="schema",
+            ),
         )
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/core/_resources.py
RENAMED
@@ -8,6 +8,7 @@ from databricks.bundles.core._transform import _transform
 if TYPE_CHECKING:
     from databricks.bundles.jobs._models.job import Job, JobParam
     from databricks.bundles.pipelines._models.pipeline import Pipeline, PipelineParam
+    from databricks.bundles.schemas._models.schema import Schema, SchemaParam
     from databricks.bundles.volumes._models.volume import Volume, VolumeParam
 
 __all__ = ["Resources"]
@@ -58,6 +59,7 @@ class Resources:
     def __init__(self):
         self._jobs = dict[str, "Job"]()
         self._pipelines = dict[str, "Pipeline"]()
+        self._schemas = dict[str, "Schema"]()
         self._volumes = dict[str, "Volume"]()
         self._locations = dict[tuple[str, ...], Location]()
         self._diagnostics = Diagnostics()
@@ -70,6 +72,10 @@ class Resources:
     def pipelines(self) -> dict[str, "Pipeline"]:
         return self._pipelines
 
+    @property
+    def schemas(self) -> dict[str, "Schema"]:
+        return self._schemas
+
     @property
     def volumes(self) -> dict[str, "Volume"]:
         return self._volumes
@@ -99,6 +105,7 @@ class Resources:
 
         from databricks.bundles.jobs import Job
         from databricks.bundles.pipelines import Pipeline
+        from databricks.bundles.schemas import Schema
        from databricks.bundles.volumes import Volume
 
         location = location or Location.from_stack_frame(depth=1)
@@ -108,6 +115,8 @@ class Resources:
                 self.add_job(resource_name, resource, location=location)
             case Pipeline():
                 self.add_pipeline(resource_name, resource, location=location)
+            case Schema():
+                self.add_schema(resource_name, resource, location=location)
             case Volume():
                 self.add_volume(resource_name, resource, location=location)
             case _:
@@ -177,6 +186,38 @@ class Resources:
 
         self._pipelines[resource_name] = pipeline
 
+    def add_schema(
+        self,
+        resource_name: str,
+        schema: "SchemaParam",
+        *,
+        location: Optional[Location] = None,
+    ) -> None:
+        """
+        Adds a schema to the collection of resources. Resource name must be unique across all schemas.
+
+        :param resource_name: unique identifier for the schema
+        :param schema: the schema to add, can be Schema or dict
+        :param location: optional location of the schema in the source code
+        """
+        from databricks.bundles.schemas import Schema
+
+        schema = _transform(Schema, schema)
+        path = ("resources", "schemas", resource_name)
+        location = location or Location.from_stack_frame(depth=1)
+
+        if self._schemas.get(resource_name):
+            self.add_diagnostic_error(
+                msg=f"Duplicate resource name '{resource_name}' for a schema. Resource names must be unique.",
+                location=location,
+                path=path,
+            )
+        else:
+            if location:
+                self.add_location(path, location)
+
+            self._schemas[resource_name] = schema
+
     def add_volume(
         self,
         resource_name: str,
@@ -285,6 +326,9 @@ class Resources:
         for name, pipeline in other.pipelines.items():
             self.add_pipeline(name, pipeline)
 
+        for name, schema in other.schemas.items():
+            self.add_schema(name, schema)
+
         for name, volume in other.volumes.items():
             self.add_volume(name, volume)
 
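Usage sketch for the new `add_schema`, mirroring the existing `add_job`/`add_pipeline` pattern; the dict form is accepted because `SchemaParam = SchemaDict | Schema`:

```python
from databricks.bundles.core import Resources

resources = Resources()

# Dict form; _transform converts it into a Schema, per the code above.
resources.add_schema(
    "raw_data",
    {
        "catalog_name": "main",
        "name": "raw_data",
        "comment": "Landing zone for ingested files",
    },
)

# Registering a second schema under the same resource name records a
# duplicate-name diagnostic error instead of raising.
```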
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/__init__.py
RENAMED
@@ -134,6 +134,9 @@ __all__ = [
     "Library",
     "LibraryDict",
     "LibraryParam",
+    "Lifecycle",
+    "LifecycleDict",
+    "LifecycleParam",
     "LocalFileInfo",
     "LocalFileInfoDict",
     "LocalFileInfoParam",
@@ -482,6 +485,11 @@ from databricks.bundles.jobs._models.jobs_health_rules import (
     JobsHealthRulesParam,
 )
 from databricks.bundles.jobs._models.library import Library, LibraryDict, LibraryParam
+from databricks.bundles.jobs._models.lifecycle import (
+    Lifecycle,
+    LifecycleDict,
+    LifecycleParam,
+)
 from databricks.bundles.jobs._models.local_file_info import (
     LocalFileInfo,
     LocalFileInfoDict,
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/jobs/_models/job.py
RENAMED
@@ -44,6 +44,7 @@ from databricks.bundles.jobs._models.jobs_health_rules import (
     JobsHealthRules,
     JobsHealthRulesParam,
 )
+from databricks.bundles.jobs._models.lifecycle import Lifecycle, LifecycleParam
 from databricks.bundles.jobs._models.performance_target import (
     PerformanceTarget,
     PerformanceTargetParam,
@@ -116,6 +117,11 @@ class Job(Resource):
     A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
     """
 
+    lifecycle: VariableOrOptional[Lifecycle] = None
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     max_concurrent_runs: VariableOrOptional[int] = None
     """
     An optional maximum allowed number of concurrent runs of the job.
@@ -256,6 +262,11 @@ class JobDict(TypedDict, total=False):
     A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
     """
 
+    lifecycle: VariableOrOptional[LifecycleParam]
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     max_concurrent_runs: VariableOrOptional[int]
     """
     An optional maximum allowed number of concurrent runs of the job.
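Sketch of the new field in use (this release adds the same `lifecycle` field to `Pipeline`, `Schema`, and `Volume` as well):

```python
from databricks.bundles.jobs import Job, Lifecycle

# Mark a job so that deployments refuse to destroy it.
job = Job(
    name="nightly-etl",
    lifecycle=Lifecycle(prevent_destroy=True),
)
```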
databricks_bundles-0.269.0/databricks/bundles/jobs/_models/lifecycle.py
ADDED
@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Lifecycle:
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool] = None
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "LifecycleDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "LifecycleDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class LifecycleDict(TypedDict, total=False):
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool]
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+
+LifecycleParam = LifecycleDict | Lifecycle
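The model follows the package's usual dataclass/TypedDict pairing, so dict and dataclass forms should round-trip (assuming `as_dict` omits unset fields, as the `total=False` TypedDict suggests):

```python
from databricks.bundles.jobs import Lifecycle

lifecycle = Lifecycle.from_dict({"prevent_destroy": True})
assert lifecycle.prevent_destroy is True

# Serialize back to the TypedDict shape.
assert lifecycle.as_dict() == {"prevent_destroy": True}
```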
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/__init__.py
RENAMED
@@ -56,6 +56,9 @@ __all__ = [
     "InitScriptInfo",
     "InitScriptInfoDict",
     "InitScriptInfoParam",
+    "Lifecycle",
+    "LifecycleDict",
+    "LifecycleParam",
     "LocalFileInfo",
     "LocalFileInfoDict",
     "LocalFileInfoParam",
@@ -236,6 +239,11 @@ from databricks.bundles.pipelines._models.init_script_info import (
     InitScriptInfoDict,
     InitScriptInfoParam,
 )
+from databricks.bundles.pipelines._models.lifecycle import (
+    Lifecycle,
+    LifecycleDict,
+    LifecycleParam,
+)
 from databricks.bundles.pipelines._models.local_file_info import (
     LocalFileInfo,
     LocalFileInfoDict,
databricks_bundles-0.269.0/databricks/bundles/pipelines/_models/lifecycle.py
ADDED
@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Lifecycle:
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool] = None
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "LifecycleDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "LifecycleDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class LifecycleDict(TypedDict, total=False):
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool]
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+
+LifecycleParam = LifecycleDict | Lifecycle
{databricks_bundles-0.267.0 → databricks_bundles-0.269.0}/databricks/bundles/pipelines/_models/pipeline.py
RENAMED
@@ -25,6 +25,10 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition import (
     IngestionPipelineDefinition,
     IngestionPipelineDefinitionParam,
 )
+from databricks.bundles.pipelines._models.lifecycle import (
+    Lifecycle,
+    LifecycleParam,
+)
 from databricks.bundles.pipelines._models.notifications import (
     Notifications,
     NotificationsParam,
@@ -143,6 +147,11 @@ class Pipeline(Resource):
     Libraries or code needed by this deployment.
     """
 
+    lifecycle: VariableOrOptional[Lifecycle] = None
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     name: VariableOrOptional[str] = None
     """
     Friendly identifier for this pipeline.
@@ -301,6 +310,11 @@ class PipelineDict(TypedDict, total=False):
     Libraries or code needed by this deployment.
     """
 
+    lifecycle: VariableOrOptional[LifecycleParam]
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     name: VariableOrOptional[str]
     """
     Friendly identifier for this pipeline.
databricks_bundles-0.269.0/databricks/bundles/schemas/__init__.py
ADDED
@@ -0,0 +1,30 @@
+__all__ = [
+    "Lifecycle",
+    "LifecycleDict",
+    "LifecycleParam",
+    "Schema",
+    "SchemaDict",
+    "SchemaGrant",
+    "SchemaGrantDict",
+    "SchemaGrantParam",
+    "SchemaGrantPrivilege",
+    "SchemaGrantPrivilegeParam",
+    "SchemaParam",
+]
+
+
+from databricks.bundles.schemas._models.lifecycle import (
+    Lifecycle,
+    LifecycleDict,
+    LifecycleParam,
+)
+from databricks.bundles.schemas._models.schema import Schema, SchemaDict, SchemaParam
+from databricks.bundles.schemas._models.schema_grant import (
+    SchemaGrant,
+    SchemaGrantDict,
+    SchemaGrantParam,
+)
+from databricks.bundles.schemas._models.schema_grant_privilege import (
+    SchemaGrantPrivilege,
+    SchemaGrantPrivilegeParam,
+)
databricks_bundles-0.269.0/databricks/bundles/schemas/_models/lifecycle.py
ADDED
@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Lifecycle:
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool] = None
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "LifecycleDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "LifecycleDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class LifecycleDict(TypedDict, total=False):
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool]
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+
+LifecycleParam = LifecycleDict | Lifecycle
databricks_bundles-0.269.0/databricks/bundles/schemas/_models/schema.py
ADDED
@@ -0,0 +1,97 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._resource import Resource
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrDict,
+    VariableOrList,
+    VariableOrOptional,
+)
+from databricks.bundles.schemas._models.lifecycle import Lifecycle, LifecycleParam
+from databricks.bundles.schemas._models.schema_grant import (
+    SchemaGrant,
+    SchemaGrantParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Schema(Resource):
+    """"""
+
+    catalog_name: VariableOr[str]
+    """
+    Name of parent catalog.
+    """
+
+    name: VariableOr[str]
+    """
+    Name of schema, relative to parent catalog.
+    """
+
+    comment: VariableOrOptional[str] = None
+    """
+    User-provided free-form text description.
+    """
+
+    grants: VariableOrList[SchemaGrant] = field(default_factory=list)
+
+    lifecycle: VariableOrOptional[Lifecycle] = None
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
+    properties: VariableOrDict[str] = field(default_factory=dict)
+
+    storage_root: VariableOrOptional[str] = None
+    """
+    Storage root URL for managed tables within schema.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SchemaDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SchemaDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class SchemaDict(TypedDict, total=False):
+    """"""
+
+    catalog_name: VariableOr[str]
+    """
+    Name of parent catalog.
+    """
+
+    name: VariableOr[str]
+    """
+    Name of schema, relative to parent catalog.
+    """
+
+    comment: VariableOrOptional[str]
+    """
+    User-provided free-form text description.
+    """
+
+    grants: VariableOrList[SchemaGrantParam]
+
+    lifecycle: VariableOrOptional[LifecycleParam]
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
+    properties: VariableOrDict[str]
+
+    storage_root: VariableOrOptional[str]
+    """
+    Storage root URL for managed tables within schema.
+    """
+
+
+SchemaParam = SchemaDict | Schema
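A construction sketch using only the fields defined above; `catalog_name` and `name` are the two required fields:

```python
from databricks.bundles.schemas import Lifecycle, Schema

schema = Schema(
    catalog_name="main",
    name="analytics",
    comment="Curated analytics tables",
    properties={"team": "data-platform"},
    lifecycle=Lifecycle(prevent_destroy=True),
)

# Serialize back to the TypedDict form accepted by SchemaParam.
schema_dict = schema.as_dict()
```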