databricks-bundles 0.270.0__tar.gz → 0.272.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/PKG-INFO +2 -2
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/README.md +1 -1
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/build.py +13 -1
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/__init__.py +3 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/cluster_spec.py +5 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/environment.py +2 -2
- databricks_bundles-0.272.0/databricks/bundles/jobs/_models/kind.py +9 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/spark_submit_task.py +3 -1
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/task.py +18 -20
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/__init__.py +16 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
- databricks_bundles-0.272.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py +95 -0
- databricks_bundles-0.272.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py +70 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_source_type.py +2 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline.py +0 -6
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/run_as.py +0 -2
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/table_specific_config.py +22 -0
- databricks_bundles-0.272.0/databricks/bundles/version.py +1 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/pyproject.toml +1 -1
- databricks_bundles-0.270.0/databricks/bundles/version.py +0 -1
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/LICENSE +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/__init__.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/__init__.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/__init__.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_bundle.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_diagnostics.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_load.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_location.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_resource.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_resource_mutator.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_resource_type.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_resources.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_transform.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_transform_to_json.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/core/_variable.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/adlsgen2_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/authentication_method.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/auto_scale.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/aws_attributes.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/aws_availability.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/azure_attributes.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/azure_availability.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/clients_types.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/cluster_log_conf.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/compute_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/condition.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/condition_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/condition_task_op.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/continuous.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/cron_schedule.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/dashboard_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/data_security_mode.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/dbfs_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/dbt_platform_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/dbt_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/docker_basic_auth.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/docker_image.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/ebs_volume_type.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/for_each_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/gcp_attributes.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/gcp_availability.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/gcs_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/gen_ai_compute_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/git_provider.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/git_source.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/init_script_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_cluster.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_email_notifications.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_environment.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_notification_settings.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_parameter_definition.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_permission.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_permission_level.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/job_run_as.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/jobs_health_metric.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/jobs_health_operator.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/jobs_health_rule.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/jobs_health_rules.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/lifecycle.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/local_file_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/log_analytics_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/maven_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/notebook_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/pause_status.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/performance_target.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/pipeline_params.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/pipeline_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/power_bi_model.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/power_bi_table.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/power_bi_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/python_py_pi_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/python_wheel_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/queue_settings.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/r_cran_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/run_if.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/run_job_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/runtime_engine.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/s3_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/source.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/spark_jar_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/spark_python_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/sql_task.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/sql_task_alert.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/sql_task_dashboard.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/sql_task_file.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/sql_task_query.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/sql_task_subscription.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/storage_mode.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/subscription.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/subscription_subscriber.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/table_update_trigger_configuration.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/task_dependency.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/task_email_notifications.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/task_notification_settings.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/task_retry_mode.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/trigger_settings.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/volumes_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/webhook.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/webhook_notifications.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/workload_type.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/workspace_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/adlsgen2_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/aws_attributes.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/aws_availability.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/azure_attributes.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/azure_availability.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/cluster_log_conf.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/day_of_week.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/dbfs_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ebs_volume_type.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/event_log_spec.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/file_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/filters.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/gcp_attributes.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/gcp_availability.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/gcs_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/init_script_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/lifecycle.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/local_file_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/log_analytics_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/maven_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/notebook_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/notifications.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/path_pattern.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline_cluster.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline_library.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline_permission.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline_permission_level.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipelines_environment.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/postgres_catalog_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/postgres_slot_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/report_spec.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/restart_window.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/s3_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/schema_spec.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/source_catalog_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/source_config.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/table_spec.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/volumes_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/workspace_storage_info.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/py.typed +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/schemas/__init__.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/schemas/_models/lifecycle.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/schemas/_models/schema.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/schemas/_models/schema_grant.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/schemas/_models/schema_grant_privilege.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/volumes/__init__.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/volumes/_models/lifecycle.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/volumes/_models/volume.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/volumes/_models/volume_grant.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/volumes/_models/volume_grant_privilege.py +0 -0
- {databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/volumes/_models/volume_type.py +0 -0
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.270.0
+Version: 0.272.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.270.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.272.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/README.md
@@ -13,7 +13,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.270.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.272.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/build.py
@@ -528,11 +528,23 @@ def _relativize_location(location: Location) -> Location:
 
 
 def _relativize_path(path: str) -> str:
+    """
+    Attempt to relativize an absolute path to the current working directory.
+
+    If the path is not absolute or cannot be relativized, return it as is.
+    Used to relativize paths in locations to show shorter paths in diagnostics.
+    """
+
     if not os.path.isabs(path):
         return path
 
     cwd = os.getcwd()
-    common = os.path.commonpath([cwd, path])
+
+    try:
+        common = os.path.commonpath([cwd, path])
+    except ValueError:
+        # On Windows, paths on different drives don't have a common path
+        return path
 
     if common == cwd:
         return os.path.relpath(path, cwd)
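
For context on the new try/except: `os.path.commonpath` raises `ValueError` when its inputs mix absolute and relative paths or, on Windows, sit on different drives. A minimal standalone sketch of the same fallback pattern (illustrative, not the packaged code):

```python
import os


def relativize(path: str) -> str:
    # Relative paths are returned untouched, matching _relativize_path.
    if not os.path.isabs(path):
        return path

    cwd = os.getcwd()
    try:
        common = os.path.commonpath([cwd, path])
    except ValueError:
        # Raised on Windows when cwd and path live on different drives,
        # e.g. os.path.commonpath([r"C:\work", r"D:\data\x.py"]).
        return path

    # Only shorten paths that actually live under the working directory.
    return os.path.relpath(path, cwd) if common == cwd else path
```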
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/__init__.py
RENAMED
@@ -131,6 +131,8 @@ __all__ = [
     "JobsHealthRules",
     "JobsHealthRulesDict",
     "JobsHealthRulesParam",
+    "Kind",
+    "KindParam",
     "Library",
     "LibraryDict",
     "LibraryParam",
@@ -484,6 +486,7 @@ from databricks.bundles.jobs._models.jobs_health_rules import (
     JobsHealthRulesDict,
     JobsHealthRulesParam,
 )
+from databricks.bundles.jobs._models.kind import Kind, KindParam
 from databricks.bundles.jobs._models.library import Library, LibraryDict, LibraryParam
 from databricks.bundles.jobs._models.lifecycle import (
     Lifecycle,
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/cluster_spec.py
@@ -37,6 +37,7 @@ from databricks.bundles.jobs._models.init_script_info import (
     InitScriptInfo,
     InitScriptInfoParam,
 )
+from databricks.bundles.jobs._models.kind import Kind, KindParam
 from databricks.bundles.jobs._models.runtime_engine import (
     RuntimeEngine,
     RuntimeEngineParam,
@@ -171,6 +172,8 @@ class ClusterSpec:
     When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`
     """
 
+    kind: VariableOrOptional[Kind] = None
+
     node_type_id: VariableOrOptional[str] = None
     """
     This field encodes, through a single value, the resources available to each of
@@ -384,6 +387,8 @@ class ClusterSpecDict(TypedDict, total=False):
     When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`
     """
 
+    kind: VariableOrOptional[KindParam]
+
     node_type_id: VariableOrOptional[str]
     """
     This field encodes, through a single value, the resources available to each of
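
Usage sketch for the new `kind` field. The member name `CLASSIC_PREVIEW` is an assumption based on the Databricks clusters API; the actual values live in the new `kind.py` added in this release:

```python
from databricks.bundles.jobs import ClusterSpec, Kind

# Sketch only: Kind.CLASSIC_PREVIEW is assumed, mirroring the clusters API.
# node_type_id and spark_version values are placeholders.
spec = ClusterSpec(
    kind=Kind.CLASSIC_PREVIEW,
    node_type_id="i3.xlarge",
    num_workers=2,
    spark_version="15.4.x-scala2.12",
)
```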
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/environment.py
@@ -33,7 +33,7 @@ class Environment:
     The version is a string, consisting of an integer.
     """
 
-    jar_dependencies: VariableOrList[str] = field(default_factory=list)
+    java_dependencies: VariableOrList[str] = field(default_factory=list)
     """
     :meta private: [EXPERIMENTAL]
 
@@ -68,7 +68,7 @@ class EnvironmentDict(TypedDict, total=False):
     The version is a string, consisting of an integer.
     """
 
-    jar_dependencies: VariableOrList[str]
+    java_dependencies: VariableOrList[str]
     """
     :meta private: [EXPERIMENTAL]
 
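
A sketch of the renamed field in use, assuming `Environment` is exported from `databricks.bundles.jobs` like the other models; the volume path is a placeholder:

```python
from databricks.bundles.jobs import Environment

# Sketch: java_dependencies is marked [EXPERIMENTAL] in 0.272.0.
env = Environment(
    environment_version="2",
    java_dependencies=["/Volumes/main/default/libs/my-udf.jar"],  # placeholder path
)
```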
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/jobs/_models/task.py
RENAMED
@@ -104,7 +104,7 @@ class Task:
 
     clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTask] = None
     """
-    The task runs a [clean rooms](https://docs.databricks.com/
+    The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
     when the `clean_rooms_notebook_task` field is present.
     """
 
@@ -145,6 +145,13 @@ class Task:
     An option to disable auto optimization in serverless
     """
 
+    disabled: VariableOrOptional[bool] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+    """
+
     email_notifications: VariableOrOptional[TaskEmailNotifications] = None
     """
     An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
@@ -261,15 +268,7 @@ class Task:
 
     spark_submit_task: VariableOrOptional[SparkSubmitTask] = None
     """
-    (Legacy) The task runs the spark-submit script when the
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+    [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
     """
 
     sql_task: VariableOrOptional[SqlTask] = None
@@ -307,7 +306,7 @@ class TaskDict(TypedDict, total=False):
 
     clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTaskParam]
     """
-    The task runs a [clean rooms](https://docs.databricks.com/
+    The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
     when the `clean_rooms_notebook_task` field is present.
     """
 
@@ -348,6 +347,13 @@ class TaskDict(TypedDict, total=False):
     An option to disable auto optimization in serverless
     """
 
+    disabled: VariableOrOptional[bool]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+    """
+
     email_notifications: VariableOrOptional[TaskEmailNotificationsParam]
     """
     An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
@@ -464,15 +470,7 @@ class TaskDict(TypedDict, total=False):
 
     spark_submit_task: VariableOrOptional[SparkSubmitTaskParam]
     """
-    (Legacy) The task runs the spark-submit script when the
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+    [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
     """
 
     sql_task: VariableOrOptional[SqlTaskParam]
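
A sketch of the new experimental `disabled` flag; the task key and notebook path are placeholders:

```python
from databricks.bundles.jobs import NotebookTask, Task

# Sketch: disabled is marked [EXPERIMENTAL] in 0.272.0. Per the docstring,
# a disabled task will not run even though it stays part of the job.
task = Task(
    task_key="nightly_ingest",  # placeholder
    notebook_task=NotebookTask(notebook_path="/Workspace/ingest"),  # placeholder
    disabled=True,
)
```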
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/__init__.py
RENAMED
@@ -51,6 +51,12 @@ __all__ = [
     "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig",
     "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict",
     "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam",
+    "IngestionPipelineDefinitionWorkdayReportParameters",
+    "IngestionPipelineDefinitionWorkdayReportParametersDict",
+    "IngestionPipelineDefinitionWorkdayReportParametersParam",
+    "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue",
+    "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict",
+    "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam",
     "IngestionSourceType",
     "IngestionSourceTypeParam",
     "InitScriptInfo",
@@ -230,6 +236,16 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_sp
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict,
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
 )
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import (
+    IngestionPipelineDefinitionWorkdayReportParameters,
+    IngestionPipelineDefinitionWorkdayReportParametersDict,
+    IngestionPipelineDefinitionWorkdayReportParametersParam,
+)
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import (
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue,
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict,
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam,
+)
 from databricks.bundles.pipelines._models.ingestion_source_type import (
     IngestionSourceType,
     IngestionSourceTypeParam,
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -39,6 +39,15 @@ class IngestionPipelineDefinition:
     Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
     """
 
+    netsuite_jar_path: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Netsuite only configuration. When the field is set for a netsuite connector,
+    the jar stored in the field will be validated and added to the classpath of
+    pipeline's cluster.
+    """
+
     objects: VariableOrList[IngestionConfig] = field(default_factory=list)
     """
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
@@ -84,6 +93,15 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
     """
 
+    netsuite_jar_path: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Netsuite only configuration. When the field is set for a netsuite connector,
+    the jar stored in the field will be validated and added to the classpath of
+    pipeline's cluster.
+    """
+
     objects: VariableOrList[IngestionConfigParam]
     """
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
databricks_bundles-0.272.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py
@@ -0,0 +1,95 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import (
+    VariableOrDict,
+    VariableOrList,
+    VariableOrOptional,
+)
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import (
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue,
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class IngestionPipelineDefinitionWorkdayReportParameters:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    incremental: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] (Optional) Marks the report as incremental.
+    This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+    controlled by the `parameters` field.
+    """
+
+    parameters: VariableOrDict[str] = field(default_factory=dict)
+    """
+    Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+    and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+    Example:
+    {
+      "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+      "end_date": "{ current_date() - INTERVAL 1 DAY }"
+    }
+    """
+
+    report_parameters: VariableOrList[
+        IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue
+    ] = field(default_factory=list)
+    """
+    [DEPRECATED] (Optional) Additional custom parameters for Workday Report
+    This field is deprecated and should not be used. Use `parameters` instead.
+    """
+
+    @classmethod
+    def from_dict(
+        cls, value: "IngestionPipelineDefinitionWorkdayReportParametersDict"
+    ) -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "IngestionPipelineDefinitionWorkdayReportParametersDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class IngestionPipelineDefinitionWorkdayReportParametersDict(TypedDict, total=False):
+    """"""
+
+    incremental: VariableOrOptional[bool]
+    """
+    [DEPRECATED] (Optional) Marks the report as incremental.
+    This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+    controlled by the `parameters` field.
+    """
+
+    parameters: VariableOrDict[str]
+    """
+    Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+    and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+    Example:
+    {
+      "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+      "end_date": "{ current_date() - INTERVAL 1 DAY }"
+    }
+    """
+
+    report_parameters: VariableOrList[
+        IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam
+    ]
+    """
+    [DEPRECATED] (Optional) Additional custom parameters for Workday Report
+    This field is deprecated and should not be used. Use `parameters` instead.
+    """
+
+
+IngestionPipelineDefinitionWorkdayReportParametersParam = (
+    IngestionPipelineDefinitionWorkdayReportParametersDict
+    | IngestionPipelineDefinitionWorkdayReportParameters
+)
databricks_bundles-0.272.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py
@@ -0,0 +1,70 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    [DEPRECATED]
+    """
+
+    key: VariableOrOptional[str] = None
+    """
+    Key for the report parameter, can be a column name or other metadata
+    """
+
+    value: VariableOrOptional[str] = None
+    """
+    Value for the report parameter.
+    Possible values it can take are these sql functions:
+    1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+    2. current_date()
+    3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
+    """
+
+    @classmethod
+    def from_dict(
+        cls,
+        value: "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict",
+    ) -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(
+        self,
+    ) -> "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict(
+    TypedDict, total=False
+):
+    """"""
+
+    key: VariableOrOptional[str]
+    """
+    Key for the report parameter, can be a column name or other metadata
+    """
+
+    value: VariableOrOptional[str]
+    """
+    Value for the report parameter.
+    Possible values it can take are these sql functions:
+    1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+    2. current_date()
+    3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
+    """
+
+
+IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam = (
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict
+    | IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue
+)
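
A sketch of constructing the new Workday report parameter model through the `from_dict`/`as_dict` pair defined above; the expressions are the documented examples from the field docstring:

```python
from databricks.bundles.pipelines import (
    IngestionPipelineDefinitionWorkdayReportParameters,
)

# Sketch: build the experimental Workday report parameters from a dict.
params = IngestionPipelineDefinitionWorkdayReportParameters.from_dict(
    {
        "parameters": {
            "start_date": '{ coalesce(current_offset(), date("2025-02-01")) }',
            "end_date": "{ current_date() - INTERVAL 1 DAY }",
        }
    }
)
print(params.as_dict())
```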
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/ingestion_source_type.py
@@ -21,6 +21,7 @@ class IngestionSourceType(Enum):
     DYNAMICS365 = "DYNAMICS365"
     CONFLUENCE = "CONFLUENCE"
     META_MARKETING = "META_MARKETING"
+    FOREIGN_CATALOG = "FOREIGN_CATALOG"
 
 
 IngestionSourceTypeParam = (
@@ -43,6 +44,7 @@ IngestionSourceTypeParam = (
         "DYNAMICS365",
         "CONFLUENCE",
         "META_MARKETING",
+        "FOREIGN_CATALOG",
     ]
     | IngestionSourceType
 )
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/pipeline.py
@@ -184,9 +184,6 @@ class Pipeline(Resource):
     """
 
     run_as: VariableOrOptional[RunAs] = None
-    """
-    :meta private: [EXPERIMENTAL]
-    """
 
     schema: VariableOrOptional[str] = None
     """
@@ -347,9 +344,6 @@ class PipelineDict(TypedDict, total=False):
     """
 
     run_as: VariableOrOptional[RunAsParam]
-    """
-    :meta private: [EXPERIMENTAL]
-    """
 
     schema: VariableOrOptional[str]
     """
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/run_as.py
@@ -12,8 +12,6 @@ if TYPE_CHECKING:
 @dataclass(kw_only=True)
 class RunAs:
     """
-    :meta private: [EXPERIMENTAL]
-
     Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
 
     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.
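
With the experimental marker removed, `run_as` reads as a regular pipeline setting. A sketch, assuming `Pipeline` and `RunAs` are exported from `databricks.bundles.pipelines`; names are placeholders:

```python
from databricks.bundles.pipelines import Pipeline, RunAs

# Sketch: only one of user_name or service_principal_name may be set;
# setting both raises an error per the docstring above.
pipeline = Pipeline(
    name="my_pipeline",  # placeholder
    run_as=RunAs(service_principal_name="9f06...eeff"),  # placeholder id
)
```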
{databricks_bundles-0.270.0 → databricks_bundles-0.272.0}/databricks/bundles/pipelines/_models/table_specific_config.py
@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_sp
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
 )
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import (
+    IngestionPipelineDefinitionWorkdayReportParameters,
+    IngestionPipelineDefinitionWorkdayReportParametersParam,
+)
 from databricks.bundles.pipelines._models.table_specific_config_scd_type import (
     TableSpecificConfigScdType,
     TableSpecificConfigScdTypeParam,
@@ -71,6 +75,15 @@ class TableSpecificConfig:
     The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
     """
 
+    workday_report_parameters: VariableOrOptional[
+        IngestionPipelineDefinitionWorkdayReportParameters
+    ] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    (Optional) Additional custom parameters for Workday Report
+    """
+
     @classmethod
     def from_dict(cls, value: "TableSpecificConfigDict") -> "Self":
         return _transform(cls, value)
@@ -132,5 +145,14 @@ class TableSpecificConfigDict(TypedDict, total=False):
     The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
     """
 
+    workday_report_parameters: VariableOrOptional[
+        IngestionPipelineDefinitionWorkdayReportParametersParam
+    ]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    (Optional) Additional custom parameters for Workday Report
+    """
+
 
 TableSpecificConfigParam = TableSpecificConfigDict | TableSpecificConfig
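
A sketch tying the pieces together, assuming `TableSpecificConfig` is exported from `databricks.bundles.pipelines`:

```python
from databricks.bundles.pipelines import (
    IngestionPipelineDefinitionWorkdayReportParameters,
    TableSpecificConfig,
)

# Sketch: attach the experimental Workday report parameters to a
# table-specific ingestion config via the new field above.
config = TableSpecificConfig(
    workday_report_parameters=IngestionPipelineDefinitionWorkdayReportParameters(
        parameters={"end_date": "{ current_date() - INTERVAL 1 DAY }"},
    ),
)
```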
databricks_bundles-0.272.0/databricks/bundles/version.py
@@ -0,0 +1 @@
+__version__ = "0.272.0"

databricks_bundles-0.270.0/databricks/bundles/version.py
@@ -1 +0,0 @@
-__version__ = "0.270.0"