databricks-bundles 0.283.0__tar.gz → 0.285.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/PKG-INFO +2 -2
  2. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/README.md +1 -1
  3. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/__init__.py +8 -0
  4. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/cluster_spec.py +24 -0
  5. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/dashboard_task.py +1 -4
  6. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job.py +1 -4
  7. databricks_bundles-0.285.0/databricks/bundles/jobs/_models/node_type_flexibility.py +40 -0
  8. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/__init__.py +16 -0
  9. databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/auto_full_refresh_policy.py +54 -0
  10. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/day_of_week.py +0 -2
  11. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +14 -0
  12. databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/operation_time_window.py +69 -0
  13. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/table_specific_config.py +34 -0
  14. databricks_bundles-0.285.0/databricks/bundles/version.py +1 -0
  15. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/pyproject.toml +1 -1
  16. databricks_bundles-0.283.0/databricks/bundles/version.py +0 -1
  17. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/LICENSE +0 -0
  18. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/__init__.py +0 -0
  19. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/__init__.py +0 -0
  20. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/build.py +0 -0
  21. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/__init__.py +0 -0
  22. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_bundle.py +0 -0
  23. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_diagnostics.py +0 -0
  24. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_load.py +0 -0
  25. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_location.py +0 -0
  26. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_resource.py +0 -0
  27. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_resource_mutator.py +0 -0
  28. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_resource_type.py +0 -0
  29. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_resources.py +0 -0
  30. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_transform.py +0 -0
  31. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_transform_to_json.py +0 -0
  32. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/core/_variable.py +0 -0
  33. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/adlsgen2_info.py +0 -0
  34. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/authentication_method.py +0 -0
  35. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/auto_scale.py +0 -0
  36. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/aws_attributes.py +0 -0
  37. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/aws_availability.py +0 -0
  38. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/azure_attributes.py +0 -0
  39. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/azure_availability.py +0 -0
  40. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +0 -0
  41. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/clients_types.py +0 -0
  42. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/cluster_log_conf.py +0 -0
  43. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/compute_config.py +0 -0
  44. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/condition.py +0 -0
  45. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/condition_task.py +0 -0
  46. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/condition_task_op.py +0 -0
  47. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/continuous.py +0 -0
  48. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/cron_schedule.py +0 -0
  49. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/data_security_mode.py +0 -0
  50. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/dbfs_storage_info.py +0 -0
  51. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/dbt_platform_task.py +0 -0
  52. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/dbt_task.py +0 -0
  53. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/docker_basic_auth.py +0 -0
  54. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/docker_image.py +0 -0
  55. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/ebs_volume_type.py +0 -0
  56. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/environment.py +0 -0
  57. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +0 -0
  58. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/for_each_task.py +0 -0
  59. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/gcp_attributes.py +0 -0
  60. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/gcp_availability.py +0 -0
  61. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/gcs_storage_info.py +0 -0
  62. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/gen_ai_compute_task.py +0 -0
  63. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/git_provider.py +0 -0
  64. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/git_source.py +0 -0
  65. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/init_script_info.py +0 -0
  66. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_cluster.py +0 -0
  67. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_email_notifications.py +0 -0
  68. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_environment.py +0 -0
  69. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_notification_settings.py +0 -0
  70. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_parameter_definition.py +0 -0
  71. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_permission.py +0 -0
  72. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_permission_level.py +0 -0
  73. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/job_run_as.py +0 -0
  74. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/jobs_health_metric.py +0 -0
  75. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/jobs_health_operator.py +0 -0
  76. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/jobs_health_rule.py +0 -0
  77. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/jobs_health_rules.py +0 -0
  78. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/kind.py +0 -0
  79. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/library.py +0 -0
  80. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/lifecycle.py +0 -0
  81. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/local_file_info.py +0 -0
  82. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/log_analytics_info.py +0 -0
  83. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/maven_library.py +0 -0
  84. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/model_trigger_configuration.py +0 -0
  85. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py +0 -0
  86. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/notebook_task.py +0 -0
  87. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/pause_status.py +0 -0
  88. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/performance_target.py +0 -0
  89. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +0 -0
  90. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +0 -0
  91. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/pipeline_params.py +0 -0
  92. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/pipeline_task.py +0 -0
  93. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/power_bi_model.py +0 -0
  94. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/power_bi_table.py +0 -0
  95. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/power_bi_task.py +0 -0
  96. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/python_py_pi_library.py +0 -0
  97. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/python_wheel_task.py +0 -0
  98. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/queue_settings.py +0 -0
  99. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/r_cran_library.py +0 -0
  100. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/run_if.py +0 -0
  101. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/run_job_task.py +0 -0
  102. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/runtime_engine.py +0 -0
  103. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/s3_storage_info.py +0 -0
  104. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/source.py +0 -0
  105. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/spark_jar_task.py +0 -0
  106. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/spark_python_task.py +0 -0
  107. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/spark_submit_task.py +0 -0
  108. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/sql_task.py +0 -0
  109. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/sql_task_alert.py +0 -0
  110. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/sql_task_dashboard.py +0 -0
  111. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/sql_task_file.py +0 -0
  112. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/sql_task_query.py +0 -0
  113. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/sql_task_subscription.py +0 -0
  114. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/storage_mode.py +0 -0
  115. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/subscription.py +0 -0
  116. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/subscription_subscriber.py +0 -0
  117. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/table_update_trigger_configuration.py +0 -0
  118. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/task.py +0 -0
  119. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/task_dependency.py +0 -0
  120. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/task_email_notifications.py +0 -0
  121. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/task_notification_settings.py +0 -0
  122. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/task_retry_mode.py +0 -0
  123. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/trigger_settings.py +0 -0
  124. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/volumes_storage_info.py +0 -0
  125. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/webhook.py +0 -0
  126. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/webhook_notifications.py +0 -0
  127. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/workload_type.py +0 -0
  128. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/jobs/_models/workspace_storage_info.py +0 -0
  129. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/adlsgen2_info.py +0 -0
  130. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/aws_attributes.py +0 -0
  131. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/aws_availability.py +0 -0
  132. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/azure_attributes.py +0 -0
  133. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/azure_availability.py +0 -0
  134. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/cluster_log_conf.py +0 -0
  135. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/connection_parameters.py +0 -0
  136. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/dbfs_storage_info.py +0 -0
  137. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ebs_volume_type.py +0 -0
  138. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/event_log_spec.py +0 -0
  139. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/file_library.py +0 -0
  140. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/filters.py +0 -0
  141. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/gcp_attributes.py +0 -0
  142. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/gcp_availability.py +0 -0
  143. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/gcs_storage_info.py +0 -0
  144. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ingestion_config.py +0 -0
  145. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +0 -0
  146. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py +0 -0
  147. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py +0 -0
  148. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py +0 -0
  149. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/init_script_info.py +0 -0
  150. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/lifecycle.py +0 -0
  151. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/local_file_info.py +0 -0
  152. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/log_analytics_info.py +0 -0
  153. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/maven_library.py +0 -0
  154. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/notebook_library.py +0 -0
  155. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/notifications.py +0 -0
  156. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/path_pattern.py +0 -0
  157. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline.py +0 -0
  158. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline_cluster.py +0 -0
  159. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py +0 -0
  160. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py +0 -0
  161. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline_library.py +0 -0
  162. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline_permission.py +0 -0
  163. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipeline_permission_level.py +0 -0
  164. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/pipelines_environment.py +0 -0
  165. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/postgres_catalog_config.py +0 -0
  166. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/postgres_slot_config.py +0 -0
  167. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/report_spec.py +0 -0
  168. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/restart_window.py +0 -0
  169. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/run_as.py +0 -0
  170. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/s3_storage_info.py +0 -0
  171. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/schema_spec.py +0 -0
  172. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/source_catalog_config.py +0 -0
  173. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/source_config.py +0 -0
  174. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/table_spec.py +0 -0
  175. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +0 -0
  176. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/volumes_storage_info.py +0 -0
  177. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/pipelines/_models/workspace_storage_info.py +0 -0
  178. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/py.typed +0 -0
  179. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/schemas/__init__.py +0 -0
  180. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/schemas/_models/lifecycle.py +0 -0
  181. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/schemas/_models/schema.py +0 -0
  182. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/schemas/_models/schema_grant.py +0 -0
  183. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/schemas/_models/schema_grant_privilege.py +0 -0
  184. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/volumes/__init__.py +0 -0
  185. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/volumes/_models/lifecycle.py +0 -0
  186. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/volumes/_models/volume.py +0 -0
  187. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/volumes/_models/volume_grant.py +0 -0
  188. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/volumes/_models/volume_grant_privilege.py +0 -0
  189. {databricks_bundles-0.283.0 → databricks_bundles-0.285.0}/databricks/bundles/volumes/_models/volume_type.py +0 -0

--- databricks_bundles-0.283.0/PKG-INFO
+++ databricks_bundles-0.285.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.283.0
+Version: 0.285.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/python/
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.283.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.285.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash

--- databricks_bundles-0.283.0/README.md
+++ databricks_bundles-0.285.0/README.md
@@ -13,7 +13,7 @@ Reference documentation is available at https://databricks.github.io/cli/python/
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.283.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.285.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash

--- databricks_bundles-0.283.0/databricks/bundles/jobs/__init__.py
+++ databricks_bundles-0.285.0/databricks/bundles/jobs/__init__.py
@@ -153,6 +153,9 @@ __all__ = [
     "ModelTriggerConfigurationConditionParam",
     "ModelTriggerConfigurationDict",
     "ModelTriggerConfigurationParam",
+    "NodeTypeFlexibility",
+    "NodeTypeFlexibilityDict",
+    "NodeTypeFlexibilityParam",
     "NotebookTask",
     "NotebookTaskDict",
     "NotebookTaskParam",
@@ -522,6 +525,11 @@ from databricks.bundles.jobs._models.model_trigger_configuration_condition import (
     ModelTriggerConfigurationCondition,
     ModelTriggerConfigurationConditionParam,
 )
+from databricks.bundles.jobs._models.node_type_flexibility import (
+    NodeTypeFlexibility,
+    NodeTypeFlexibilityDict,
+    NodeTypeFlexibilityParam,
+)
 from databricks.bundles.jobs._models.notebook_task import (
     NotebookTask,
     NotebookTaskDict,

--- databricks_bundles-0.283.0/databricks/bundles/jobs/_models/cluster_spec.py
+++ databricks_bundles-0.285.0/databricks/bundles/jobs/_models/cluster_spec.py
@@ -38,6 +38,10 @@ from databricks.bundles.jobs._models.init_script_info import (
     InitScriptInfoParam,
 )
 from databricks.bundles.jobs._models.kind import Kind, KindParam
+from databricks.bundles.jobs._models.node_type_flexibility import (
+    NodeTypeFlexibility,
+    NodeTypeFlexibilityParam,
+)
 from databricks.bundles.jobs._models.runtime_engine import (
     RuntimeEngine,
     RuntimeEngineParam,
@@ -125,6 +129,11 @@ class ClusterSpec:
     assigned.
     """
 
+    driver_node_type_flexibility: VariableOrOptional[NodeTypeFlexibility] = None
+    """
+    Flexible node type configuration for the driver node.
+    """
+
     driver_node_type_id: VariableOrOptional[str] = None
     """
     The node type of the Spark driver.
@@ -258,6 +267,11 @@ class ClusterSpec:
     `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
     """
 
+    worker_node_type_flexibility: VariableOrOptional[NodeTypeFlexibility] = None
+    """
+    Flexible node type configuration for worker nodes.
+    """
+
     workload_type: VariableOrOptional[WorkloadType] = None
 
     @classmethod
@@ -339,6 +353,11 @@ class ClusterSpecDict(TypedDict, total=False):
     assigned.
     """
 
+    driver_node_type_flexibility: VariableOrOptional[NodeTypeFlexibilityParam]
+    """
+    Flexible node type configuration for the driver node.
+    """
+
     driver_node_type_id: VariableOrOptional[str]
     """
     The node type of the Spark driver.
@@ -472,6 +491,11 @@ class ClusterSpecDict(TypedDict, total=False):
     `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
     """
 
+    worker_node_type_flexibility: VariableOrOptional[NodeTypeFlexibilityParam]
+    """
+    Flexible node type configuration for worker nodes.
+    """
+
     workload_type: VariableOrOptional[WorkloadTypeParam]
 
 

--- databricks_bundles-0.283.0/databricks/bundles/jobs/_models/dashboard_task.py
+++ databricks_bundles-0.285.0/databricks/bundles/jobs/_models/dashboard_task.py
@@ -4,10 +4,7 @@ from typing import TYPE_CHECKING, TypedDict
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrOptional
-from databricks.bundles.jobs._models.subscription import (
-    Subscription,
-    SubscriptionParam,
-)
+from databricks.bundles.jobs._models.subscription import Subscription, SubscriptionParam
 
 if TYPE_CHECKING:
     from typing_extensions import Self

--- databricks_bundles-0.283.0/databricks/bundles/jobs/_models/job.py
+++ databricks_bundles-0.285.0/databricks/bundles/jobs/_models/job.py
@@ -9,10 +9,7 @@ from databricks.bundles.core._variable import (
     VariableOrList,
     VariableOrOptional,
 )
-from databricks.bundles.jobs._models.continuous import (
-    Continuous,
-    ContinuousParam,
-)
+from databricks.bundles.jobs._models.continuous import Continuous, ContinuousParam
 from databricks.bundles.jobs._models.cron_schedule import (
     CronSchedule,
     CronScheduleParam,

--- /dev/null
+++ databricks_bundles-0.285.0/databricks/bundles/jobs/_models/node_type_flexibility.py
@@ -0,0 +1,40 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrList
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class NodeTypeFlexibility:
+    """
+    Configuration for flexible node types, allowing fallback to alternate node types during cluster launch and upscale.
+    """
+
+    alternate_node_type_ids: VariableOrList[str] = field(default_factory=list)
+    """
+    A list of node type IDs to use as fallbacks when the primary node type is unavailable.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "NodeTypeFlexibilityDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "NodeTypeFlexibilityDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class NodeTypeFlexibilityDict(TypedDict, total=False):
+    """"""
+
+    alternate_node_type_ids: VariableOrList[str]
+    """
+    A list of node type IDs to use as fallbacks when the primary node type is unavailable.
+    """
+
+
+NodeTypeFlexibilityParam = NodeTypeFlexibilityDict | NodeTypeFlexibility
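
The new `NodeTypeFlexibility` model plugs into the `driver_node_type_flexibility` and `worker_node_type_flexibility` fields added to `ClusterSpec` above. A minimal sketch of both construction forms, assuming `ClusterSpec` and `NodeTypeFlexibility` are importable from `databricks.bundles.jobs` (the `__all__` additions above suggest they are); the node type IDs and Spark version are illustrative placeholders, not values from this diff:

```python
from databricks.bundles.jobs import ClusterSpec, NodeTypeFlexibility

# Dataclass form: fall back to alternate worker node types when the primary
# node type is unavailable. Node type IDs and spark_version are placeholders.
spec = ClusterSpec(
    spark_version="15.4.x-scala2.12",
    node_type_id="m5.xlarge",
    worker_node_type_flexibility=NodeTypeFlexibility(
        alternate_node_type_ids=["m5a.xlarge", "m5d.xlarge"],
    ),
)

# Equivalent dict form, matching ClusterSpecDict / NodeTypeFlexibilityDict.
spec_from_dict = ClusterSpec.from_dict(
    {
        "spark_version": "15.4.x-scala2.12",
        "node_type_id": "m5.xlarge",
        "worker_node_type_flexibility": {
            "alternate_node_type_ids": ["m5a.xlarge", "m5d.xlarge"],
        },
    }
)
```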

--- databricks_bundles-0.283.0/databricks/bundles/pipelines/__init__.py
+++ databricks_bundles-0.285.0/databricks/bundles/pipelines/__init__.py
@@ -2,6 +2,9 @@ __all__ = [
     "Adlsgen2Info",
     "Adlsgen2InfoDict",
     "Adlsgen2InfoParam",
+    "AutoFullRefreshPolicy",
+    "AutoFullRefreshPolicyDict",
+    "AutoFullRefreshPolicyParam",
     "AwsAttributes",
     "AwsAttributesDict",
     "AwsAttributesParam",
@@ -81,6 +84,9 @@ __all__ = [
     "Notifications",
     "NotificationsDict",
     "NotificationsParam",
+    "OperationTimeWindow",
+    "OperationTimeWindowDict",
+    "OperationTimeWindowParam",
     "PathPattern",
     "PathPatternDict",
     "PathPatternParam",
@@ -155,6 +161,11 @@ from databricks.bundles.pipelines._models.adlsgen2_info import (
     Adlsgen2InfoDict,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.pipelines._models.auto_full_refresh_policy import (
+    AutoFullRefreshPolicy,
+    AutoFullRefreshPolicyDict,
+    AutoFullRefreshPolicyParam,
+)
 from databricks.bundles.pipelines._models.aws_attributes import (
     AwsAttributes,
     AwsAttributesDict,
@@ -287,6 +298,11 @@ from databricks.bundles.pipelines._models.notifications import (
     NotificationsDict,
     NotificationsParam,
 )
+from databricks.bundles.pipelines._models.operation_time_window import (
+    OperationTimeWindow,
+    OperationTimeWindowDict,
+    OperationTimeWindowParam,
+)
 from databricks.bundles.pipelines._models.path_pattern import (
     PathPattern,
     PathPatternDict,
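
Per the `__all__` additions above, the two new pipeline models and their Dict/Param aliases are re-exported from the package root, so they can be imported alongside the existing exports:

```python
# New re-exports in databricks-bundles 0.285.0, as declared in __all__ above.
from databricks.bundles.pipelines import (
    AutoFullRefreshPolicy,
    AutoFullRefreshPolicyDict,
    AutoFullRefreshPolicyParam,
    OperationTimeWindow,
    OperationTimeWindowDict,
    OperationTimeWindowParam,
)
```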

--- /dev/null
+++ databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/auto_full_refresh_policy.py
@@ -0,0 +1,54 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOr, VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class AutoFullRefreshPolicy:
+    """
+    Policy for auto full refresh.
+    """
+
+    enabled: VariableOr[bool]
+    """
+    (Required, Mutable) Whether to enable auto full refresh or not.
+    """
+
+    min_interval_hours: VariableOrOptional[int] = None
+    """
+    (Optional, Mutable) Specify the minimum interval in hours between the timestamp
+    at which a table was last full refreshed and the current timestamp for triggering auto full
+    If unspecified and autoFullRefresh is enabled then by default min_interval_hours is 24 hours.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "AutoFullRefreshPolicyDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "AutoFullRefreshPolicyDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class AutoFullRefreshPolicyDict(TypedDict, total=False):
+    """"""
+
+    enabled: VariableOr[bool]
+    """
+    (Required, Mutable) Whether to enable auto full refresh or not.
+    """
+
+    min_interval_hours: VariableOrOptional[int]
+    """
+    (Optional, Mutable) Specify the minimum interval in hours between the timestamp
+    at which a table was last full refreshed and the current timestamp for triggering auto full
+    If unspecified and autoFullRefresh is enabled then by default min_interval_hours is 24 hours.
+    """
+
+
+AutoFullRefreshPolicyParam = AutoFullRefreshPolicyDict | AutoFullRefreshPolicy
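
A short sketch of the two construction paths the generated model supports: direct dataclass construction and the dict form via `from_dict`. `enabled` is the only required field, and per the docstring the interval defaults to 24 hours when omitted; the 48-hour value below is just an illustration:

```python
from databricks.bundles.pipelines import AutoFullRefreshPolicy

# Direct dataclass construction (kw_only): only `enabled` is required;
# min_interval_hours is documented to default to 24 hours when unset.
policy = AutoFullRefreshPolicy(enabled=True)

# Dict form, accepted wherever an AutoFullRefreshPolicyParam is expected.
policy_with_interval = AutoFullRefreshPolicy.from_dict(
    {"enabled": True, "min_interval_hours": 48}
)
```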

--- databricks_bundles-0.283.0/databricks/bundles/pipelines/_models/day_of_week.py
+++ databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/day_of_week.py
@@ -4,8 +4,6 @@ from typing import Literal
 
 class DayOfWeek(Enum):
     """
-    :meta private: [EXPERIMENTAL]
-
     Days of week in which the window is allowed to happen.
     If not specified all days of the week will be used.
     """

--- databricks_bundles-0.283.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
+++ databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.ingestion_config import (
     IngestionConfig,
     IngestionConfigParam,
 )
+from databricks.bundles.pipelines._models.operation_time_window import (
+    OperationTimeWindow,
+    OperationTimeWindowParam,
+)
 from databricks.bundles.pipelines._models.source_config import (
     SourceConfig,
     SourceConfigParam,
@@ -30,6 +34,11 @@ class IngestionPipelineDefinition:
     Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
     """
 
+    full_refresh_window: VariableOrOptional[OperationTimeWindow] = None
+    """
+    (Optional) A window that specifies a set of time ranges for snapshot queries in CDC.
+    """
+
     ingest_from_uc_foreign_catalog: VariableOrOptional[bool] = None
     """
     :meta private: [EXPERIMENTAL]
@@ -81,6 +90,11 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
     """
 
+    full_refresh_window: VariableOrOptional[OperationTimeWindowParam]
+    """
+    (Optional) A window that specifies a set of time ranges for snapshot queries in CDC.
+    """
+
     ingest_from_uc_foreign_catalog: VariableOrOptional[bool]
     """
     :meta private: [EXPERIMENTAL]

--- /dev/null
+++ databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/operation_time_window.py
@@ -0,0 +1,69 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrList,
+    VariableOrOptional,
+)
+from databricks.bundles.pipelines._models.day_of_week import DayOfWeek, DayOfWeekParam
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class OperationTimeWindow:
+    """
+    Proto representing a window
+    """
+
+    start_hour: VariableOr[int]
+    """
+    An integer between 0 and 23 denoting the start hour for the window in the 24-hour day.
+    """
+
+    days_of_week: VariableOrList[DayOfWeek] = field(default_factory=list)
+    """
+    Days of week in which the window is allowed to happen
+    If not specified all days of the week will be used.
+    """
+
+    time_zone_id: VariableOrOptional[str] = None
+    """
+    Time zone id of window. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.
+    If not specified, UTC will be used.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "OperationTimeWindowDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "OperationTimeWindowDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class OperationTimeWindowDict(TypedDict, total=False):
+    """"""
+
+    start_hour: VariableOr[int]
+    """
+    An integer between 0 and 23 denoting the start hour for the window in the 24-hour day.
+    """
+
+    days_of_week: VariableOrList[DayOfWeekParam]
+    """
+    Days of week in which the window is allowed to happen
+    If not specified all days of the week will be used.
+    """
+
+    time_zone_id: VariableOrOptional[str]
+    """
+    Time zone id of window. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.
+    If not specified, UTC will be used.
+    """
+
+
+OperationTimeWindowParam = OperationTimeWindowDict | OperationTimeWindow
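
A hedged sketch of declaring a window that would feed the new `full_refresh_window` field on `IngestionPipelineDefinition`. The day-of-week strings assume the `DayOfWeek` enum uses upper-case day names; that spelling is not confirmed by this diff:

```python
from databricks.bundles.pipelines import OperationTimeWindow

# Snapshot queries allowed from 01:00 on weekends. UTC is the documented
# default, so time_zone_id is included only for illustration.
window = OperationTimeWindow.from_dict(
    {
        "start_hour": 1,
        "days_of_week": ["SATURDAY", "SUNDAY"],  # assumed enum spellings
        "time_zone_id": "UTC",
    }
)
```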

--- databricks_bundles-0.283.0/databricks/bundles/pipelines/_models/table_specific_config.py
+++ databricks_bundles-0.285.0/databricks/bundles/pipelines/_models/table_specific_config.py
@@ -4,6 +4,10 @@ from typing import TYPE_CHECKING, TypedDict
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
+from databricks.bundles.pipelines._models.auto_full_refresh_policy import (
+    AutoFullRefreshPolicy,
+    AutoFullRefreshPolicyParam,
+)
 from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_specific_config_query_based_connector_config import (
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
@@ -25,6 +29,21 @@ if TYPE_CHECKING:
 class TableSpecificConfig:
     """"""
 
+    auto_full_refresh_policy: VariableOrOptional[AutoFullRefreshPolicy] = None
+    """
+    (Optional, Mutable) Policy for auto full refresh, if enabled pipeline will automatically try
+    to fix issues by doing a full refresh on the table in the retry run. auto_full_refresh_policy
+    in table configuration will override the above level auto_full_refresh_policy.
+    For example,
+    {
+      "auto_full_refresh_policy": {
+        "enabled": true,
+        "min_interval_hours": 23,
+      }
+    }
+    If unspecified, auto full refresh is disabled.
+    """
+
     exclude_columns: VariableOrList[str] = field(default_factory=list)
     """
     A list of column names to be excluded for the ingestion.
@@ -102,6 +121,21 @@ class TableSpecificConfig:
 class TableSpecificConfigDict(TypedDict, total=False):
     """"""
 
+    auto_full_refresh_policy: VariableOrOptional[AutoFullRefreshPolicyParam]
+    """
+    (Optional, Mutable) Policy for auto full refresh, if enabled pipeline will automatically try
+    to fix issues by doing a full refresh on the table in the retry run. auto_full_refresh_policy
+    in table configuration will override the above level auto_full_refresh_policy.
+    For example,
+    {
+      "auto_full_refresh_policy": {
+        "enabled": true,
+        "min_interval_hours": 23,
+      }
+    }
+    If unspecified, auto full refresh is disabled.
+    """
+
     exclude_columns: VariableOrList[str]
     """
     A list of column names to be excluded for the ingestion.
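
The docstring's JSON example, translated into the dict form of the generated model; this assumes `TableSpecificConfig` remains exported from `databricks.bundles.pipelines` as in earlier releases:

```python
from databricks.bundles.pipelines import TableSpecificConfig

# Retry runs may auto full refresh the table, but only if the last full
# refresh is at least 23 hours old (mirrors the docstring example above).
config = TableSpecificConfig.from_dict(
    {
        "auto_full_refresh_policy": {
            "enabled": True,
            "min_interval_hours": 23,
        },
    }
)
```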

--- /dev/null
+++ databricks_bundles-0.285.0/databricks/bundles/version.py
@@ -0,0 +1 @@
+__version__ = "0.285.0"

--- databricks_bundles-0.283.0/pyproject.toml
+++ databricks_bundles-0.285.0/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "databricks-bundles"
 description = "Python support for Databricks Asset Bundles"
-version = "0.283.0"
+version = "0.285.0"
 
 authors = [
     { name = "Gleb Kanterov", email = "gleb.kanterov@databricks.com" },

--- databricks_bundles-0.283.0/databricks/bundles/version.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "0.283.0"