databricks-bundles 0.266.0.tar.gz → 0.268.0.tar.gz

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (181)
  1. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/PKG-INFO +2 -2
  2. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/README.md +1 -1
  3. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/build.py +48 -7
  4. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/__init__.py +2 -0
  5. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_diagnostics.py +11 -0
  6. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_resource_mutator.py +33 -0
  7. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_resource_type.py +7 -1
  8. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_resources.py +44 -0
  9. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/__init__.py +14 -0
  10. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/continuous.py +14 -0
  11. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/environment.py +10 -0
  12. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/gcp_attributes.py +14 -0
  13. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/init_script_info.py +16 -0
  14. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job.py +11 -0
  15. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_email_notifications.py +13 -1
  16. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/library.py +10 -0
  17. databricks_bundles-0.268.0/databricks/bundles/jobs/_models/lifecycle.py +38 -0
  18. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/spark_jar_task.py +25 -1
  19. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/task_email_notifications.py +13 -1
  20. databricks_bundles-0.268.0/databricks/bundles/jobs/_models/task_retry_mode.py +17 -0
  21. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/__init__.py +40 -0
  22. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/gcp_attributes.py +14 -0
  23. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +10 -0
  24. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
  25. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/init_script_info.py +16 -0
  26. databricks_bundles-0.268.0/databricks/bundles/pipelines/_models/lifecycle.py +38 -0
  27. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline.py +16 -2
  28. databricks_bundles-0.268.0/databricks/bundles/pipelines/_models/postgres_catalog_config.py +50 -0
  29. databricks_bundles-0.268.0/databricks/bundles/pipelines/_models/postgres_slot_config.py +60 -0
  30. databricks_bundles-0.268.0/databricks/bundles/pipelines/_models/source_catalog_config.py +64 -0
  31. databricks_bundles-0.268.0/databricks/bundles/pipelines/_models/source_config.py +48 -0
  32. databricks_bundles-0.268.0/databricks/bundles/schemas/__init__.py +30 -0
  33. databricks_bundles-0.268.0/databricks/bundles/schemas/_models/lifecycle.py +38 -0
  34. databricks_bundles-0.268.0/databricks/bundles/schemas/_models/schema.py +97 -0
  35. databricks_bundles-0.268.0/databricks/bundles/schemas/_models/schema_grant.py +40 -0
  36. databricks_bundles-0.268.0/databricks/bundles/schemas/_models/schema_grant_privilege.py +38 -0
  37. databricks_bundles-0.268.0/databricks/bundles/version.py +1 -0
  38. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/volumes/__init__.py +8 -0
  39. databricks_bundles-0.268.0/databricks/bundles/volumes/_models/lifecycle.py +38 -0
  40. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/volumes/_models/volume.py +11 -0
  41. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/pyproject.toml +2 -1
  42. databricks_bundles-0.266.0/databricks/bundles/version.py +0 -1
  43. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/LICENSE +0 -0
  44. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/__init__.py +0 -0
  45. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/__init__.py +0 -0
  46. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_bundle.py +0 -0
  47. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_load.py +2 -2
  48. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_location.py +0 -0
  49. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_resource.py +0 -0
  50. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_transform.py +0 -0
  51. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_transform_to_json.py +0 -0
  52. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/core/_variable.py +1 -1
  53. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/adlsgen2_info.py +0 -0
  54. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/authentication_method.py +0 -0
  55. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/auto_scale.py +0 -0
  56. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/aws_attributes.py +0 -0
  57. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/aws_availability.py +0 -0
  58. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/azure_attributes.py +0 -0
  59. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/azure_availability.py +0 -0
  60. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +0 -0
  61. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/clients_types.py +0 -0
  62. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/cluster_log_conf.py +0 -0
  63. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/cluster_spec.py +0 -0
  64. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/compute_config.py +0 -0
  65. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/condition.py +0 -0
  66. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/condition_task.py +0 -0
  67. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/condition_task_op.py +0 -0
  68. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/cron_schedule.py +0 -0
  69. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/dashboard_task.py +0 -0
  70. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/data_security_mode.py +0 -0
  71. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/dbfs_storage_info.py +0 -0
  72. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/dbt_platform_task.py +0 -0
  73. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/dbt_task.py +0 -0
  74. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/docker_basic_auth.py +0 -0
  75. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/docker_image.py +0 -0
  76. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/ebs_volume_type.py +0 -0
  77. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +0 -0
  78. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/for_each_task.py +0 -0
  79. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/gcp_availability.py +0 -0
  80. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/gcs_storage_info.py +0 -0
  81. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/gen_ai_compute_task.py +0 -0
  82. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/git_provider.py +0 -0
  83. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/git_source.py +0 -0
  84. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_cluster.py +0 -0
  85. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_environment.py +0 -0
  86. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_notification_settings.py +0 -0
  87. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_parameter_definition.py +0 -0
  88. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_permission.py +0 -0
  89. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_permission_level.py +0 -0
  90. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/job_run_as.py +0 -0
  91. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/jobs_health_metric.py +0 -0
  92. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/jobs_health_operator.py +0 -0
  93. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/jobs_health_rule.py +0 -0
  94. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/jobs_health_rules.py +0 -0
  95. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/local_file_info.py +0 -0
  96. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/log_analytics_info.py +0 -0
  97. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/maven_library.py +0 -0
  98. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/notebook_task.py +0 -0
  99. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/pause_status.py +0 -0
  100. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/performance_target.py +0 -0
  101. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +0 -0
  102. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +0 -0
  103. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/pipeline_params.py +0 -0
  104. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/pipeline_task.py +0 -0
  105. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/power_bi_model.py +0 -0
  106. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/power_bi_table.py +0 -0
  107. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/power_bi_task.py +0 -0
  108. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/python_py_pi_library.py +0 -0
  109. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/python_wheel_task.py +0 -0
  110. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/queue_settings.py +0 -0
  111. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/r_cran_library.py +0 -0
  112. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/run_if.py +0 -0
  113. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/run_job_task.py +0 -0
  114. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/runtime_engine.py +0 -0
  115. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/s3_storage_info.py +0 -0
  116. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/source.py +0 -0
  117. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/spark_python_task.py +0 -0
  118. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/spark_submit_task.py +0 -0
  119. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/sql_task.py +0 -0
  120. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/sql_task_alert.py +0 -0
  121. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/sql_task_dashboard.py +0 -0
  122. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/sql_task_file.py +0 -0
  123. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/sql_task_query.py +0 -0
  124. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/sql_task_subscription.py +0 -0
  125. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/storage_mode.py +0 -0
  126. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/subscription.py +0 -0
  127. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/subscription_subscriber.py +0 -0
  128. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/table_update_trigger_configuration.py +0 -0
  129. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/task.py +0 -0
  130. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/task_dependency.py +0 -0
  131. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/task_notification_settings.py +0 -0
  132. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/trigger_settings.py +0 -0
  133. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/volumes_storage_info.py +0 -0
  134. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/webhook.py +0 -0
  135. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/webhook_notifications.py +0 -0
  136. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/workload_type.py +0 -0
  137. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/jobs/_models/workspace_storage_info.py +0 -0
  138. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/adlsgen2_info.py +0 -0
  139. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/aws_attributes.py +0 -0
  140. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/aws_availability.py +0 -0
  141. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/azure_attributes.py +0 -0
  142. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/azure_availability.py +0 -0
  143. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/cluster_log_conf.py +0 -0
  144. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/day_of_week.py +0 -0
  145. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/dbfs_storage_info.py +0 -0
  146. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/ebs_volume_type.py +0 -0
  147. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/event_log_spec.py +0 -0
  148. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/file_library.py +0 -0
  149. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/filters.py +0 -0
  150. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/gcp_availability.py +0 -0
  151. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/gcs_storage_info.py +0 -0
  152. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/ingestion_config.py +0 -0
  153. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py +0 -0
  154. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/ingestion_source_type.py +0 -0
  155. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/local_file_info.py +0 -0
  156. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/log_analytics_info.py +0 -0
  157. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/maven_library.py +0 -0
  158. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/notebook_library.py +0 -0
  159. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/notifications.py +0 -0
  160. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/path_pattern.py +0 -0
  161. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline_cluster.py +0 -0
  162. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py +0 -0
  163. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py +0 -0
  164. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline_library.py +0 -0
  165. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline_permission.py +0 -0
  166. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipeline_permission_level.py +0 -0
  167. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/pipelines_environment.py +0 -0
  168. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/report_spec.py +0 -0
  169. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/restart_window.py +0 -0
  170. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/run_as.py +0 -0
  171. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/s3_storage_info.py +0 -0
  172. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/schema_spec.py +0 -0
  173. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/table_spec.py +0 -0
  174. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/table_specific_config.py +0 -0
  175. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +0 -0
  176. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/volumes_storage_info.py +0 -0
  177. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/pipelines/_models/workspace_storage_info.py +0 -0
  178. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/py.typed +0 -0
  179. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/volumes/_models/volume_grant.py +0 -0
  180. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/volumes/_models/volume_grant_privilege.py +0 -0
  181. {databricks_bundles-0.266.0 → databricks_bundles-0.268.0}/databricks/bundles/volumes/_models/volume_type.py +0 -0
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.266.0
+Version: 0.268.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.266.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.268.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.266.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.268.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
--- a/databricks/bundles/build.py
+++ b/databricks/bundles/build.py
@@ -166,16 +166,12 @@ def _apply_mutators_for_type(
     return resources, Diagnostics()
 
 
-def python_mutator(
-    args: _Args,
-) -> tuple[dict, dict[tuple[str, ...], Location], Diagnostics]:
-    input = json.load(open(args.input, encoding="utf-8"))
+def _read_conf(input: dict) -> tuple[_Conf, Diagnostics]:
     experimental = input.get("experimental", {})
 
     if experimental.get("pydabs", {}) != {}:
         return (
-            {},
-            {},
+            _Conf(),
             Diagnostics.create_error(
                 "'experimental/pydabs' is not supported by 'databricks-bundles', use 'experimental/python' instead",
                 detail="",
@@ -184,8 +180,53 @@ def python_mutator(
             ),
         )
 
-    conf_dict = experimental.get("python", {})
+    experimental_conf_dict = experimental.get("python", {})
+    experimental_conf = _transform(_Conf, experimental_conf_dict)
+
+    conf_dict = input.get("python", {})
     conf = _transform(_Conf, conf_dict)
+
+    has_conf = conf != _Conf()
+    has_experimental_conf = experimental_conf != _Conf()
+
+    if has_conf and not has_experimental_conf:
+        return conf, Diagnostics()
+    elif not has_conf and has_experimental_conf:
+        # do not generate warning in Python code, if CLI supports non-experimental 'python',
+        # it should generate a warning
+        return experimental_conf, Diagnostics()
+    elif has_conf and has_experimental_conf:
+        # for backward-compatibility, CLI can copy contents of 'python' into 'experimental/python'
+        # if configs are equal, it isn't a problem
+        if conf != experimental_conf:
+            return (
+                _Conf(),
+                Diagnostics.create_error(
+                    "Both 'python' and 'experimental/python' sections are present, use 'python' section only",
+                    detail="",
+                    location=None,
+                    path=(
+                        "experimental",
+                        "python",
+                    ),
+                ),
+            )
+        else:
+            return conf, Diagnostics()
+    else:
+        return _Conf(), Diagnostics()
+
+
+def python_mutator(
+    args: _Args,
+) -> tuple[dict, dict[tuple[str, ...], Location], Diagnostics]:
+    input = json.load(open(args.input, encoding="utf-8"))
+    diagnostics = Diagnostics()
+
+    conf, diagnostics = diagnostics.extend_tuple(_read_conf(input))
+    if diagnostics.has_error():
+        return input, {}, diagnostics
+
     bundle = _parse_bundle_info(input)
 
     if args.phase == "load_resources":
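
The new `_read_conf` helper centralizes how the bundle configuration is read: the top-level `python` section now takes precedence over the legacy `experimental/python` section. A minimal sketch of the resulting behavior, importing the helper shown above (the `resources` key inside the section is an assumption based on the documented `experimental/python` layout, not something this diff shows):

```python
# Sketch only: exercises the precedence rules of _read_conf shown above.
from databricks.bundles.build import _read_conf

new_style = {"python": {"resources": ["resources:load_resources"]}}
old_style = {"experimental": {"python": {"resources": ["resources:load_resources"]}}}

conf, diags = _read_conf(new_style)  # only 'python' present: used as-is
conf, diags = _read_conf(old_style)  # only 'experimental/python': still accepted

# Both present and equal (the CLI may copy 'python' into 'experimental/python'
# for backward compatibility): accepted without error.
conf, diags = _read_conf({**new_style, **old_style})
assert not diags.has_error()

# Both present but different: an error diagnostic, and python_mutator returns early.
conf, diags = _read_conf({"python": {"resources": ["other:loader"]}, **old_style})
assert diags.has_error()
```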
--- a/databricks/bundles/core/__init__.py
+++ b/databricks/bundles/core/__init__.py
@@ -18,6 +18,7 @@ __all__ = [
     "load_resources_from_modules",
     "load_resources_from_package_module",
     "pipeline_mutator",
+    "schema_mutator",
     "variables",
     "volume_mutator",
 ]
@@ -40,6 +41,7 @@ from databricks.bundles.core._resource_mutator import (
     ResourceMutator,
     job_mutator,
     pipeline_mutator,
+    schema_mutator,
     volume_mutator,
 )
 from databricks.bundles.core._resources import Resources
--- a/databricks/bundles/core/_diagnostics.py
+++ b/databricks/bundles/core/_diagnostics.py
@@ -134,6 +134,17 @@ class Diagnostics:
 
         return False
 
+    def has_warning(self) -> bool:
+        """
+        Returns True if there is at least one warning in diagnostics.
+        """
+
+        for item in self.items:
+            if item.severity == Severity.WARNING:
+                return True
+
+        return False
+
     @classmethod
     def create_error(
         cls,
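
`has_warning` mirrors the existing `has_error`. A small sketch of the intended use; note that `Diagnostics.create_warning` and `extend` are assumed counterparts of the `create_error` and `extend_tuple` calls visible elsewhere in this diff:

```python
from databricks.bundles.core import Diagnostics

diags = Diagnostics()
assert not diags.has_warning()

# create_warning/extend are assumed to exist alongside create_error/extend_tuple
diags = diags.extend(Diagnostics.create_warning("'egg' libraries are deprecated"))
assert diags.has_warning() and not diags.has_error()
```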
--- a/databricks/bundles/core/_resource_mutator.py
+++ b/databricks/bundles/core/_resource_mutator.py
@@ -8,6 +8,7 @@ from databricks.bundles.core._resource import Resource
 if TYPE_CHECKING:
     from databricks.bundles.jobs._models.job import Job
     from databricks.bundles.pipelines._models.pipeline import Pipeline
+    from databricks.bundles.schemas._models.schema import Schema
     from databricks.bundles.volumes._models.volume import Volume
 
 _T = TypeVar("_T", bound=Resource)
@@ -130,6 +131,38 @@ def pipeline_mutator(function: Callable) -> ResourceMutator["Pipeline"]:
     return ResourceMutator(resource_type=Pipeline, function=function)
 
 
+@overload
+def schema_mutator(
+    function: Callable[[Bundle, "Schema"], "Schema"],
+) -> ResourceMutator["Schema"]: ...
+
+
+@overload
+def schema_mutator(
+    function: Callable[["Schema"], "Schema"],
+) -> ResourceMutator["Schema"]: ...
+
+
+def schema_mutator(function: Callable) -> ResourceMutator["Schema"]:
+    """
+    Decorator for defining a schema mutator. Function should return a new instance of the schema with the desired changes,
+    instead of mutating the input schema.
+
+    Example:
+
+    .. code-block:: python
+
+        @schema_mutator
+        def my_schema_mutator(bundle: Bundle, schema: Schema) -> Schema:
+            return replace(schema, name="my_schema")
+
+    :param function: Function that mutates a schema.
+    """
+    from databricks.bundles.schemas._models.schema import Schema
+
+    return ResourceMutator(resource_type=Schema, function=function)
+
+
 @overload
 def volume_mutator(
     function: Callable[[Bundle, "Volume"], "Volume"],
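
The docstring example uses the two-argument form; the second overload allows a mutator that omits the `Bundle` parameter. A short sketch of that form (the `comment` field on `Schema` is an assumption; the full model lives in the new `schemas` module listed in the files above):

```python
from dataclasses import replace

from databricks.bundles.core import schema_mutator
from databricks.bundles.schemas import Schema


@schema_mutator
def add_comment(schema: Schema) -> Schema:
    # Return a modified copy; mutators must not change the input in place.
    return replace(schema, comment="managed by databricks-bundles")  # 'comment' assumed
```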
--- a/databricks/bundles/core/_resource_type.py
+++ b/databricks/bundles/core/_resource_type.py
@@ -2,7 +2,6 @@ from dataclasses import dataclass
 from typing import Type
 
 from databricks.bundles.core._resource import Resource
-from databricks.bundles.volumes._models.volume import Volume
 
 
 @dataclass(kw_only=True, frozen=True)
@@ -34,6 +33,8 @@ class _ResourceType:
 
         from databricks.bundles.jobs._models.job import Job
         from databricks.bundles.pipelines._models.pipeline import Pipeline
+        from databricks.bundles.schemas._models.schema import Schema
+        from databricks.bundles.volumes._models.volume import Volume
 
         return (
             _ResourceType(
@@ -51,4 +52,9 @@ class _ResourceType:
                 plural_name="volumes",
                 singular_name="volume",
             ),
+            _ResourceType(
+                resource_type=Schema,
+                plural_name="schemas",
+                singular_name="schema",
+            ),
         )
--- a/databricks/bundles/core/_resources.py
+++ b/databricks/bundles/core/_resources.py
@@ -8,6 +8,7 @@ from databricks.bundles.core._transform import _transform
 if TYPE_CHECKING:
     from databricks.bundles.jobs._models.job import Job, JobParam
     from databricks.bundles.pipelines._models.pipeline import Pipeline, PipelineParam
+    from databricks.bundles.schemas._models.schema import Schema, SchemaParam
     from databricks.bundles.volumes._models.volume import Volume, VolumeParam
 
 __all__ = ["Resources"]
@@ -58,6 +59,7 @@ class Resources:
     def __init__(self):
         self._jobs = dict[str, "Job"]()
         self._pipelines = dict[str, "Pipeline"]()
+        self._schemas = dict[str, "Schema"]()
         self._volumes = dict[str, "Volume"]()
         self._locations = dict[tuple[str, ...], Location]()
         self._diagnostics = Diagnostics()
@@ -70,6 +72,10 @@ class Resources:
     def pipelines(self) -> dict[str, "Pipeline"]:
         return self._pipelines
 
+    @property
+    def schemas(self) -> dict[str, "Schema"]:
+        return self._schemas
+
     @property
     def volumes(self) -> dict[str, "Volume"]:
         return self._volumes
@@ -99,6 +105,7 @@ class Resources:
 
         from databricks.bundles.jobs import Job
         from databricks.bundles.pipelines import Pipeline
+        from databricks.bundles.schemas import Schema
         from databricks.bundles.volumes import Volume
 
         location = location or Location.from_stack_frame(depth=1)
@@ -108,6 +115,8 @@ class Resources:
                 self.add_job(resource_name, resource, location=location)
             case Pipeline():
                 self.add_pipeline(resource_name, resource, location=location)
+            case Schema():
+                self.add_schema(resource_name, resource, location=location)
             case Volume():
                 self.add_volume(resource_name, resource, location=location)
             case _:
@@ -177,6 +186,38 @@ class Resources:
 
         self._pipelines[resource_name] = pipeline
 
+    def add_schema(
+        self,
+        resource_name: str,
+        schema: "SchemaParam",
+        *,
+        location: Optional[Location] = None,
+    ) -> None:
+        """
+        Adds a schema to the collection of resources. Resource name must be unique across all schemas.
+
+        :param resource_name: unique identifier for the schema
+        :param schema: the schema to add, can be Schema or dict
+        :param location: optional location of the schema in the source code
+        """
+        from databricks.bundles.schemas import Schema
+
+        schema = _transform(Schema, schema)
+        path = ("resources", "schemas", resource_name)
+        location = location or Location.from_stack_frame(depth=1)
+
+        if self._schemas.get(resource_name):
+            self.add_diagnostic_error(
+                msg=f"Duplicate resource name '{resource_name}' for a schema. Resource names must be unique.",
+                location=location,
+                path=path,
+            )
+        else:
+            if location:
+                self.add_location(path, location)
+
+            self._schemas[resource_name] = schema
+
     def add_volume(
         self,
         resource_name: str,
@@ -285,6 +326,9 @@ class Resources:
         for name, pipeline in other.pipelines.items():
             self.add_pipeline(name, pipeline)
 
+        for name, schema in other.schemas.items():
+            self.add_schema(name, schema)
+
         for name, volume in other.volumes.items():
             self.add_volume(name, volume)
 
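
With `add_schema` and the `schemas` property, schemas now participate in `Resources` alongside jobs, pipelines, and volumes. A sketch of registering one (the `catalog_name`/`name` fields follow the Unity Catalog schema resource and are assumptions here):

```python
from databricks.bundles.core import Resources

resources = Resources()

# add_schema accepts SchemaParam, so a plain dict works as well as a Schema instance.
resources.add_schema("bronze", {"catalog_name": "main", "name": "bronze"})
assert "bronze" in resources.schemas

# Registering the same resource name again is recorded as a diagnostic error
# (see the duplicate-name branch above) rather than raising an exception.
```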
--- a/databricks/bundles/jobs/__init__.py
+++ b/databricks/bundles/jobs/__init__.py
@@ -134,6 +134,9 @@ __all__ = [
     "Library",
     "LibraryDict",
     "LibraryParam",
+    "Lifecycle",
+    "LifecycleDict",
+    "LifecycleParam",
     "LocalFileInfo",
     "LocalFileInfoDict",
     "LocalFileInfoParam",
@@ -244,6 +247,8 @@ __all__ = [
     "TaskNotificationSettingsDict",
     "TaskNotificationSettingsParam",
     "TaskParam",
+    "TaskRetryMode",
+    "TaskRetryModeParam",
     "TriggerSettings",
     "TriggerSettingsDict",
     "TriggerSettingsParam",
@@ -480,6 +485,11 @@ from databricks.bundles.jobs._models.jobs_health_rules import (
     JobsHealthRulesParam,
 )
 from databricks.bundles.jobs._models.library import Library, LibraryDict, LibraryParam
+from databricks.bundles.jobs._models.lifecycle import (
+    Lifecycle,
+    LifecycleDict,
+    LifecycleParam,
+)
 from databricks.bundles.jobs._models.local_file_info import (
     LocalFileInfo,
     LocalFileInfoDict,
@@ -648,6 +658,10 @@ from databricks.bundles.jobs._models.task_notification_settings import (
     TaskNotificationSettingsDict,
     TaskNotificationSettingsParam,
 )
+from databricks.bundles.jobs._models.task_retry_mode import (
+    TaskRetryMode,
+    TaskRetryModeParam,
+)
 from databricks.bundles.jobs._models.trigger_settings import (
     TriggerSettings,
     TriggerSettingsDict,
--- a/databricks/bundles/jobs/_models/continuous.py
+++ b/databricks/bundles/jobs/_models/continuous.py
@@ -5,6 +5,10 @@ from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrOptional
 from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam
+from databricks.bundles.jobs._models.task_retry_mode import (
+    TaskRetryMode,
+    TaskRetryModeParam,
+)
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -19,6 +23,11 @@ class Continuous:
     Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
     """
 
+    task_retry_mode: VariableOrOptional[TaskRetryMode] = None
+    """
+    Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+    """
+
     @classmethod
     def from_dict(cls, value: "ContinuousDict") -> "Self":
         return _transform(cls, value)
@@ -35,5 +44,10 @@ class ContinuousDict(TypedDict, total=False):
     Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
     """
 
+    task_retry_mode: VariableOrOptional[TaskRetryModeParam]
+    """
+    Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+    """
+
 
 ContinuousParam = ContinuousDict | Continuous
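
A sketch of opting a continuous job into task-level retries through `from_dict`; the docstring above only names the `NEVER` default, so the `ON_FAILURE` value is an assumption about the new `TaskRetryMode` enum:

```python
from databricks.bundles.jobs import Continuous

continuous = Continuous.from_dict(
    {
        "pause_status": "UNPAUSED",
        "task_retry_mode": "ON_FAILURE",  # assumed non-default TaskRetryMode value
    }
)
```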
--- a/databricks/bundles/jobs/_models/environment.py
+++ b/databricks/bundles/jobs/_models/environment.py
@@ -16,6 +16,11 @@ class Environment:
     In this minimal environment spec, only pip dependencies are supported.
     """
 
+    client: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Use `environment_version` instead.
+    """
+
     dependencies: VariableOrList[str] = field(default_factory=list)
     """
     List of pip dependencies, as supported by the version of pip in this environment.
@@ -46,6 +51,11 @@ class Environment:
 
 class EnvironmentDict(TypedDict, total=False):
     """"""
+    client: VariableOrOptional[str]
+    """
+    [DEPRECATED] Use `environment_version` instead.
+    """
+
     dependencies: VariableOrList[str]
     """
     List of pip dependencies, as supported by the version of pip in this environment.
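
Since `client` is deprecated in favor of `environment_version`, new configurations should prefer the latter. A sketch (the `environment_version` field is referenced by the docstring above but not shown in this hunk):

```python
from databricks.bundles.jobs import Environment

env = Environment.from_dict(
    {
        "environment_version": "2",  # replaces the deprecated 'client' field
        "dependencies": ["requests==2.32.3"],
    }
)
```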
--- a/databricks/bundles/jobs/_models/gcp_attributes.py
+++ b/databricks/bundles/jobs/_models/gcp_attributes.py
@@ -53,6 +53,13 @@ class GcpAttributes:
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str] = None
     """
     Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool]
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str]
     """
     Identifier for the availability zone in which the cluster resides.
--- a/databricks/bundles/jobs/_models/init_script_info.py
+++ b/databricks/bundles/jobs/_models/init_script_info.py
@@ -8,6 +8,10 @@ from databricks.bundles.jobs._models.adlsgen2_info import (
     Adlsgen2Info,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.jobs._models.dbfs_storage_info import (
+    DbfsStorageInfo,
+    DbfsStorageInfoParam,
+)
 from databricks.bundles.jobs._models.gcs_storage_info import (
     GcsStorageInfo,
     GcsStorageInfoParam,
@@ -45,6 +49,12 @@ class InitScriptInfo:
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfo] = None
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfo] = None
     """
     destination needs to be provided, e.g.
@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfoParam]
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfoParam]
     """
     destination needs to be provided, e.g.
--- a/databricks/bundles/jobs/_models/job.py
+++ b/databricks/bundles/jobs/_models/job.py
@@ -44,6 +44,7 @@ from databricks.bundles.jobs._models.jobs_health_rules import (
     JobsHealthRules,
     JobsHealthRulesParam,
 )
+from databricks.bundles.jobs._models.lifecycle import Lifecycle, LifecycleParam
 from databricks.bundles.jobs._models.performance_target import (
     PerformanceTarget,
     PerformanceTargetParam,
@@ -116,6 +117,11 @@ class Job(Resource):
     A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
     """
 
+    lifecycle: VariableOrOptional[Lifecycle] = None
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     max_concurrent_runs: VariableOrOptional[int] = None
     """
     An optional maximum allowed number of concurrent runs of the job.
@@ -256,6 +262,11 @@ class JobDict(TypedDict, total=False):
     A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
     """
 
+    lifecycle: VariableOrOptional[LifecycleParam]
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     max_concurrent_runs: VariableOrOptional[int]
     """
     An optional maximum allowed number of concurrent runs of the job.
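
A sketch of the new field in use, marking a job so that its lifecycle settings prevent it from being destroyed:

```python
from databricks.bundles.jobs import Job

job = Job.from_dict(
    {
        "name": "nightly-etl",
        "lifecycle": {"prevent_destroy": True},
    }
)
```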
--- a/databricks/bundles/jobs/_models/job_email_notifications.py
+++ b/databricks/bundles/jobs/_models/job_email_notifications.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrList
+from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -13,6 +13,12 @@ if TYPE_CHECKING:
 class JobEmailNotifications:
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
         default_factory=list
     )
@@ -53,6 +59,12 @@ class JobEmailNotifications:
 
 class JobEmailNotificationsDict(TypedDict, total=False):
     """"""
+    no_alert_for_skipped_runs: VariableOrOptional[bool]
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str]
     """
     A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.
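
The deprecation note points to `notification_settings.no_alert_for_skipped_runs`; a sketch of the preferred form (the job-level `notification_settings` field comes from `job_notification_settings.py` in the file list and is assumed to accept this key):

```python
from databricks.bundles.jobs import Job

job = Job.from_dict(
    {
        "name": "weekly-report",
        # preferred over the deprecated email_notifications.no_alert_for_skipped_runs
        "notification_settings": {"no_alert_for_skipped_runs": True},
    }
)
```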
--- a/databricks/bundles/jobs/_models/library.py
+++ b/databricks/bundles/jobs/_models/library.py
@@ -30,6 +30,11 @@ class Library:
     Specification of a CRAN library to be installed as part of the library
     """
 
+    egg: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+    """
+
     jar: VariableOrOptional[str] = None
     """
     URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
@@ -82,6 +87,11 @@ class LibraryDict(TypedDict, total=False):
     Specification of a CRAN library to be installed as part of the library
     """
 
+    egg: VariableOrOptional[str]
+    """
+    [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+    """
+
     jar: VariableOrOptional[str]
     """
     URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
--- /dev/null
+++ b/databricks/bundles/jobs/_models/lifecycle.py
@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Lifecycle:
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool] = None
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "LifecycleDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "LifecycleDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class LifecycleDict(TypedDict, total=False):
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool]
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+
+LifecycleParam = LifecycleDict | Lifecycle
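
The generated model follows the same `from_dict`/`as_dict` conventions as every other model in this package:

```python
from databricks.bundles.jobs import Lifecycle

lifecycle = Lifecycle.from_dict({"prevent_destroy": True})
assert lifecycle.prevent_destroy is True
```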
--- a/databricks/bundles/jobs/_models/spark_jar_task.py
+++ b/databricks/bundles/jobs/_models/spark_jar_task.py
@@ -3,7 +3,11 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOr, VariableOrList
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrList,
+    VariableOrOptional,
+)
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -20,6 +24,11 @@ class SparkJarTask:
     The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.
     """
 
+    jar_uri: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.
+    """
+
     parameters: VariableOrList[str] = field(default_factory=list)
     """
     Parameters passed to the main method.
@@ -27,6 +36,11 @@ class SparkJarTask:
 
     Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
     """
+    run_as_repl: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] Deprecated. A value of `false` is no longer supported.
+    """
+
     @classmethod
     def from_dict(cls, value: "SparkJarTaskDict") -> "Self":
         return _transform(cls, value)
@@ -45,6 +59,11 @@ class SparkJarTaskDict(TypedDict, total=False):
     The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.
     """
 
+    jar_uri: VariableOrOptional[str]
+    """
+    [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.
+    """
+
     parameters: VariableOrList[str]
     """
     Parameters passed to the main method.
@@ -52,5 +71,10 @@ class SparkJarTaskDict(TypedDict, total=False):
     Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
     """
 
+    run_as_repl: VariableOrOptional[bool]
+    """
+    [DEPRECATED] Deprecated. A value of `false` is no longer supported.
+    """
+
 
 SparkJarTaskParam = SparkJarTaskDict | SparkJarTask