databricks-bundles 0.269.0__tar.gz → 0.271.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (183)
  1. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/PKG-INFO +2 -2
  2. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/README.md +1 -1
  3. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/environment.py +2 -2
  4. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/spark_submit_task.py +3 -1
  5. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/task.py +18 -20
  6. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/__init__.py +16 -0
  7. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
  8. databricks_bundles-0.271.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py +95 -0
  9. databricks_bundles-0.271.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py +70 -0
  10. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/ingestion_source_type.py +2 -0
  11. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline.py +0 -6
  12. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/run_as.py +0 -2
  13. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/table_specific_config.py +22 -0
  14. databricks_bundles-0.271.0/databricks/bundles/version.py +1 -0
  15. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/pyproject.toml +1 -1
  16. databricks_bundles-0.269.0/databricks/bundles/version.py +0 -1
  17. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/LICENSE +0 -0
  18. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/__init__.py +0 -0
  19. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/__init__.py +0 -0
  20. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/build.py +0 -0
  21. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/__init__.py +0 -0
  22. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_bundle.py +0 -0
  23. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_diagnostics.py +0 -0
  24. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_load.py +0 -0
  25. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_location.py +0 -0
  26. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_resource.py +0 -0
  27. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_resource_mutator.py +0 -0
  28. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_resource_type.py +0 -0
  29. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_resources.py +0 -0
  30. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_transform.py +0 -0
  31. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_transform_to_json.py +0 -0
  32. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/core/_variable.py +0 -0
  33. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/__init__.py +0 -0
  34. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/adlsgen2_info.py +0 -0
  35. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/authentication_method.py +0 -0
  36. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/auto_scale.py +0 -0
  37. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/aws_attributes.py +0 -0
  38. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/aws_availability.py +0 -0
  39. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/azure_attributes.py +0 -0
  40. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/azure_availability.py +0 -0
  41. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +0 -0
  42. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/clients_types.py +0 -0
  43. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/cluster_log_conf.py +0 -0
  44. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/cluster_spec.py +0 -0
  45. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/compute_config.py +0 -0
  46. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/condition.py +0 -0
  47. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/condition_task.py +0 -0
  48. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/condition_task_op.py +0 -0
  49. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/continuous.py +0 -0
  50. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/cron_schedule.py +0 -0
  51. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/dashboard_task.py +0 -0
  52. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/data_security_mode.py +0 -0
  53. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/dbfs_storage_info.py +0 -0
  54. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/dbt_platform_task.py +0 -0
  55. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/dbt_task.py +0 -0
  56. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/docker_basic_auth.py +0 -0
  57. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/docker_image.py +0 -0
  58. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/ebs_volume_type.py +0 -0
  59. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +0 -0
  60. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/for_each_task.py +0 -0
  61. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/gcp_attributes.py +0 -0
  62. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/gcp_availability.py +0 -0
  63. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/gcs_storage_info.py +0 -0
  64. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/gen_ai_compute_task.py +0 -0
  65. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/git_provider.py +0 -0
  66. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/git_source.py +0 -0
  67. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/init_script_info.py +0 -0
  68. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job.py +0 -0
  69. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_cluster.py +0 -0
  70. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_email_notifications.py +0 -0
  71. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_environment.py +0 -0
  72. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_notification_settings.py +0 -0
  73. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_parameter_definition.py +0 -0
  74. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_permission.py +0 -0
  75. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_permission_level.py +0 -0
  76. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/job_run_as.py +0 -0
  77. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/jobs_health_metric.py +0 -0
  78. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/jobs_health_operator.py +0 -0
  79. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/jobs_health_rule.py +0 -0
  80. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/jobs_health_rules.py +0 -0
  81. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/library.py +0 -0
  82. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/lifecycle.py +0 -0
  83. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/local_file_info.py +0 -0
  84. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/log_analytics_info.py +0 -0
  85. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/maven_library.py +0 -0
  86. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/notebook_task.py +0 -0
  87. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/pause_status.py +0 -0
  88. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/performance_target.py +0 -0
  89. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +0 -0
  90. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +0 -0
  91. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/pipeline_params.py +0 -0
  92. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/pipeline_task.py +0 -0
  93. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/power_bi_model.py +0 -0
  94. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/power_bi_table.py +0 -0
  95. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/power_bi_task.py +0 -0
  96. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/python_py_pi_library.py +0 -0
  97. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/python_wheel_task.py +0 -0
  98. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/queue_settings.py +0 -0
  99. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/r_cran_library.py +0 -0
  100. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/run_if.py +0 -0
  101. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/run_job_task.py +0 -0
  102. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/runtime_engine.py +0 -0
  103. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/s3_storage_info.py +0 -0
  104. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/source.py +0 -0
  105. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/spark_jar_task.py +0 -0
  106. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/spark_python_task.py +0 -0
  107. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/sql_task.py +0 -0
  108. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/sql_task_alert.py +0 -0
  109. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/sql_task_dashboard.py +0 -0
  110. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/sql_task_file.py +0 -0
  111. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/sql_task_query.py +0 -0
  112. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/sql_task_subscription.py +0 -0
  113. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/storage_mode.py +0 -0
  114. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/subscription.py +0 -0
  115. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/subscription_subscriber.py +0 -0
  116. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/table_update_trigger_configuration.py +0 -0
  117. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/task_dependency.py +0 -0
  118. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/task_email_notifications.py +0 -0
  119. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/task_notification_settings.py +0 -0
  120. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/task_retry_mode.py +0 -0
  121. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/trigger_settings.py +0 -0
  122. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/volumes_storage_info.py +0 -0
  123. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/webhook.py +0 -0
  124. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/webhook_notifications.py +0 -0
  125. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/workload_type.py +0 -0
  126. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/jobs/_models/workspace_storage_info.py +0 -0
  127. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/adlsgen2_info.py +0 -0
  128. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/aws_attributes.py +0 -0
  129. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/aws_availability.py +0 -0
  130. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/azure_attributes.py +0 -0
  131. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/azure_availability.py +0 -0
  132. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/cluster_log_conf.py +0 -0
  133. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/day_of_week.py +0 -0
  134. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/dbfs_storage_info.py +0 -0
  135. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/ebs_volume_type.py +0 -0
  136. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/event_log_spec.py +0 -0
  137. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/file_library.py +0 -0
  138. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/filters.py +0 -0
  139. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/gcp_attributes.py +0 -0
  140. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/gcp_availability.py +0 -0
  141. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/gcs_storage_info.py +0 -0
  142. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/ingestion_config.py +0 -0
  143. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +0 -0
  144. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py +0 -0
  145. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/init_script_info.py +0 -0
  146. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/lifecycle.py +0 -0
  147. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/local_file_info.py +0 -0
  148. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/log_analytics_info.py +0 -0
  149. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/maven_library.py +0 -0
  150. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/notebook_library.py +0 -0
  151. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/notifications.py +0 -0
  152. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/path_pattern.py +0 -0
  153. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline_cluster.py +0 -0
  154. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py +0 -0
  155. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py +0 -0
  156. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline_library.py +0 -0
  157. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline_permission.py +0 -0
  158. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipeline_permission_level.py +0 -0
  159. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/pipelines_environment.py +0 -0
  160. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/postgres_catalog_config.py +0 -0
  161. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/postgres_slot_config.py +0 -0
  162. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/report_spec.py +0 -0
  163. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/restart_window.py +0 -0
  164. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/s3_storage_info.py +0 -0
  165. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/schema_spec.py +0 -0
  166. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/source_catalog_config.py +0 -0
  167. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/source_config.py +0 -0
  168. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/table_spec.py +0 -0
  169. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +0 -0
  170. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/volumes_storage_info.py +0 -0
  171. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/pipelines/_models/workspace_storage_info.py +0 -0
  172. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/py.typed +0 -0
  173. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/schemas/__init__.py +0 -0
  174. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/schemas/_models/lifecycle.py +0 -0
  175. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/schemas/_models/schema.py +0 -0
  176. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/schemas/_models/schema_grant.py +0 -0
  177. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/schemas/_models/schema_grant_privilege.py +0 -0
  178. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/volumes/__init__.py +0 -0
  179. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/volumes/_models/lifecycle.py +0 -0
  180. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/volumes/_models/volume.py +0 -0
  181. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/volumes/_models/volume_grant.py +0 -0
  182. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/volumes/_models/volume_grant_privilege.py +0 -0
  183. {databricks_bundles-0.269.0 → databricks_bundles-0.271.0}/databricks/bundles/volumes/_models/volume_type.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-bundles
- Version: 0.269.0
+ Version: 0.271.0
  Summary: Python support for Databricks Asset Bundles
  Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
  Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim

  To use `databricks-bundles`, you must first:

- 1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.269.0 or above
+ 1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.271.0 or above
  2. Authenticate to your Databricks workspace if you have not done so already:

  ```bash
README.md
@@ -13,7 +13,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim

  To use `databricks-bundles`, you must first:

- 1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.269.0 or above
+ 1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.271.0 or above
  2. Authenticate to your Databricks workspace if you have not done so already:

  ```bash
databricks/bundles/jobs/_models/environment.py
@@ -33,7 +33,7 @@ class Environment:
  The version is a string, consisting of an integer.
  """

- jar_dependencies: VariableOrList[str] = field(default_factory=list)
+ java_dependencies: VariableOrList[str] = field(default_factory=list)
  """
  :meta private: [EXPERIMENTAL]

@@ -68,7 +68,7 @@ class EnvironmentDict(TypedDict, total=False):
  The version is a string, consisting of an integer.
  """

- jar_dependencies: VariableOrList[str]
+ java_dependencies: VariableOrList[str]
  """
  :meta private: [EXPERIMENTAL]

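The experimental field was renamed from `jar_dependencies` to `java_dependencies` in both the dataclass and its TypedDict. A minimal sketch of the new name, assuming `Environment` is re-exported from `databricks.bundles.jobs` and that the remaining fields have defaults; the jar path is hypothetical:

```python
# Sketch of the renamed field (jar_dependencies -> java_dependencies).
# The import path and the jar location are assumptions, not from the diff.
from databricks.bundles.jobs import Environment

env = Environment(
    java_dependencies=["/Volumes/main/default/libs/my-udfs.jar"],
)
```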
databricks/bundles/jobs/_models/spark_submit_task.py
@@ -11,7 +11,9 @@ if TYPE_CHECKING:

  @dataclass(kw_only=True)
  class SparkSubmitTask:
- """"""
+ """
+ [DEPRECATED]
+ """

  parameters: VariableOrList[str] = field(default_factory=list)
  """
databricks/bundles/jobs/_models/task.py
@@ -104,7 +104,7 @@ class Task:

  clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTask] = None
  """
- The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook
+ The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
  when the `clean_rooms_notebook_task` field is present.
  """

@@ -145,6 +145,13 @@ class Task:
  An option to disable auto optimization in serverless
  """

+ disabled: VariableOrOptional[bool] = None
+ """
+ :meta private: [EXPERIMENTAL]
+
+ An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+ """
+
  email_notifications: VariableOrOptional[TaskEmailNotifications] = None
  """
  An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
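The new experimental `disabled` flag lets a task stay defined in a job without ever running. A hedged sketch; the `Task`/`NotebookTask` import paths, the task key, and the notebook path are assumptions:

```python
# Sketch of the new experimental `disabled` flag on Task.
# Import paths and the notebook path are hypothetical.
from databricks.bundles.jobs import NotebookTask, Task

task = Task(
    task_key="nightly_report",
    notebook_task=NotebookTask(notebook_path="/Workspace/reports/nightly"),
    disabled=True,  # the task is skipped even when the enclosing job runs
)
```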
@@ -261,15 +268,7 @@

  spark_submit_task: VariableOrOptional[SparkSubmitTask] = None
  """
- (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.
-
- In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
- `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
- By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
- The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+ [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
  """

  sql_task: VariableOrOptional[SqlTask] = None
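Since the docstring now steers users toward `spark_jar_task`, here is a hedged migration sketch. The `SparkJarTask` field names are assumptions based on the `spark_jar_task.py` model shipped in this package; the main class and parameters are hypothetical:

```python
# Hedged sketch of the recommended replacement for spark_submit_task.
# main_class_name and parameters are hypothetical values.
from databricks.bundles.jobs import SparkJarTask, Task

task = Task(
    task_key="run_jar",
    spark_jar_task=SparkJarTask(
        main_class_name="com.example.Main",
        parameters=["--input", "/data/in"],
    ),
)
```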
@@ -307,7 +306,7 @@ class TaskDict(TypedDict, total=False):

  clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTaskParam]
  """
- The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook
+ The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
  when the `clean_rooms_notebook_task` field is present.
  """

@@ -348,6 +347,13 @@ class TaskDict(TypedDict, total=False):
  An option to disable auto optimization in serverless
  """

+ disabled: VariableOrOptional[bool]
+ """
+ :meta private: [EXPERIMENTAL]
+
+ An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+ """
+
  email_notifications: VariableOrOptional[TaskEmailNotificationsParam]
  """
  An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
@@ -464,15 +470,7 @@ class TaskDict(TypedDict, total=False):

  spark_submit_task: VariableOrOptional[SparkSubmitTaskParam]
  """
- (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.
-
- In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
- `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
- By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
- The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+ [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
  """

  sql_task: VariableOrOptional[SqlTaskParam]
databricks/bundles/pipelines/__init__.py
@@ -51,6 +51,12 @@ __all__ = [
  "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig",
  "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict",
  "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam",
+ "IngestionPipelineDefinitionWorkdayReportParameters",
+ "IngestionPipelineDefinitionWorkdayReportParametersDict",
+ "IngestionPipelineDefinitionWorkdayReportParametersParam",
+ "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue",
+ "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict",
+ "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam",
  "IngestionSourceType",
  "IngestionSourceTypeParam",
  "InitScriptInfo",
@@ -230,6 +236,16 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_sp
  IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict,
  IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
  )
+ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import (
+ IngestionPipelineDefinitionWorkdayReportParameters,
+ IngestionPipelineDefinitionWorkdayReportParametersDict,
+ IngestionPipelineDefinitionWorkdayReportParametersParam,
+ )
+ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import (
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue,
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict,
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam,
+ )
  from databricks.bundles.pipelines._models.ingestion_source_type import (
  IngestionSourceType,
  IngestionSourceTypeParam,
databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -39,6 +39,15 @@ class IngestionPipelineDefinition:
  Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
  """

+ netsuite_jar_path: VariableOrOptional[str] = None
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Netsuite only configuration. When the field is set for a netsuite connector,
+ the jar stored in the field will be validated and added to the classpath of
+ pipeline's cluster.
+ """
+
  objects: VariableOrList[IngestionConfig] = field(default_factory=list)
  """
  Required. Settings specifying tables to replicate and the destination for the replicated tables.
@@ -84,6 +93,15 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
  Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
  """

+ netsuite_jar_path: VariableOrOptional[str]
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Netsuite only configuration. When the field is set for a netsuite connector,
+ the jar stored in the field will be validated and added to the classpath of
+ pipeline's cluster.
+ """
+
  objects: VariableOrList[IngestionConfigParam]
  """
  Required. Settings specifying tables to replicate and the destination for the replicated tables.
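A hedged sketch of the new experimental `netsuite_jar_path` field; the Volumes path is hypothetical, the import path assumes the class is re-exported from `databricks.bundles.pipelines`, and the ingestion specs that `objects` requires in practice are elided:

```python
# Sketch of the experimental netsuite_jar_path field; the path is hypothetical.
from databricks.bundles.pipelines import IngestionPipelineDefinition

definition = IngestionPipelineDefinition(
    netsuite_jar_path="/Volumes/main/default/libs/netsuite-connector.jar",
    objects=[],  # required in practice: table/schema/report specs go here
)
```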
databricks_bundles-0.271.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py (new file)
@@ -0,0 +1,95 @@
+ from dataclasses import dataclass, field
+ from typing import TYPE_CHECKING, TypedDict
+
+ from databricks.bundles.core._transform import _transform
+ from databricks.bundles.core._transform_to_json import _transform_to_json_value
+ from databricks.bundles.core._variable import (
+ VariableOrDict,
+ VariableOrList,
+ VariableOrOptional,
+ )
+ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import (
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue,
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam,
+ )
+
+ if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
+ @dataclass(kw_only=True)
+ class IngestionPipelineDefinitionWorkdayReportParameters:
+ """
+ :meta private: [EXPERIMENTAL]
+ """
+
+ incremental: VariableOrOptional[bool] = None
+ """
+ [DEPRECATED] (Optional) Marks the report as incremental.
+ This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+ controlled by the `parameters` field.
+ """
+
+ parameters: VariableOrDict[str] = field(default_factory=dict)
+ """
+ Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+ and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+ Example:
+ {
+ "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+ "end_date": "{ current_date() - INTERVAL 1 DAY }"
+ }
+ """
+
+ report_parameters: VariableOrList[
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue
+ ] = field(default_factory=list)
+ """
+ [DEPRECATED] (Optional) Additional custom parameters for Workday Report
+ This field is deprecated and should not be used. Use `parameters` instead.
+ """
+
+ @classmethod
+ def from_dict(
+ cls, value: "IngestionPipelineDefinitionWorkdayReportParametersDict"
+ ) -> "Self":
+ return _transform(cls, value)
+
+ def as_dict(self) -> "IngestionPipelineDefinitionWorkdayReportParametersDict":
+ return _transform_to_json_value(self) # type:ignore
+
+
+ class IngestionPipelineDefinitionWorkdayReportParametersDict(TypedDict, total=False):
+ """"""
+
+ incremental: VariableOrOptional[bool]
+ """
+ [DEPRECATED] (Optional) Marks the report as incremental.
+ This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+ controlled by the `parameters` field.
+ """
+
+ parameters: VariableOrDict[str]
+ """
+ Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+ and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+ Example:
+ {
+ "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+ "end_date": "{ current_date() - INTERVAL 1 DAY }"
+ }
+ """
+
+ report_parameters: VariableOrList[
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam
+ ]
+ """
+ [DEPRECATED] (Optional) Additional custom parameters for Workday Report
+ This field is deprecated and should not be used. Use `parameters` instead.
+ """
+
+
+ IngestionPipelineDefinitionWorkdayReportParametersParam = (
+ IngestionPipelineDefinitionWorkdayReportParametersDict
+ | IngestionPipelineDefinitionWorkdayReportParameters
+ )
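The new class can be constructed directly or round-tripped through its TypedDict form; the expressions below come from the docstring's own example, and the import is grounded in the `__init__.py` hunk above:

```python
# Building the new Workday report parameters; the expressions mirror the
# docstring example in the diff above.
from databricks.bundles.pipelines import (
    IngestionPipelineDefinitionWorkdayReportParameters,
)

params = IngestionPipelineDefinitionWorkdayReportParameters(
    parameters={
        "start_date": '{ coalesce(current_offset(), date("2025-02-01")) }',
        "end_date": "{ current_date() - INTERVAL 1 DAY }",
    }
)

# Round-trip through the TypedDict form:
same = IngestionPipelineDefinitionWorkdayReportParameters.from_dict(params.as_dict())
```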
databricks_bundles-0.271.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py (new file)
@@ -0,0 +1,70 @@
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, TypedDict
+
+ from databricks.bundles.core._transform import _transform
+ from databricks.bundles.core._transform_to_json import _transform_to_json_value
+ from databricks.bundles.core._variable import VariableOrOptional
+
+ if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
+ @dataclass(kw_only=True)
+ class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue:
+ """
+ :meta private: [EXPERIMENTAL]
+
+ [DEPRECATED]
+ """
+
+ key: VariableOrOptional[str] = None
+ """
+ Key for the report parameter, can be a column name or other metadata
+ """
+
+ value: VariableOrOptional[str] = None
+ """
+ Value for the report parameter.
+ Possible values it can take are these sql functions:
+ 1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+ 2. current_date()
+ 3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
+ """
+
+ @classmethod
+ def from_dict(
+ cls,
+ value: "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict",
+ ) -> "Self":
+ return _transform(cls, value)
+
+ def as_dict(
+ self,
+ ) -> "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict":
+ return _transform_to_json_value(self) # type:ignore
+
+
+ class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict(
+ TypedDict, total=False
+ ):
+ """"""
+
+ key: VariableOrOptional[str]
+ """
+ Key for the report parameter, can be a column name or other metadata
+ """
+
+ value: VariableOrOptional[str]
+ """
+ Value for the report parameter.
+ Possible values it can take are these sql functions:
+ 1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+ 2. current_date()
+ 3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
+ """
+
+
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam = (
+ IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict
+ | IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue
+ )
databricks/bundles/pipelines/_models/ingestion_source_type.py
@@ -21,6 +21,7 @@ class IngestionSourceType(Enum):
  DYNAMICS365 = "DYNAMICS365"
  CONFLUENCE = "CONFLUENCE"
  META_MARKETING = "META_MARKETING"
+ FOREIGN_CATALOG = "FOREIGN_CATALOG"


  IngestionSourceTypeParam = (
@@ -43,6 +44,7 @@ IngestionSourceTypeParam = (
  "DYNAMICS365",
  "CONFLUENCE",
  "META_MARKETING",
+ "FOREIGN_CATALOG",
  ]
  | IngestionSourceType
  )
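The new source type is usable both as an enum member and as the matching string literal, since `IngestionSourceTypeParam` is a union of the enum and its literal names:

```python
# The new enum member, and the equivalent string literal accepted wherever
# IngestionSourceTypeParam is expected.
from databricks.bundles.pipelines import IngestionSourceType

source = IngestionSourceType.FOREIGN_CATALOG
source_as_literal = "FOREIGN_CATALOG"
```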
databricks/bundles/pipelines/_models/pipeline.py
@@ -184,9 +184,6 @@ class Pipeline(Resource):
  """

  run_as: VariableOrOptional[RunAs] = None
- """
- :meta private: [EXPERIMENTAL]
- """

  schema: VariableOrOptional[str] = None
  """
@@ -347,9 +344,6 @@ class PipelineDict(TypedDict, total=False):
  """

  run_as: VariableOrOptional[RunAsParam]
- """
- :meta private: [EXPERIMENTAL]
- """

  schema: VariableOrOptional[str]
  """
databricks/bundles/pipelines/_models/run_as.py
@@ -12,8 +12,6 @@ if TYPE_CHECKING:
  @dataclass(kw_only=True)
  class RunAs:
  """
- :meta private: [EXPERIMENTAL]
-
  Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.

  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.
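With the experimental marker dropped from `run_as` on both `Pipeline` and `RunAs`, here is a hedged sketch of setting it. The pipeline name and service principal are hypothetical, and the imports assume both classes are re-exported from `databricks.bundles.pipelines`:

```python
# run_as is no longer marked experimental on pipelines. Names are hypothetical.
from databricks.bundles.pipelines import Pipeline, RunAs

pipeline = Pipeline(
    name="orders_ingest",
    run_as=RunAs(service_principal_name="1234abcd-app-id"),
    # set user_name instead to run as a user; specifying both raises an error
)
```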
databricks/bundles/pipelines/_models/table_specific_config.py
@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_sp
  IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
  IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
  )
+ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import (
+ IngestionPipelineDefinitionWorkdayReportParameters,
+ IngestionPipelineDefinitionWorkdayReportParametersParam,
+ )
  from databricks.bundles.pipelines._models.table_specific_config_scd_type import (
  TableSpecificConfigScdType,
  TableSpecificConfigScdTypeParam,
@@ -71,6 +75,15 @@ class TableSpecificConfig:
  The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
  """

+ workday_report_parameters: VariableOrOptional[
+ IngestionPipelineDefinitionWorkdayReportParameters
+ ] = None
+ """
+ :meta private: [EXPERIMENTAL]
+
+ (Optional) Additional custom parameters for Workday Report
+ """
+
  @classmethod
  def from_dict(cls, value: "TableSpecificConfigDict") -> "Self":
  return _transform(cls, value)
@@ -132,5 +145,14 @@ class TableSpecificConfigDict(TypedDict, total=False):
  The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
  """

+ workday_report_parameters: VariableOrOptional[
+ IngestionPipelineDefinitionWorkdayReportParametersParam
+ ]
+ """
+ :meta private: [EXPERIMENTAL]
+
+ (Optional) Additional custom parameters for Workday Report
+ """
+

  TableSpecificConfigParam = TableSpecificConfigDict | TableSpecificConfig
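A hedged sketch of wiring Workday report parameters into a table-specific config; the imports assume re-export from `databricks.bundles.pipelines`, and the parameter expression is hypothetical:

```python
# Attaching Workday report parameters to a table-specific config.
# Import paths and the expression value are assumptions.
from databricks.bundles.pipelines import (
    IngestionPipelineDefinitionWorkdayReportParameters,
    TableSpecificConfig,
)

config = TableSpecificConfig(
    workday_report_parameters=IngestionPipelineDefinitionWorkdayReportParameters(
        parameters={"end_date": "{ current_date() }"},
    ),
)
```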
databricks_bundles-0.271.0/databricks/bundles/version.py (new file)
@@ -0,0 +1 @@
+ __version__ = "0.271.0"
pyproject.toml
@@ -1,7 +1,7 @@
  [project]
  name = "databricks-bundles"
  description = "Python support for Databricks Asset Bundles"
- version = "0.269.0"
+ version = "0.271.0"

  authors = [
  { name = "Gleb Kanterov", email = "gleb.kanterov@databricks.com" },
databricks_bundles-0.269.0/databricks/bundles/version.py (removed)
@@ -1 +0,0 @@
- __version__ = "0.269.0"