databricks-bundles 0.278.0__tar.gz → 0.280.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (186)
  1. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/PKG-INFO +2 -2
  2. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/README.md +1 -1
  3. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/__init__.py +14 -0
  4. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/aws_attributes.py +2 -2
  5. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/cluster_spec.py +2 -4
  6. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/dashboard_task.py +4 -1
  7. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/environment.py +0 -10
  8. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job.py +2 -4
  9. databricks_bundles-0.280.0/databricks/bundles/jobs/_models/model_trigger_configuration.py +98 -0
  10. databricks_bundles-0.280.0/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py +18 -0
  11. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/trigger_settings.py +14 -0
  12. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/__init__.py +8 -0
  13. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/aws_attributes.py +2 -2
  14. databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/connection_parameters.py +50 -0
  15. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ingestion_config.py +1 -4
  16. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +20 -2
  17. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +20 -12
  18. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/postgres_catalog_config.py +0 -6
  19. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/postgres_slot_config.py +0 -10
  20. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/source_catalog_config.py +0 -10
  21. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/source_config.py +1 -7
  22. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/table_specific_config.py +2 -6
  23. databricks_bundles-0.280.0/databricks/bundles/version.py +1 -0
  24. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/pyproject.toml +3 -3
  25. databricks_bundles-0.278.0/databricks/bundles/version.py +0 -1
  26. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/LICENSE +0 -0
  27. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/__init__.py +0 -0
  28. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/__init__.py +0 -0
  29. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/build.py +0 -0
  30. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/__init__.py +0 -0
  31. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_bundle.py +0 -0
  32. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_diagnostics.py +0 -0
  33. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_load.py +0 -0
  34. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_location.py +0 -0
  35. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_resource.py +0 -0
  36. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_resource_mutator.py +0 -0
  37. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_resource_type.py +0 -0
  38. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_resources.py +0 -0
  39. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_transform.py +0 -0
  40. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_transform_to_json.py +0 -0
  41. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/core/_variable.py +0 -0
  42. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/adlsgen2_info.py +0 -0
  43. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/authentication_method.py +0 -0
  44. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/auto_scale.py +0 -0
  45. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/aws_availability.py +0 -0
  46. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/azure_attributes.py +0 -0
  47. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/azure_availability.py +0 -0
  48. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +0 -0
  49. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/clients_types.py +0 -0
  50. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/cluster_log_conf.py +0 -0
  51. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/compute_config.py +0 -0
  52. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/condition.py +0 -0
  53. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/condition_task.py +0 -0
  54. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/condition_task_op.py +0 -0
  55. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/continuous.py +0 -0
  56. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/cron_schedule.py +0 -0
  57. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/data_security_mode.py +0 -0
  58. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/dbfs_storage_info.py +0 -0
  59. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/dbt_platform_task.py +0 -0
  60. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/dbt_task.py +0 -0
  61. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/docker_basic_auth.py +0 -0
  62. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/docker_image.py +0 -0
  63. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/ebs_volume_type.py +0 -0
  64. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +0 -0
  65. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/for_each_task.py +0 -0
  66. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/gcp_attributes.py +0 -0
  67. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/gcp_availability.py +0 -0
  68. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/gcs_storage_info.py +0 -0
  69. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/gen_ai_compute_task.py +0 -0
  70. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/git_provider.py +0 -0
  71. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/git_source.py +0 -0
  72. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/init_script_info.py +0 -0
  73. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_cluster.py +0 -0
  74. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_email_notifications.py +0 -0
  75. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_environment.py +0 -0
  76. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_notification_settings.py +0 -0
  77. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_parameter_definition.py +0 -0
  78. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_permission.py +0 -0
  79. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_permission_level.py +0 -0
  80. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/job_run_as.py +0 -0
  81. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/jobs_health_metric.py +0 -0
  82. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/jobs_health_operator.py +0 -0
  83. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/jobs_health_rule.py +0 -0
  84. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/jobs_health_rules.py +0 -0
  85. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/kind.py +0 -0
  86. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/library.py +0 -0
  87. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/lifecycle.py +0 -0
  88. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/local_file_info.py +0 -0
  89. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/log_analytics_info.py +0 -0
  90. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/maven_library.py +0 -0
  91. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/notebook_task.py +0 -0
  92. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/pause_status.py +0 -0
  93. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/performance_target.py +0 -0
  94. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +0 -0
  95. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +0 -0
  96. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/pipeline_params.py +0 -0
  97. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/pipeline_task.py +0 -0
  98. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/power_bi_model.py +0 -0
  99. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/power_bi_table.py +0 -0
  100. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/power_bi_task.py +0 -0
  101. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/python_py_pi_library.py +0 -0
  102. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/python_wheel_task.py +0 -0
  103. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/queue_settings.py +0 -0
  104. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/r_cran_library.py +0 -0
  105. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/run_if.py +0 -0
  106. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/run_job_task.py +0 -0
  107. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/runtime_engine.py +0 -0
  108. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/s3_storage_info.py +0 -0
  109. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/source.py +0 -0
  110. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/spark_jar_task.py +0 -0
  111. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/spark_python_task.py +0 -0
  112. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/spark_submit_task.py +0 -0
  113. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/sql_task.py +0 -0
  114. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/sql_task_alert.py +0 -0
  115. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/sql_task_dashboard.py +0 -0
  116. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/sql_task_file.py +0 -0
  117. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/sql_task_query.py +0 -0
  118. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/sql_task_subscription.py +0 -0
  119. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/storage_mode.py +0 -0
  120. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/subscription.py +0 -0
  121. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/subscription_subscriber.py +0 -0
  122. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/table_update_trigger_configuration.py +0 -0
  123. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/task.py +0 -0
  124. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/task_dependency.py +0 -0
  125. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/task_email_notifications.py +0 -0
  126. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/task_notification_settings.py +0 -0
  127. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/task_retry_mode.py +0 -0
  128. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/volumes_storage_info.py +0 -0
  129. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/webhook.py +0 -0
  130. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/webhook_notifications.py +0 -0
  131. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/workload_type.py +0 -0
  132. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/jobs/_models/workspace_storage_info.py +0 -0
  133. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/adlsgen2_info.py +0 -0
  134. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/aws_availability.py +0 -0
  135. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/azure_attributes.py +0 -0
  136. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/azure_availability.py +0 -0
  137. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/cluster_log_conf.py +0 -0
  138. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/day_of_week.py +0 -0
  139. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/dbfs_storage_info.py +0 -0
  140. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ebs_volume_type.py +0 -0
  141. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/event_log_spec.py +0 -0
  142. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/file_library.py +0 -0
  143. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/filters.py +0 -0
  144. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/gcp_attributes.py +0 -0
  145. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/gcp_availability.py +0 -0
  146. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/gcs_storage_info.py +0 -0
  147. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py +0 -0
  148. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py +0 -0
  149. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py +0 -0
  150. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/init_script_info.py +0 -0
  151. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/lifecycle.py +0 -0
  152. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/local_file_info.py +0 -0
  153. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/log_analytics_info.py +0 -0
  154. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/maven_library.py +0 -0
  155. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/notebook_library.py +0 -0
  156. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/notifications.py +0 -0
  157. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/path_pattern.py +0 -0
  158. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline.py +0 -0
  159. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline_cluster.py +0 -0
  160. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py +0 -0
  161. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py +0 -0
  162. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline_library.py +0 -0
  163. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline_permission.py +0 -0
  164. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipeline_permission_level.py +0 -0
  165. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/pipelines_environment.py +0 -0
  166. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/report_spec.py +0 -0
  167. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/restart_window.py +0 -0
  168. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/run_as.py +0 -0
  169. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/s3_storage_info.py +0 -0
  170. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/schema_spec.py +0 -0
  171. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/table_spec.py +0 -0
  172. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py +0 -0
  173. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/volumes_storage_info.py +0 -0
  174. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/pipelines/_models/workspace_storage_info.py +0 -0
  175. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/py.typed +0 -0
  176. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/schemas/__init__.py +0 -0
  177. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/schemas/_models/lifecycle.py +0 -0
  178. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/schemas/_models/schema.py +0 -0
  179. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/schemas/_models/schema_grant.py +0 -0
  180. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/schemas/_models/schema_grant_privilege.py +0 -0
  181. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/volumes/__init__.py +0 -0
  182. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/volumes/_models/lifecycle.py +0 -0
  183. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/volumes/_models/volume.py +0 -0
  184. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/volumes/_models/volume_grant.py +0 -0
  185. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/volumes/_models/volume_grant_privilege.py +0 -0
  186. {databricks_bundles-0.278.0 → databricks_bundles-0.280.0}/databricks/bundles/volumes/_models/volume_type.py +0 -0
--- databricks_bundles-0.278.0/PKG-INFO
+++ databricks_bundles-0.280.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.278.0
+Version: 0.280.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/python/

 To use `databricks-bundles`, you must first:

-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.278.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.280.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:

 ```bash
--- databricks_bundles-0.278.0/README.md
+++ databricks_bundles-0.280.0/README.md
@@ -13,7 +13,7 @@ Reference documentation is available at https://databricks.github.io/cli/python/

 To use `databricks-bundles`, you must first:

-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.278.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.280.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:

 ```bash
--- databricks_bundles-0.278.0/databricks/bundles/jobs/__init__.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/__init__.py
@@ -148,6 +148,11 @@ __all__ = [
     "MavenLibrary",
     "MavenLibraryDict",
     "MavenLibraryParam",
+    "ModelTriggerConfiguration",
+    "ModelTriggerConfigurationCondition",
+    "ModelTriggerConfigurationConditionParam",
+    "ModelTriggerConfigurationDict",
+    "ModelTriggerConfigurationParam",
     "NotebookTask",
     "NotebookTaskDict",
     "NotebookTaskParam",
@@ -508,6 +513,15 @@ from databricks.bundles.jobs._models.maven_library import (
     MavenLibraryDict,
     MavenLibraryParam,
 )
+from databricks.bundles.jobs._models.model_trigger_configuration import (
+    ModelTriggerConfiguration,
+    ModelTriggerConfigurationDict,
+    ModelTriggerConfigurationParam,
+)
+from databricks.bundles.jobs._models.model_trigger_configuration_condition import (
+    ModelTriggerConfigurationCondition,
+    ModelTriggerConfigurationConditionParam,
+)
 from databricks.bundles.jobs._models.notebook_task import (
     NotebookTask,
     NotebookTaskDict,
--- databricks_bundles-0.278.0/databricks/bundles/jobs/_models/aws_attributes.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/aws_attributes.py
@@ -104,7 +104,7 @@ class AwsAttributes:
     This string will be of a form like "us-west-2a". The provided availability
     zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
     is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
-    This is an optional field at cluster creation, and if not specified, a default zone will be used.
+    This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
     If the zone specified is "auto", will try to place cluster in a zone with high availability,
     and will retry placement in a different AZ if there is not enough capacity.

@@ -204,7 +204,7 @@ class AwsAttributesDict(TypedDict, total=False):
     This string will be of a form like "us-west-2a". The provided availability
     zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
     is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
-    This is an optional field at cluster creation, and if not specified, a default zone will be used.
+    This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
     If the zone specified is "auto", will try to place cluster in a zone with high availability,
     and will retry placement in a different AZ if there is not enough capacity.

--- databricks_bundles-0.278.0/databricks/bundles/jobs/_models/cluster_spec.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/cluster_spec.py
@@ -138,8 +138,7 @@ class ClusterSpec:
     enable_elastic_disk: VariableOrOptional[bool] = None
     """
     Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
-    space when its Spark workers are running low on disk space. This feature requires specific AWS
-    permissions to function correctly - refer to the User Guide for more details.
+    space when its Spark workers are running low on disk space.
     """

     enable_local_disk_encryption: VariableOrOptional[bool] = None
@@ -353,8 +352,7 @@ class ClusterSpecDict(TypedDict, total=False):
     enable_elastic_disk: VariableOrOptional[bool]
     """
     Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
-    space when its Spark workers are running low on disk space. This feature requires specific AWS
-    permissions to function correctly - refer to the User Guide for more details.
+    space when its Spark workers are running low on disk space.
     """

     enable_local_disk_encryption: VariableOrOptional[bool]
--- databricks_bundles-0.278.0/databricks/bundles/jobs/_models/dashboard_task.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/dashboard_task.py
@@ -4,7 +4,10 @@ from typing import TYPE_CHECKING, TypedDict
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrOptional
-from databricks.bundles.jobs._models.subscription import Subscription, SubscriptionParam
+from databricks.bundles.jobs._models.subscription import (
+    Subscription,
+    SubscriptionParam,
+)

 if TYPE_CHECKING:
     from typing_extensions import Self
--- databricks_bundles-0.278.0/databricks/bundles/jobs/_models/environment.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/environment.py
@@ -34,11 +34,6 @@ class Environment:
     """

     java_dependencies: VariableOrList[str] = field(default_factory=list)
-    """
-    :meta private: [EXPERIMENTAL]
-
-    List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`.
-    """

     @classmethod
     def from_dict(cls, value: "EnvironmentDict") -> "Self":
@@ -69,11 +64,6 @@ class EnvironmentDict(TypedDict, total=False):
     """

     java_dependencies: VariableOrList[str]
-    """
-    :meta private: [EXPERIMENTAL]
-
-    List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`.
-    """


 EnvironmentParam = EnvironmentDict | Environment
--- databricks_bundles-0.278.0/databricks/bundles/jobs/_models/job.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/job.py
@@ -96,8 +96,7 @@ class Job(Resource):
     environments: VariableOrList[JobEnvironment] = field(default_factory=list)
     """
     A list of task execution environment specifications that can be referenced by serverless tasks of this job.
-    An environment is required to be present for serverless tasks.
-    For serverless notebook tasks, the environment is accessible in the notebook environment panel.
+    For serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.
     For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.
     """

@@ -241,8 +240,7 @@ class JobDict(TypedDict, total=False):
     environments: VariableOrList[JobEnvironmentParam]
     """
     A list of task execution environment specifications that can be referenced by serverless tasks of this job.
-    An environment is required to be present for serverless tasks.
-    For serverless notebook tasks, the environment is accessible in the notebook environment panel.
+    For serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.
     For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.
     """

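The reworded `environments` docs above describe how serverless tasks resolve their environment. A minimal sketch of that relationship (the job name, dependency, and `spec` values are hypothetical; it assumes `Job.from_dict` accepts the standard jobs-API fields):

```python
from databricks.bundles.jobs import Job

# Hypothetical job: the serverless Python-wheel task must reference one of
# the job's environments via environment_key; a serverless notebook task
# could omit it and fall back to the notebook's own environment.
job = Job.from_dict(
    {
        "name": "my_serverless_job",
        "environments": [
            {
                "environment_key": "default",
                "spec": {"client": "1", "dependencies": ["my-wheel==1.0"]},
            }
        ],
        "tasks": [
            {
                "task_key": "main",
                "environment_key": "default",
                "python_wheel_task": {
                    "package_name": "my_wheel",
                    "entry_point": "main",
                },
            }
        ],
    }
)
```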
--- /dev/null
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/model_trigger_configuration.py
@@ -0,0 +1,98 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrList,
+    VariableOrOptional,
+)
+from databricks.bundles.jobs._models.model_trigger_configuration_condition import (
+    ModelTriggerConfigurationCondition,
+    ModelTriggerConfigurationConditionParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class ModelTriggerConfiguration:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    condition: VariableOr[ModelTriggerConfigurationCondition]
+    """
+    The condition based on which to trigger a job run.
+    """
+
+    aliases: VariableOrList[str] = field(default_factory=list)
+    """
+    Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.
+    """
+
+    min_time_between_triggers_seconds: VariableOrOptional[int] = None
+    """
+    If set, the trigger starts a run only after the specified amount of time has passed since
+    the last time the trigger fired. The minimum allowed value is 60 seconds.
+    """
+
+    securable_name: VariableOrOptional[str] = None
+    """
+    Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level triggers,
+    "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of metastore-level triggers.
+    """
+
+    wait_after_last_change_seconds: VariableOrOptional[int] = None
+    """
+    If set, the trigger starts a run only after no model updates have occurred for the specified time
+    and can be used to wait for a series of model updates before triggering a run. The
+    minimum allowed value is 60 seconds.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "ModelTriggerConfigurationDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "ModelTriggerConfigurationDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class ModelTriggerConfigurationDict(TypedDict, total=False):
+    """"""
+
+    condition: VariableOr[ModelTriggerConfigurationConditionParam]
+    """
+    The condition based on which to trigger a job run.
+    """
+
+    aliases: VariableOrList[str]
+    """
+    Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.
+    """
+
+    min_time_between_triggers_seconds: VariableOrOptional[int]
+    """
+    If set, the trigger starts a run only after the specified amount of time has passed since
+    the last time the trigger fired. The minimum allowed value is 60 seconds.
+    """
+
+    securable_name: VariableOrOptional[str]
+    """
+    Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level triggers,
+    "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of metastore-level triggers.
+    """
+
+    wait_after_last_change_seconds: VariableOrOptional[int]
+    """
+    If set, the trigger starts a run only after no model updates have occurred for the specified time
+    and can be used to wait for a series of model updates before triggering a run. The
+    minimum allowed value is 60 seconds.
+    """
+
+
+ModelTriggerConfigurationParam = (
+    ModelTriggerConfigurationDict | ModelTriggerConfiguration
+)
--- /dev/null
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py
@@ -0,0 +1,18 @@
+from enum import Enum
+from typing import Literal
+
+
+class ModelTriggerConfigurationCondition(Enum):
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    MODEL_CREATED = "MODEL_CREATED"
+    MODEL_VERSION_READY = "MODEL_VERSION_READY"
+    MODEL_ALIAS_SET = "MODEL_ALIAS_SET"
+
+
+ModelTriggerConfigurationConditionParam = (
+    Literal["MODEL_CREATED", "MODEL_VERSION_READY", "MODEL_ALIAS_SET"]
+    | ModelTriggerConfigurationCondition
+)
--- databricks_bundles-0.278.0/databricks/bundles/jobs/_models/trigger_settings.py
+++ databricks_bundles-0.280.0/databricks/bundles/jobs/_models/trigger_settings.py
@@ -8,6 +8,10 @@ from databricks.bundles.jobs._models.file_arrival_trigger_configuration import (
     FileArrivalTriggerConfiguration,
     FileArrivalTriggerConfigurationParam,
 )
+from databricks.bundles.jobs._models.model_trigger_configuration import (
+    ModelTriggerConfiguration,
+    ModelTriggerConfigurationParam,
+)
 from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam
 from databricks.bundles.jobs._models.periodic_trigger_configuration import (
     PeriodicTriggerConfiguration,
@@ -31,6 +35,11 @@ class TriggerSettings:
     File arrival trigger settings.
     """

+    model: VariableOrOptional[ModelTriggerConfiguration] = None
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
     pause_status: VariableOrOptional[PauseStatus] = None
     """
     Whether this trigger is paused or not.
@@ -59,6 +68,11 @@ class TriggerSettingsDict(TypedDict, total=False):
     File arrival trigger settings.
     """

+    model: VariableOrOptional[ModelTriggerConfigurationParam]
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
     pause_status: VariableOrOptional[PauseStatusParam]
     """
     Whether this trigger is paused or not.
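Taken together, the new `ModelTriggerConfiguration` model, its condition enum, and the `model` field on `TriggerSettings` let a job run on Unity Catalog model events. A minimal sketch (the job name, securable, and alias are hypothetical; it assumes `Job` keeps its `name` and `trigger` fields from earlier releases):

```python
from databricks.bundles.jobs import (
    Job,
    ModelTriggerConfiguration,
    ModelTriggerConfigurationCondition,
    TriggerSettings,
)

# Hypothetical trigger: run whenever the "champion" alias is set on a
# version of mycatalog.myschema.mymodel, at most once per hour.
job = Job(
    name="retrain_on_champion",
    trigger=TriggerSettings(
        model=ModelTriggerConfiguration(
            condition=ModelTriggerConfigurationCondition.MODEL_ALIAS_SET,
            aliases=["champion"],
            securable_name="mycatalog.myschema.mymodel",
            min_time_between_triggers_seconds=3600,
        ),
    ),
)
```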
--- databricks_bundles-0.278.0/databricks/bundles/pipelines/__init__.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/__init__.py
@@ -15,6 +15,9 @@ __all__ = [
     "ClusterLogConf",
     "ClusterLogConfDict",
     "ClusterLogConfParam",
+    "ConnectionParameters",
+    "ConnectionParametersDict",
+    "ConnectionParametersParam",
     "DayOfWeek",
     "DayOfWeekParam",
     "DbfsStorageInfo",
@@ -175,6 +178,11 @@ from databricks.bundles.pipelines._models.cluster_log_conf import (
     ClusterLogConfDict,
     ClusterLogConfParam,
 )
+from databricks.bundles.pipelines._models.connection_parameters import (
+    ConnectionParameters,
+    ConnectionParametersDict,
+    ConnectionParametersParam,
+)
 from databricks.bundles.pipelines._models.day_of_week import DayOfWeek, DayOfWeekParam
 from databricks.bundles.pipelines._models.dbfs_storage_info import (
     DbfsStorageInfo,
--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/aws_attributes.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/aws_attributes.py
@@ -104,7 +104,7 @@ class AwsAttributes:
     This string will be of a form like "us-west-2a". The provided availability
     zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
     is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
-    This is an optional field at cluster creation, and if not specified, a default zone will be used.
+    This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
     If the zone specified is "auto", will try to place cluster in a zone with high availability,
     and will retry placement in a different AZ if there is not enough capacity.

@@ -204,7 +204,7 @@ class AwsAttributesDict(TypedDict, total=False):
     This string will be of a form like "us-west-2a". The provided availability
     zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
     is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
-    This is an optional field at cluster creation, and if not specified, a default zone will be used.
+    This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
     If the zone specified is "auto", will try to place cluster in a zone with high availability,
     and will retry placement in a different AZ if there is not enough capacity.

--- /dev/null
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/connection_parameters.py
@@ -0,0 +1,50 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class ConnectionParameters:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    source_catalog: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog for initial connection.
+    This is necessary for schema exploration in some database systems like Oracle, and optional but nice-to-have
+    in some other database systems like Postgres.
+    For Oracle databases, this maps to a service name.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "ConnectionParametersDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "ConnectionParametersDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class ConnectionParametersDict(TypedDict, total=False):
+    """"""
+
+    source_catalog: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog for initial connection.
+    This is necessary for schema exploration in some database systems like Oracle, and optional but nice-to-have
+    in some other database systems like Postgres.
+    For Oracle databases, this maps to a service name.
+    """
+
+
+ConnectionParametersParam = ConnectionParametersDict | ConnectionParameters
--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/ingestion_config.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/ingestion_config.py
@@ -4,10 +4,7 @@ from typing import TYPE_CHECKING, TypedDict
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrOptional
-from databricks.bundles.pipelines._models.report_spec import (
-    ReportSpec,
-    ReportSpecParam,
-)
+from databricks.bundles.pipelines._models.report_spec import ReportSpec, ReportSpecParam
 from databricks.bundles.pipelines._models.schema_spec import SchemaSpec, SchemaSpecParam
 from databricks.bundles.pipelines._models.table_spec import TableSpec, TableSpecParam
--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
@@ -4,6 +4,10 @@ from typing import TYPE_CHECKING, TypedDict
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOr, VariableOrOptional
+from databricks.bundles.pipelines._models.connection_parameters import (
+    ConnectionParameters,
+    ConnectionParametersParam,
+)

 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -35,11 +39,18 @@ class IngestionGatewayPipelineDefinition:
     [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
     """

+    connection_parameters: VariableOrOptional[ConnectionParameters] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional, Internal. Parameters required to establish an initial connection with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str] = None
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
     This is the destination to use for the data that is extracted by the gateway.
-    Delta Live Tables system will automatically create the storage location under the catalog and schema.
+    Spark Declarative Pipelines system will automatically create the storage location under the catalog and schema.
     """

     @classmethod
@@ -73,11 +84,18 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
     [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
     """

+    connection_parameters: VariableOrOptional[ConnectionParametersParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional, Internal. Parameters required to establish an initial connection with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str]
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
     This is the destination to use for the data that is extracted by the gateway.
-    Delta Live Tables system will automatically create the storage location under the catalog and schema.
+    Spark Declarative Pipelines system will automatically create the storage location under the catalog and schema.
     """

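A minimal sketch of the new `connection_parameters` field on a gateway pipeline (the connection, catalog, schema, and service names are hypothetical; it assumes the definition's storage fields, such as `gateway_storage_catalog` and `gateway_storage_schema`, are unchanged from earlier releases):

```python
from databricks.bundles.pipelines import (
    ConnectionParameters,
    IngestionGatewayPipelineDefinition,
)

# Hypothetical Oracle gateway: source_catalog maps to the Oracle service
# name and enables schema exploration during the initial connection.
gateway = IngestionGatewayPipelineDefinition(
    connection_name="oracle_connection",
    connection_parameters=ConnectionParameters(source_catalog="ORCLPDB1"),
    gateway_storage_catalog="main",
    gateway_storage_schema="ingest_staging",
    gateway_storage_name="oracle_gateway_storage",
)
```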
--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -30,6 +30,16 @@ class IngestionPipelineDefinition:
     Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
     """

+    ingest_from_uc_foreign_catalog: VariableOrOptional[bool] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Immutable. If set to true, the pipeline will ingest tables from the
+    UC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.
+    The `source_catalog` fields in objects of IngestionConfig are interpreted as
+    the UC foreign catalogs to ingest from.
+    """
+
     ingestion_gateway_id: VariableOrOptional[str] = None
     """
     Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
@@ -38,10 +48,6 @@ class IngestionPipelineDefinition:
     netsuite_jar_path: VariableOrOptional[str] = None
     """
     :meta private: [EXPERIMENTAL]
-
-    Netsuite only configuration. When the field is set for a netsuite connector,
-    the jar stored in the field will be validated and added to the classpath of
-    pipeline's cluster.
     """

     objects: VariableOrList[IngestionConfig] = field(default_factory=list)
@@ -51,8 +57,6 @@ class IngestionPipelineDefinition:

     source_configurations: VariableOrList[SourceConfig] = field(default_factory=list)
     """
-    :meta private: [EXPERIMENTAL]
-
     Top-level source configurations
     """

@@ -77,6 +81,16 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
     """

+    ingest_from_uc_foreign_catalog: VariableOrOptional[bool]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Immutable. If set to true, the pipeline will ingest tables from the
+    UC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.
+    The `source_catalog` fields in objects of IngestionConfig are interpreted as
+    the UC foreign catalogs to ingest from.
+    """
+
     ingestion_gateway_id: VariableOrOptional[str]
     """
     Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
@@ -85,10 +99,6 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     netsuite_jar_path: VariableOrOptional[str]
     """
     :meta private: [EXPERIMENTAL]
-
-    Netsuite only configuration. When the field is set for a netsuite connector,
-    the jar stored in the field will be validated and added to the classpath of
-    pipeline's cluster.
     """

     objects: VariableOrList[IngestionConfigParam]
@@ -98,8 +108,6 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):

     source_configurations: VariableOrList[SourceConfigParam]
     """
-    :meta private: [EXPERIMENTAL]
-
     Top-level source configurations
     """

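A minimal sketch of the new `ingest_from_uc_foreign_catalog` flag (catalog and schema names are hypothetical; the object layout assumes the pipelines-API `SchemaSpec` fields). With the flag set, each `source_catalog` is read as a UC foreign catalog, so neither `connection_name` nor `ingestion_gateway_id` is needed:

```python
from databricks.bundles.pipelines import IngestionPipelineDefinition

# Hypothetical definition: ingest the "sales" schema of a UC foreign
# catalog directly, without a UC connection or an ingestion gateway.
definition = IngestionPipelineDefinition.from_dict(
    {
        "ingest_from_uc_foreign_catalog": True,
        "objects": [
            {
                "schema": {
                    "source_catalog": "my_foreign_catalog",
                    "source_schema": "sales",
                    "destination_catalog": "main",
                    "destination_schema": "bronze",
                }
            }
        ],
    }
)
```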
--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/postgres_catalog_config.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/postgres_catalog_config.py
@@ -16,15 +16,11 @@ if TYPE_CHECKING:
 @dataclass(kw_only=True)
 class PostgresCatalogConfig:
     """
-    :meta private: [EXPERIMENTAL]
-
     PG-specific catalog-level configuration parameters
     """

     slot_config: VariableOrOptional[PostgresSlotConfig] = None
     """
-    :meta private: [EXPERIMENTAL]
-
     Optional. The Postgres slot configuration to use for logical replication
     """

@@ -41,8 +37,6 @@ class PostgresCatalogConfigDict(TypedDict, total=False):

     slot_config: VariableOrOptional[PostgresSlotConfigParam]
     """
-    :meta private: [EXPERIMENTAL]
-
     Optional. The Postgres slot configuration to use for logical replication
     """

--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/postgres_slot_config.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/postgres_slot_config.py
@@ -12,22 +12,16 @@ if TYPE_CHECKING:
 @dataclass(kw_only=True)
 class PostgresSlotConfig:
     """
-    :meta private: [EXPERIMENTAL]
-
     PostgresSlotConfig contains the configuration for a Postgres logical replication slot
     """

     publication_name: VariableOrOptional[str] = None
     """
-    :meta private: [EXPERIMENTAL]
-
     The name of the publication to use for the Postgres source
     """

     slot_name: VariableOrOptional[str] = None
     """
-    :meta private: [EXPERIMENTAL]
-
     The name of the logical replication slot to use for the Postgres source
     """

@@ -44,15 +38,11 @@ class PostgresSlotConfigDict(TypedDict, total=False):

     publication_name: VariableOrOptional[str]
     """
-    :meta private: [EXPERIMENTAL]
-
     The name of the publication to use for the Postgres source
     """

     slot_name: VariableOrOptional[str]
     """
-    :meta private: [EXPERIMENTAL]
-
     The name of the logical replication slot to use for the Postgres source
     """

--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/source_catalog_config.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/source_catalog_config.py
@@ -16,22 +16,16 @@ if TYPE_CHECKING:
 @dataclass(kw_only=True)
 class SourceCatalogConfig:
     """
-    :meta private: [EXPERIMENTAL]
-
     SourceCatalogConfig contains catalog-level custom configuration parameters for each source
     """

     postgres: VariableOrOptional[PostgresCatalogConfig] = None
     """
-    :meta private: [EXPERIMENTAL]
-
     Postgres-specific catalog-level configuration parameters
     """

     source_catalog: VariableOrOptional[str] = None
     """
-    :meta private: [EXPERIMENTAL]
-
     Source catalog name
     """

@@ -48,15 +42,11 @@ class SourceCatalogConfigDict(TypedDict, total=False):

     postgres: VariableOrOptional[PostgresCatalogConfigParam]
     """
-    :meta private: [EXPERIMENTAL]
-
     Postgres-specific catalog-level configuration parameters
     """

     source_catalog: VariableOrOptional[str]
     """
-    :meta private: [EXPERIMENTAL]
-
     Source catalog name
     """

--- databricks_bundles-0.278.0/databricks/bundles/pipelines/_models/source_config.py
+++ databricks_bundles-0.280.0/databricks/bundles/pipelines/_models/source_config.py
@@ -15,14 +15,10 @@ if TYPE_CHECKING:

 @dataclass(kw_only=True)
 class SourceConfig:
-    """
-    :meta private: [EXPERIMENTAL]
-    """
+    """"""

     catalog: VariableOrOptional[SourceCatalogConfig] = None
     """
-    :meta private: [EXPERIMENTAL]
-
     Catalog-level source configuration parameters
     """

@@ -39,8 +35,6 @@ class SourceConfigDict(TypedDict, total=False):

     catalog: VariableOrOptional[SourceCatalogConfigParam]
     """
-    :meta private: [EXPERIMENTAL]
-
     Catalog-level source configuration parameters
     """

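The hunks above drop the `:meta private: [EXPERIMENTAL]` markers from the Postgres source-configuration models, making them part of the documented surface. A minimal sketch of how they nest (connection, catalog, publication, and slot names are hypothetical):

```python
from databricks.bundles.pipelines import IngestionPipelineDefinition

# Hypothetical Postgres ingestion: pin the publication and logical
# replication slot for one source catalog via source_configurations.
definition = IngestionPipelineDefinition.from_dict(
    {
        "connection_name": "postgres_connection",
        "source_configurations": [
            {
                "catalog": {
                    "source_catalog": "appdb",
                    "postgres": {
                        "slot_config": {
                            "publication_name": "databricks_pub",
                            "slot_name": "databricks_slot",
                        }
                    },
                }
            }
        ],
    }
)
```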