databricks-bundles 0.266.0__py3-none-any.whl → 0.267.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27):
  1. databricks/bundles/core/_load.py +2 -2
  2. databricks/bundles/core/_variable.py +1 -1
  3. databricks/bundles/jobs/__init__.py +6 -0
  4. databricks/bundles/jobs/_models/continuous.py +14 -0
  5. databricks/bundles/jobs/_models/environment.py +10 -0
  6. databricks/bundles/jobs/_models/gcp_attributes.py +14 -0
  7. databricks/bundles/jobs/_models/init_script_info.py +16 -0
  8. databricks/bundles/jobs/_models/job_email_notifications.py +13 -1
  9. databricks/bundles/jobs/_models/library.py +10 -0
  10. databricks/bundles/jobs/_models/spark_jar_task.py +25 -1
  11. databricks/bundles/jobs/_models/task_email_notifications.py +13 -1
  12. databricks/bundles/jobs/_models/task_retry_mode.py +17 -0
  13. databricks/bundles/pipelines/__init__.py +32 -0
  14. databricks/bundles/pipelines/_models/gcp_attributes.py +14 -0
  15. databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +10 -0
  16. databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
  17. databricks/bundles/pipelines/_models/init_script_info.py +16 -0
  18. databricks/bundles/pipelines/_models/pipeline.py +2 -2
  19. databricks/bundles/pipelines/_models/postgres_catalog_config.py +50 -0
  20. databricks/bundles/pipelines/_models/postgres_slot_config.py +60 -0
  21. databricks/bundles/pipelines/_models/source_catalog_config.py +64 -0
  22. databricks/bundles/pipelines/_models/source_config.py +48 -0
  23. databricks/bundles/version.py +1 -1
  24. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.267.0.dist-info}/METADATA +2 -2
  25. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.267.0.dist-info}/RECORD +27 -22
  26. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.267.0.dist-info}/WHEEL +0 -0
  27. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.267.0.dist-info}/licenses/LICENSE +0 -0
databricks/bundles/core/_load.py
@@ -14,9 +14,9 @@ from databricks.bundles.core._resources import Resources
 
 __all__ = [
     "load_resources_from_current_package_module",
-    "load_resources_from_package_module",
-    "load_resources_from_modules",
     "load_resources_from_module",
+    "load_resources_from_modules",
+    "load_resources_from_package_module",
 ]
 
 """
databricks/bundles/core/_variable.py
@@ -11,9 +11,9 @@ from typing import (
 __all__ = [
     "Variable",
     "VariableOr",
-    "VariableOrOptional",
     "VariableOrDict",
     "VariableOrList",
+    "VariableOrOptional",
    "variables",
 ]
 
databricks/bundles/jobs/__init__.py
@@ -244,6 +244,8 @@ __all__ = [
     "TaskNotificationSettingsDict",
     "TaskNotificationSettingsParam",
     "TaskParam",
+    "TaskRetryMode",
+    "TaskRetryModeParam",
     "TriggerSettings",
     "TriggerSettingsDict",
     "TriggerSettingsParam",
@@ -648,6 +650,10 @@ from databricks.bundles.jobs._models.task_notification_settings import (
     TaskNotificationSettingsDict,
     TaskNotificationSettingsParam,
 )
+from databricks.bundles.jobs._models.task_retry_mode import (
+    TaskRetryMode,
+    TaskRetryModeParam,
+)
 from databricks.bundles.jobs._models.trigger_settings import (
     TriggerSettings,
     TriggerSettingsDict,
databricks/bundles/jobs/_models/continuous.py
@@ -5,6 +5,10 @@ from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrOptional
 from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam
+from databricks.bundles.jobs._models.task_retry_mode import (
+    TaskRetryMode,
+    TaskRetryModeParam,
+)
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -19,6 +23,11 @@ class Continuous:
     Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
     """
 
+    task_retry_mode: VariableOrOptional[TaskRetryMode] = None
+    """
+    Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+    """
+
     @classmethod
     def from_dict(cls, value: "ContinuousDict") -> "Self":
         return _transform(cls, value)
@@ -35,5 +44,10 @@ class ContinuousDict(TypedDict, total=False):
     Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
     """
 
+    task_retry_mode: VariableOrOptional[TaskRetryModeParam]
+    """
+    Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+    """
+
 
 ContinuousParam = ContinuousDict | Continuous
databricks/bundles/jobs/_models/environment.py
@@ -16,6 +16,11 @@ class Environment:
     In this minimal environment spec, only pip dependencies are supported.
     """
 
+    client: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Use `environment_version` instead.
+    """
+
     dependencies: VariableOrList[str] = field(default_factory=list)
     """
     List of pip dependencies, as supported by the version of pip in this environment.
@@ -46,6 +51,11 @@ class Environment:
 class EnvironmentDict(TypedDict, total=False):
     """"""
 
+    client: VariableOrOptional[str]
+    """
+    [DEPRECATED] Use `environment_version` instead.
+    """
+
     dependencies: VariableOrList[str]
     """
     List of pip dependencies, as supported by the version of pip in this environment.
databricks/bundles/jobs/_models/gcp_attributes.py
@@ -53,6 +53,13 @@ class GcpAttributes:
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str] = None
     """
     Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool]
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str]
     """
     Identifier for the availability zone in which the cluster resides.
databricks/bundles/jobs/_models/init_script_info.py
@@ -8,6 +8,10 @@ from databricks.bundles.jobs._models.adlsgen2_info import (
     Adlsgen2Info,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.jobs._models.dbfs_storage_info import (
+    DbfsStorageInfo,
+    DbfsStorageInfoParam,
+)
 from databricks.bundles.jobs._models.gcs_storage_info import (
     GcsStorageInfo,
     GcsStorageInfoParam,
@@ -45,6 +49,12 @@ class InitScriptInfo:
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfo] = None
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfo] = None
     """
     destination needs to be provided, e.g.
@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfoParam]
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfoParam]
     """
     destination needs to be provided, e.g.
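The `dbfs` destination added above is already marked deprecated on arrival. Below is a minimal sketch of parsing the deprecated shape next to a replacement destination; the `volumes` field and the shared `from_dict` constructor are assumptions here, since neither appears in this hunk:

```python
from databricks.bundles.jobs import InitScriptInfo

# Deprecated DBFS destination, exactly as in the docstring above.
legacy = InitScriptInfo.from_dict(
    {"dbfs": {"destination": "dbfs:/home/cluster_log"}}
)

# Assumed replacement: point the init script at a Unity Catalog volume instead.
preferred = InitScriptInfo.from_dict(
    {"volumes": {"destination": "/Volumes/main/default/scripts/init.sh"}}
)
```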
databricks/bundles/jobs/_models/job_email_notifications.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrList
+from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -13,6 +13,12 @@ if TYPE_CHECKING:
 class JobEmailNotifications:
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
         default_factory=list
     )
@@ -53,6 +59,12 @@ class JobEmailNotifications:
 class JobEmailNotificationsDict(TypedDict, total=False):
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool]
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str]
     """
     A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.
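The deprecation note above points at `notification_settings.no_alert_for_skipped_runs`. A small sketch of the migration, assuming `JobNotificationSettings` keeps its existing `no_alert_for_skipped_runs` field and is attached to the job as `notification_settings`:

```python
from databricks.bundles.jobs import JobEmailNotifications, JobNotificationSettings

# Deprecated: the flag lives on the email notifications object itself.
emails_legacy = JobEmailNotifications(
    on_failure=["team@example.com"],
    no_alert_for_skipped_runs=True,
)

# Preferred: keep recipients here and move the flag to the job-level settings.
emails = JobEmailNotifications(on_failure=["team@example.com"])
settings = JobNotificationSettings(no_alert_for_skipped_runs=True)
```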
databricks/bundles/jobs/_models/library.py
@@ -30,6 +30,11 @@ class Library:
     Specification of a CRAN library to be installed as part of the library
     """
 
+    egg: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+    """
+
     jar: VariableOrOptional[str] = None
     """
     URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
@@ -82,6 +87,11 @@ class LibraryDict(TypedDict, total=False):
     Specification of a CRAN library to be installed as part of the library
     """
 
+    egg: VariableOrOptional[str]
+    """
+    [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+    """
+
     jar: VariableOrOptional[str]
     """
     URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
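Since `egg` is deprecated and unsupported on Databricks Runtime 14.0 and above, the non-deprecated fields already on `Library` (for example `jar`, visible in the surrounding context) are the way forward. A sketch with an illustrative volume path:

```python
from databricks.bundles.jobs import Library

# Deprecated: egg libraries are rejected on Databricks Runtime 14.0 and above.
legacy = Library(egg="dbfs:/libraries/my_lib.egg")

# Preferred: ship the same code as a JAR; the jar field accepts Workspace paths,
# Unity Catalog Volumes paths, and S3 URIs.
library = Library(jar="/Volumes/main/default/libs/my_lib.jar")
```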
databricks/bundles/jobs/_models/spark_jar_task.py
@@ -3,7 +3,11 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOr, VariableOrList
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrList,
+    VariableOrOptional,
+)
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -20,6 +24,11 @@ class SparkJarTask:
     The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.
     """
 
+    jar_uri: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.
+    """
+
     parameters: VariableOrList[str] = field(default_factory=list)
     """
     Parameters passed to the main method.
@@ -27,6 +36,11 @@ class SparkJarTask:
     Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
     """
 
+    run_as_repl: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] Deprecated. A value of `false` is no longer supported.
+    """
+
     @classmethod
     def from_dict(cls, value: "SparkJarTaskDict") -> "Self":
         return _transform(cls, value)
@@ -45,6 +59,11 @@ class SparkJarTaskDict(TypedDict, total=False):
     The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.
     """
 
+    jar_uri: VariableOrOptional[str]
+    """
+    [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.
+    """
+
     parameters: VariableOrList[str]
     """
     Parameters passed to the main method.
@@ -52,5 +71,10 @@ class SparkJarTaskDict(TypedDict, total=False):
     Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
     """
 
+    run_as_repl: VariableOrOptional[bool]
+    """
+    [DEPRECATED] Deprecated. A value of `false` is no longer supported.
+    """
+
 
 SparkJarTaskParam = SparkJarTaskDict | SparkJarTask
databricks/bundles/jobs/_models/task_email_notifications.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrList
+from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -13,6 +13,12 @@ if TYPE_CHECKING:
 class TaskEmailNotifications:
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
         default_factory=list
     )
@@ -53,6 +59,12 @@ class TaskEmailNotifications:
 class TaskEmailNotificationsDict(TypedDict, total=False):
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool]
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str]
     """
     A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.
databricks/bundles/jobs/_models/task_retry_mode.py (new file)
@@ -0,0 +1,17 @@
+from enum import Enum
+from typing import Literal
+
+
+class TaskRetryMode(Enum):
+    """
+    task retry mode of the continuous job
+    * NEVER: The failed task will not be retried.
+    * ON_FAILURE: Retry a failed task if at least one other task in the job is still running its first attempt.
+    When this condition is no longer met or the retry limit is reached, the job run is cancelled and a new run is started.
+    """
+
+    NEVER = "NEVER"
+    ON_FAILURE = "ON_FAILURE"
+
+
+TaskRetryModeParam = Literal["NEVER", "ON_FAILURE"] | TaskRetryMode
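`TaskRetryMode` pairs with the new `Continuous.task_retry_mode` field above; both are exported from `databricks.bundles.jobs`. A minimal sketch, assuming the rest of the job definition is configured elsewhere in the bundle:

```python
from databricks.bundles.jobs import Continuous, TaskRetryMode

# Dataclass form: retry failed tasks while other tasks are still on their first attempt.
continuous = Continuous(task_retry_mode=TaskRetryMode.ON_FAILURE)

# Dict form: TaskRetryModeParam also accepts the plain string literals.
continuous_from_dict = Continuous.from_dict({"task_retry_mode": "ON_FAILURE"})
```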
databricks/bundles/pipelines/__init__.py
@@ -96,6 +96,12 @@ __all__ = [
     "PipelinesEnvironment",
     "PipelinesEnvironmentDict",
     "PipelinesEnvironmentParam",
+    "PostgresCatalogConfig",
+    "PostgresCatalogConfigDict",
+    "PostgresCatalogConfigParam",
+    "PostgresSlotConfig",
+    "PostgresSlotConfigDict",
+    "PostgresSlotConfigParam",
     "ReportSpec",
     "ReportSpecDict",
     "ReportSpecParam",
@@ -111,6 +117,12 @@ __all__ = [
     "SchemaSpec",
     "SchemaSpecDict",
     "SchemaSpecParam",
+    "SourceCatalogConfig",
+    "SourceCatalogConfigDict",
+    "SourceCatalogConfigParam",
+    "SourceConfig",
+    "SourceConfigDict",
+    "SourceConfigParam",
     "TableSpec",
     "TableSpecDict",
     "TableSpecParam",
@@ -292,6 +304,16 @@ from databricks.bundles.pipelines._models.pipelines_environment import (
     PipelinesEnvironmentDict,
     PipelinesEnvironmentParam,
 )
+from databricks.bundles.pipelines._models.postgres_catalog_config import (
+    PostgresCatalogConfig,
+    PostgresCatalogConfigDict,
+    PostgresCatalogConfigParam,
+)
+from databricks.bundles.pipelines._models.postgres_slot_config import (
+    PostgresSlotConfig,
+    PostgresSlotConfigDict,
+    PostgresSlotConfigParam,
+)
 from databricks.bundles.pipelines._models.report_spec import (
     ReportSpec,
     ReportSpecDict,
@@ -313,6 +335,16 @@ from databricks.bundles.pipelines._models.schema_spec import (
     SchemaSpecDict,
     SchemaSpecParam,
 )
+from databricks.bundles.pipelines._models.source_catalog_config import (
+    SourceCatalogConfig,
+    SourceCatalogConfigDict,
+    SourceCatalogConfigParam,
+)
+from databricks.bundles.pipelines._models.source_config import (
+    SourceConfig,
+    SourceConfigDict,
+    SourceConfigParam,
+)
 from databricks.bundles.pipelines._models.table_spec import (
     TableSpec,
     TableSpecDict,
databricks/bundles/pipelines/_models/gcp_attributes.py
@@ -53,6 +53,13 @@ class GcpAttributes:
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str] = None
     """
     Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool]
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str]
     """
     Identifier for the availability zone in which the cluster resides.
databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
@@ -30,6 +30,11 @@ class IngestionGatewayPipelineDefinition:
     Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
+    connection_id: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str] = None
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
@@ -63,6 +68,11 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
     Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
+    connection_id: VariableOrOptional[str]
+    """
+    [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str]
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -12,6 +12,10 @@ from databricks.bundles.pipelines._models.ingestion_source_type import (
     IngestionSourceType,
     IngestionSourceTypeParam,
 )
+from databricks.bundles.pipelines._models.source_config import (
+    SourceConfig,
+    SourceConfigParam,
+)
 from databricks.bundles.pipelines._models.table_specific_config import (
     TableSpecificConfig,
     TableSpecificConfigParam,
@@ -40,6 +44,13 @@ class IngestionPipelineDefinition:
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
     """
 
+    source_configurations: VariableOrList[SourceConfig] = field(default_factory=list)
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Top-level source configurations
+    """
+
     source_type: VariableOrOptional[IngestionSourceType] = None
     """
     The type of the foreign source.
@@ -78,6 +89,13 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
     """
 
+    source_configurations: VariableOrList[SourceConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Top-level source configurations
+    """
+
     source_type: VariableOrOptional[IngestionSourceTypeParam]
     """
     The type of the foreign source.
databricks/bundles/pipelines/_models/init_script_info.py
@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.adlsgen2_info import (
     Adlsgen2Info,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.pipelines._models.dbfs_storage_info import (
+    DbfsStorageInfo,
+    DbfsStorageInfoParam,
+)
 from databricks.bundles.pipelines._models.gcs_storage_info import (
     GcsStorageInfo,
     GcsStorageInfoParam,
@@ -45,6 +49,12 @@ class InitScriptInfo:
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfo] = None
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfo] = None
     """
     destination needs to be provided, e.g.
@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfoParam]
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfoParam]
     """
     destination needs to be provided, e.g.
databricks/bundles/pipelines/_models/pipeline.py
@@ -203,7 +203,7 @@ class Pipeline(Resource):
 
     target: VariableOrOptional[str] = None
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
     @classmethod
@@ -361,7 +361,7 @@ class PipelineDict(TypedDict, total=False):
 
     target: VariableOrOptional[str]
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
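With `target` now flagged as deprecated, new pipeline definitions should set `schema` (plus `catalog` for Unity Catalog). A sketch of the two shapes, assuming only these fields matter for the example:

```python
from databricks.bundles.pipelines import Pipeline

# Legacy shape: publishes through the deprecated `target` field.
legacy = Pipeline(name="my_pipeline", catalog="main", target="analytics")

# Preferred shape: exactly one of `schema` or `target` may be set; use `schema`.
pipeline = Pipeline(name="my_pipeline", catalog="main", schema="analytics")
```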
 
databricks/bundles/pipelines/_models/postgres_catalog_config.py (new file)
@@ -0,0 +1,50 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.postgres_slot_config import (
+    PostgresSlotConfig,
+    PostgresSlotConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class PostgresCatalogConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    PG-specific catalog-level configuration parameters
+    """
+
+    slot_config: VariableOrOptional[PostgresSlotConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional. The Postgres slot configuration to use for logical replication
+    """
+
+    @classmethod
+    def from_dict(cls, value: "PostgresCatalogConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "PostgresCatalogConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class PostgresCatalogConfigDict(TypedDict, total=False):
+    """"""
+
+    slot_config: VariableOrOptional[PostgresSlotConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional. The Postgres slot configuration to use for logical replication
+    """
+
+
+PostgresCatalogConfigParam = PostgresCatalogConfigDict | PostgresCatalogConfig
databricks/bundles/pipelines/_models/postgres_slot_config.py (new file)
@@ -0,0 +1,60 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class PostgresSlotConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    PostgresSlotConfig contains the configuration for a Postgres logical replication slot
+    """
+
+    publication_name: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the publication to use for the Postgres source
+    """
+
+    slot_name: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the logical replication slot to use for the Postgres source
+    """
+
+    @classmethod
+    def from_dict(cls, value: "PostgresSlotConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "PostgresSlotConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class PostgresSlotConfigDict(TypedDict, total=False):
+    """"""
+
+    publication_name: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the publication to use for the Postgres source
+    """
+
+    slot_name: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the logical replication slot to use for the Postgres source
+    """
+
+
+PostgresSlotConfigParam = PostgresSlotConfigDict | PostgresSlotConfig
databricks/bundles/pipelines/_models/source_catalog_config.py (new file)
@@ -0,0 +1,64 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.postgres_catalog_config import (
+    PostgresCatalogConfig,
+    PostgresCatalogConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class SourceCatalogConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    SourceCatalogConfig contains catalog-level custom configuration parameters for each source
+    """
+
+    postgres: VariableOrOptional[PostgresCatalogConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Postgres-specific catalog-level configuration parameters
+    """
+
+    source_catalog: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog name
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SourceCatalogConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SourceCatalogConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class SourceCatalogConfigDict(TypedDict, total=False):
+    """"""
+
+    postgres: VariableOrOptional[PostgresCatalogConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Postgres-specific catalog-level configuration parameters
+    """
+
+    source_catalog: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog name
+    """
+
+
+SourceCatalogConfigParam = SourceCatalogConfigDict | SourceCatalogConfig
databricks/bundles/pipelines/_models/source_config.py (new file)
@@ -0,0 +1,48 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.source_catalog_config import (
+    SourceCatalogConfig,
+    SourceCatalogConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class SourceConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    catalog: VariableOrOptional[SourceCatalogConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Catalog-level source configuration parameters
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SourceConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SourceConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class SourceConfigDict(TypedDict, total=False):
+    """"""
+
+    catalog: VariableOrOptional[SourceCatalogConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Catalog-level source configuration parameters
+    """
+
+
+SourceConfigParam = SourceConfigDict | SourceConfig
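Taken together, the four new experimental classes nest into a single `source_configurations` entry for `IngestionPipelineDefinition`. A sketch of that nesting with illustrative slot and publication names; the surrounding ingestion pipeline definition is assumed to exist elsewhere:

```python
from databricks.bundles.pipelines import (
    PostgresCatalogConfig,
    PostgresSlotConfig,
    SourceCatalogConfig,
    SourceConfig,
)

# Catalog-level configuration for a Postgres source: pin the logical replication
# slot and publication used for the "sales" source catalog.
source_config = SourceConfig(
    catalog=SourceCatalogConfig(
        source_catalog="sales",
        postgres=PostgresCatalogConfig(
            slot_config=PostgresSlotConfig(
                slot_name="dlt_sales_slot",
                publication_name="dlt_sales_publication",
            )
        ),
    )
)

# The dict form mirrors the dataclasses one-to-one.
same_config = SourceConfig.from_dict(source_config.as_dict())
```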
databricks/bundles/version.py
@@ -1 +1 @@
-__version__ = "0.266.0"
+__version__ = "0.267.0"
{databricks_bundles-0.266.0.dist-info → databricks_bundles-0.267.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.266.0
+Version: 0.267.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.266.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.267.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
{databricks_bundles-0.266.0.dist-info → databricks_bundles-0.267.0.dist-info}/RECORD
@@ -2,11 +2,11 @@ databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
 databricks/bundles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/bundles/build.py,sha256=91H2QpFB7l2tljt05tEql2Y3EMCur_gKt8cQjo79Ke0,15960
 databricks/bundles/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
-databricks/bundles/version.py,sha256=_XMmXbwjW0tVT93eQ_u4uLKvEI6KEnpd_e9LnPOryUU,24
+databricks/bundles/version.py,sha256=UQ6lk3O2eP8xOF3l0LyyxDYC4LLHeCszvjx1_0Qkye4,24
 databricks/bundles/core/__init__.py,sha256=3O0Jj4PqU74jPd0NVONt2fQgKhXzKLBHNBgRRgM52bA,1315
 databricks/bundles/core/_bundle.py,sha256=B5qINwnoRDitkofFaoCWDCS1tbB3a7nufqK0iDRfC6g,3287
 databricks/bundles/core/_diagnostics.py,sha256=__T_SCC2KQ6UmerON7aIu9oWD8KyMyx_5E5YMb6glFk,5616
-databricks/bundles/core/_load.py,sha256=KTkGwcpuENTkCVpUol67CJZDwvWy6_JH48lspCPZOQ4,4962
+databricks/bundles/core/_load.py,sha256=Xwe3hWg6I2gL-Hjw0vN04s09BgypBbQuuIbmL3b9kCU,4962
 databricks/bundles/core/_location.py,sha256=tMa-wGKOLCqOCk6bxVphZA4GWJxvjzAkDu8wn1la5DE,2142
 databricks/bundles/core/_resource.py,sha256=ymi21F7e0Xy1fMrtL9ZMRXWQaqrK9F-t24w5vYtodqo,137
 databricks/bundles/core/_resource_mutator.py,sha256=D2GjDwh4iV6D7JEiiCZ8qTzTzWZkXhTnA5T5vIPsyXo,4710
@@ -14,8 +14,8 @@ databricks/bundles/core/_resource_type.py,sha256=F65b4sKhlbpKqfxwlcehMXV9YpFlLwX
 databricks/bundles/core/_resources.py,sha256=8vT7eE6xKTHcgyP0D4RnU7DEEF_-Zk4wJ4o9AyDxq9o,9944
 databricks/bundles/core/_transform.py,sha256=ISupC7du3JnqrQQMgHf5Mt7fpZKEcUNZ5qwgMWZQfAE,8836
 databricks/bundles/core/_transform_to_json.py,sha256=aGiIBVx7pSO9LiJZ5CPYsTA5U6XhCbn1Ew15dhqC0yc,1890
-databricks/bundles/core/_variable.py,sha256=G05eikeX-gA_3Wx-0GX4XwQa1ZOPGWF1Qb2qkoMi1OE,3617
-databricks/bundles/jobs/__init__.py,sha256=mFTMLPVvTQpXbstYSRaRgfCLyGPn_-AfXhiPGS3OndM,19003
+databricks/bundles/core/_variable.py,sha256=16g6vSLs5J-Ek2u2LNSnbqK-vpgbeirCddWjS-AiLGU,3617
+databricks/bundles/jobs/__init__.py,sha256=jad50UxN9k_N8bD3wD-yYEJ-SQNiipKDdXvxiGZQFiQ,19157
 databricks/bundles/jobs/_models/adlsgen2_info.py,sha256=_eGe6ivi2VmokxKxKUji9-fSZLBubAr43uBDnN7vvlY,1104
 databricks/bundles/jobs/_models/authentication_method.py,sha256=XI8hU5fhPlGTsZdePZtR6FIjyT4iEt2URb61q1MsgNI,198
 databricks/bundles/jobs/_models/auto_scale.py,sha256=Z5vZa0bZi6LJ_Ac-VJfqqCJAtI-zY1_auTGhsV8khvA,1624
@@ -31,7 +31,7 @@ databricks/bundles/jobs/_models/compute_config.py,sha256=61-BdovRQ48n93GlPc3bjfy
 databricks/bundles/jobs/_models/condition.py,sha256=79S2RIdrWFY9lwIu8T0mJPuFALFqv1H0JNtM9gSwL4k,258
 databricks/bundles/jobs/_models/condition_task.py,sha256=4A71BNRZNk9_iS8IFiHr69RLgx_qmabnQUwcy_g7e0E,2814
 databricks/bundles/jobs/_models/condition_task_op.py,sha256=mj5tx5qmvUCIkDvFYbo1-6fvVP_Q1zYzxv011ufHbx4,1141
-databricks/bundles/jobs/_models/continuous.py,sha256=M74ga68NIRv8r3OVhmNuSmzTe2M0FY4tfVf19N9Mf3Y,1150
+databricks/bundles/jobs/_models/continuous.py,sha256=ILsXONBdaHnXoNofLJj5CWu4Hk3NGpvS4izBKOwZHL0,1609
 databricks/bundles/jobs/_models/cron_schedule.py,sha256=_dUQ8vdfnZvIPvNyB-qdn_9cKKGrys249Wv_SWDdP7Q,2186
 databricks/bundles/jobs/_models/dashboard_task.py,sha256=HTbz7Tra_gvnPNW2Skug-k-bxjwbY7xu0cq5nlGRYh8,1533
 databricks/bundles/jobs/_models/data_security_mode.py,sha256=fuelzF06CfyhG9OmK-2orH5EifQnEtPTEmh7NZcL7zA,2660
@@ -41,19 +41,19 @@ databricks/bundles/jobs/_models/dbt_task.py,sha256=3OT0GoU1y1DffwXE_YAXXU807Hj2D
 databricks/bundles/jobs/_models/docker_basic_auth.py,sha256=jEbSE8CvnTceOm405NA18IvB1lLCu-Wfe3SPSlsSBG4,1084
 databricks/bundles/jobs/_models/docker_image.py,sha256=h2hp3vnfh_wXxMg6RzNHPfjfb-FMsyABe83XIaX5fA8,1126
 databricks/bundles/jobs/_models/ebs_volume_type.py,sha256=-93BcybklhLyQEZvF8C1BhnHMeCjfDwI2qwimZ6X5eU,415
-databricks/bundles/jobs/_models/environment.py,sha256=RdDX_knqmwMUlSPTiWW9QN4aUvYzMRPUKgsPZhV4vJs,2318
+databricks/bundles/jobs/_models/environment.py,sha256=9SIxvp8h0Y5Dxp9MsE11VBU6assb3-VdChT8-wXXfjI,2535
 databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py,sha256=VO5bHmN-hywRfg3zfqTdc0YI_5FQRpGLZdc6f6_GKnA,2378
 databricks/bundles/jobs/_models/for_each_task.py,sha256=MP-6c7zUaRcl5EaM-3IfjhX1Jc840GlnRdjj_SlhypI,1810
-databricks/bundles/jobs/_models/gcp_attributes.py,sha256=81Ox2Lr_rlyTxleINNLk438O0M_N82pHgSl70--3r1o,5093
+databricks/bundles/jobs/_models/gcp_attributes.py,sha256=DdKsEfEIGTtKJFdC_h8JukAQgDhE1yluwWn2X2MIiOk,5786
 databricks/bundles/jobs/_models/gcp_availability.py,sha256=a2ayWsyEQDpIDx-mwDIx_p1VJpcPaUme4ndqbpc4uNs,556
 databricks/bundles/jobs/_models/gcs_storage_info.py,sha256=hwOowyNKCBhzsUiCQSrtmQPxrMINEq5jg2EefkrE2fQ,1020
 databricks/bundles/jobs/_models/gen_ai_compute_task.py,sha256=WNjYn_s4bVlB-msh7VKniW1QdXDqBpTJlY4ykU5PLEA,4636
 databricks/bundles/jobs/_models/git_provider.py,sha256=VzNKsIrOJauZjCTCeq3zVUumrnZoamYXxpgSz0kPsEI,664
 databricks/bundles/jobs/_models/git_source.py,sha256=0WVNzvbdMRvFC4N-fH2Jw1sh-kuNCt7WVkO5E1Jy5a8,3031
-databricks/bundles/jobs/_models/init_script_info.py,sha256=-4tzkuDKL2PrxE4sh_A7DCRIUGa4YcccTVTwdutzrvI,4022
+databricks/bundles/jobs/_models/init_script_info.py,sha256=Skv-u-naj3GRdmwvnxbMIDdOj3YU9PARTBMCbTNKfN8,4511
 databricks/bundles/jobs/_models/job.py,sha256=oD3dnHAp5bXF_Aug47virt45IwKgOFLZ-EPmWudKqFQ,15295
 databricks/bundles/jobs/_models/job_cluster.py,sha256=b9mQ993-tmY_KZem8vRCT7MLcw07KhKEfvmyWxoGav0,1595
-databricks/bundles/jobs/_models/job_email_notifications.py,sha256=yq7Pl-CxHfkmx97xzcEH4YlXuvhKOHiZu2pQDd3aSr4,4713
+databricks/bundles/jobs/_models/job_email_notifications.py,sha256=rnkjkZ06YTsHLkbjSDGV9Lm5o9r6fNSG9KwGA2rlyjY,5308
 databricks/bundles/jobs/_models/job_environment.py,sha256=ekMKwai2LtB9Dzg3LyGmoVjLCC6ShTWBjW54_B-qOBo,1185
 databricks/bundles/jobs/_models/job_notification_settings.py,sha256=DmmMSW44_CaxKPms-oQhHP6MpicAqdoKS9i3Yn3RlkQ,1532
 databricks/bundles/jobs/_models/job_parameter_definition.py,sha256=KwxtaZzFscXGtltte5ljLcRf6pjnVMEB-t2jue5TUs8,1247
@@ -64,7 +64,7 @@ databricks/bundles/jobs/_models/jobs_health_metric.py,sha256=PuBoMBQunQ0P3qwIwMb
 databricks/bundles/jobs/_models/jobs_health_operator.py,sha256=rLuNBch8awFmclOck97hbvnaIQ5Q3IZe0VGTD70AF-g,304
 databricks/bundles/jobs/_models/jobs_health_rule.py,sha256=Shtngs9qtCFkbF3pX_xsyo4aPYJbbid7HZNxvejC2uo,1437
 databricks/bundles/jobs/_models/jobs_health_rules.py,sha256=Csu88zyYGGpyml0dBIB-QJ3GgTOTXdtHtVY_JPKHte0,1067
-databricks/bundles/jobs/_models/library.py,sha256=E-j76-hgWCzX9xJl9eS2v177CVZPl9oCjN1muvU09uE,4626
+databricks/bundles/jobs/_models/library.py,sha256=P8vGEWiLInw2Mj3dVWYadvsTpH-0EqtANWdxccqX778,5063
 databricks/bundles/jobs/_models/local_file_info.py,sha256=yH12cJKjPrFQxG69DgAdp87PIYVGfjECvabbRPqKZjI,952
 databricks/bundles/jobs/_models/log_analytics_info.py,sha256=JeEeyNcldckin93yy4xCzWwcgN9iMxGwVyNOP0gpVys,1343
 databricks/bundles/jobs/_models/maven_library.py,sha256=xz7BIo3XZ4xfp9S3sovADLxDY_DVcZbqduS8VTo1La4,2002
@@ -87,7 +87,7 @@ databricks/bundles/jobs/_models/run_job_task.py,sha256=bdoWeULZIoZcD-aHT6tTVz68w
 databricks/bundles/jobs/_models/runtime_engine.py,sha256=BFMkK6TJgZJI7Gfs9r1tMPCDJmnfDFbN8v5b4EiVjCk,221
 databricks/bundles/jobs/_models/s3_storage_info.py,sha256=9DVWOFKrxGXijUnctwuB0_kANXRazPUPNSfmugJVuio,4595
 databricks/bundles/jobs/_models/source.py,sha256=qsL2OJ6wTiWgY_iuOt6pB0HgsA2ASSyslv8ktZ-cTuM,654
-databricks/bundles/jobs/_models/spark_jar_task.py,sha256=bL5LM2DJPJHyVnAAngCpPiibPvjLTX87Nh9F4Avub4Y,1890
+databricks/bundles/jobs/_models/spark_jar_task.py,sha256=UNt1whPeMu3XpZ2H0iGyHzB6YLB6hkVQT2TIl1L-Qgg,2589
 databricks/bundles/jobs/_models/spark_python_task.py,sha256=IPBPR0RFfGVwsIUqzMj7ZYREPG0T_zDv4EX5hDKRlgg,3283
 databricks/bundles/jobs/_models/spark_submit_task.py,sha256=wqtXcaOJ8-_aKkJVTj-NZtSSwifbKy_rBddOPeQbrBA,1313
 databricks/bundles/jobs/_models/sql_task.py,sha256=XlMc_V5QQDUz6jhR5QhpQEd7g13ia1GksymWcoyK2eU,3206
@@ -102,15 +102,16 @@ databricks/bundles/jobs/_models/subscription_subscriber.py,sha256=aD9IKIwqE0LeTc
 databricks/bundles/jobs/_models/table_update_trigger_configuration.py,sha256=du2LCBsM7iGrxNet_eCgw592-A1iCsEWwG8KR8DI4I8,2808
 databricks/bundles/jobs/_models/task.py,sha256=9N3MbYkbnEXbwH5kAQTTMF4EhSk77ggrkdNEfRtRc2E,19332
 databricks/bundles/jobs/_models/task_dependency.py,sha256=aDo85ulTS46OduT357cLP4G013zojY9IAJUCbJk85RA,1328
-databricks/bundles/jobs/_models/task_email_notifications.py,sha256=6rc9MMjy5Ice8sIv9iI-YP8XTS62goi9MlOolnegSTw,4720
+databricks/bundles/jobs/_models/task_email_notifications.py,sha256=3s7JnOY2ZMhDiPVk8Da0m3e5URijCTncRlnTH19XMEs,5315
 databricks/bundles/jobs/_models/task_notification_settings.py,sha256=CzMzpjLDR1oWEjPArI2S4RMM5k7QkBk5yZVrbB43LMo,2086
+databricks/bundles/jobs/_models/task_retry_mode.py,sha256=Ds9-8lvJYAdaO-iLubaUTMq06UC-WCsKH6td9KbMO-s,542
 databricks/bundles/jobs/_models/trigger_settings.py,sha256=C9pxELtK86n5geMH0PHMCC6qPXBvpy7B4gamuvM9tl8,2297
 databricks/bundles/jobs/_models/volumes_storage_info.py,sha256=31pQ9fnqQGhT2mD_ScjEhy-dm0307ne7iP_gxfcJXDY,1253
 databricks/bundles/jobs/_models/webhook.py,sha256=S209r8QqufJLRoACU6a0MnTzuKOvn3r6p91u-7nbFhQ,744
 databricks/bundles/jobs/_models/webhook_notifications.py,sha256=4FrMTYy4tDeMe3VqSbn9jjNYISTAmYTzENpGvnQGju4,4349
 databricks/bundles/jobs/_models/workload_type.py,sha256=A8KViUIB4x_gEXVS2p4KTGZ9Lr50Z3LLzIYxyE676xw,1162
 databricks/bundles/jobs/_models/workspace_storage_info.py,sha256=Qnm6lsw9rwXB7Te_Um0c7TvIH4Vv7ndKKYYV0pxJ6q8,1100
-databricks/bundles/pipelines/__init__.py,sha256=s6nDHXU0F0zHxzdEidm69JFGAdayPGC4v_GxzdX3E94,9905
+databricks/bundles/pipelines/__init__.py,sha256=2Mc6R8Wb5BAdxdF_GoQfEFM-vG-X-0ewtnDf5n7oDSY,10858
 databricks/bundles/pipelines/_models/adlsgen2_info.py,sha256=_eGe6ivi2VmokxKxKUji9-fSZLBubAr43uBDnN7vvlY,1104
 databricks/bundles/pipelines/_models/aws_attributes.py,sha256=sKKsOBfsg21soMJrdv3ETHIKg40LVGZWirlqSlQ2n8o,10191
 databricks/bundles/pipelines/_models/aws_availability.py,sha256=C4yzZLt_CSIxlZ3MoCV4MPdQRjVRgRU_vkrLIkL6hkQ,477
@@ -123,22 +124,22 @@ databricks/bundles/pipelines/_models/ebs_volume_type.py,sha256=-93BcybklhLyQEZvF
 databricks/bundles/pipelines/_models/event_log_spec.py,sha256=diWtjzD4xBbfgeEuVl_XYaOXEyTvSK40VLRue_1Ads0,1428
 databricks/bundles/pipelines/_models/file_library.py,sha256=qSGlo9OHCOVJol-kuxX5Ahmq4Ytafo1UH1F6j3u_O50,923
 databricks/bundles/pipelines/_models/filters.py,sha256=9MDrS5ZRDwiu7a3l47Cw1IMNj7hnwA_II5rvgs0fFJk,1053
-databricks/bundles/pipelines/_models/gcp_attributes.py,sha256=v36rCfbeQWEMSN5e5jAOiIKWfVyCvZ3-x879bb_XnrE,5098
+databricks/bundles/pipelines/_models/gcp_attributes.py,sha256=XzJQ9RXRvNitnP3QHpYf4Az8gOCe6__qnD6OTXJab1U,5791
 databricks/bundles/pipelines/_models/gcp_availability.py,sha256=a2ayWsyEQDpIDx-mwDIx_p1VJpcPaUme4ndqbpc4uNs,556
 databricks/bundles/pipelines/_models/gcs_storage_info.py,sha256=hwOowyNKCBhzsUiCQSrtmQPxrMINEq5jg2EefkrE2fQ,1020
 databricks/bundles/pipelines/_models/ingestion_config.py,sha256=ERT5ySyVIVJ0T3r3tqjuCFzV5FEIJsfWZvbMmRTHNMk,1679
-databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py,sha256=6uwiZ2k-v6qGlPCnNCExYhF8c3qVyCRpacbJnZfU2ow,2577
-databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py,sha256=awLNh8jJebDBadzfXeZ_rDTlLoLVzEhlPtmM9CdTA_I,3581
+databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py,sha256=UkpHUaelJYcEZ-3r8FRwEpUcQxHrvQoSVsmadmlQaBU,3030
+databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py,sha256=cafOoHyCon63lBBVGzXGv7Us8csToU-mMhIf48wZfcA,4020
 databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py,sha256=IXJqQjQ2aORHJZy1PZ3ST4hx4HVh7O4b-JoKoxYR9QE,5181
 databricks/bundles/pipelines/_models/ingestion_source_type.py,sha256=IlLe3o4y0bfBLjk9wdMrzcNrp3t4XpuyGdQ2P1_FPo4,1080
-databricks/bundles/pipelines/_models/init_script_info.py,sha256=NUH3ooYQcQJ5WB6I046SgOZunvpLFJFwV_lGwfJeRWk,4052
+databricks/bundles/pipelines/_models/init_script_info.py,sha256=PM1qnkkC_fR97DSYML2-UJLGFKl1bGvttESfKosFSM8,4546
 databricks/bundles/pipelines/_models/local_file_info.py,sha256=yH12cJKjPrFQxG69DgAdp87PIYVGfjECvabbRPqKZjI,952
 databricks/bundles/pipelines/_models/log_analytics_info.py,sha256=JeEeyNcldckin93yy4xCzWwcgN9iMxGwVyNOP0gpVys,1343
 databricks/bundles/pipelines/_models/maven_library.py,sha256=xz7BIo3XZ4xfp9S3sovADLxDY_DVcZbqduS8VTo1La4,2002
 databricks/bundles/pipelines/_models/notebook_library.py,sha256=YFEBdlvoNfB3oLPz-w-n_HBQrRVzFD9pbu-BPza88Rk,951
 databricks/bundles/pipelines/_models/notifications.py,sha256=Q7xHA5Bii9Zhgr2TISYF9mWKqu-6RzGO76gLexLMM3c,1987
 databricks/bundles/pipelines/_models/path_pattern.py,sha256=X3DRx7GiZzaUFC_lHRcZFGdbmUB3YyZfrgu9TWZH3LM,935
-databricks/bundles/pipelines/_models/pipeline.py,sha256=TPFdzT2On2doHzhN9aE0yx9H8GAE_cH6OTw7izM1tgU,10903
+databricks/bundles/pipelines/_models/pipeline.py,sha256=XLLOSFRVTDUwZ8TwlEagZevnclY3zztncHCwv2tBeTY,10929
 databricks/bundles/pipelines/_models/pipeline_cluster.py,sha256=O7orq6-TZex8UpHrQLozM8Eq55bGlcDw2byB7wV4p_k,13306
 databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py,sha256=tKrqppzu25MVaPo5tSPjhHuTGXBYwvXLJ6pSGcTUaes,2685
 databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py,sha256=WhhfyIZEI4jlqoj9ks9lLYeTxRxLVLcP6I30Wqx6p8A,619
@@ -146,11 +147,15 @@ databricks/bundles/pipelines/_models/pipeline_library.py,sha256=fkp4382MTXtMrRsf
 databricks/bundles/pipelines/_models/pipeline_permission.py,sha256=PMK6c0qHliiDjPRN4etPBYlitp_hRETtIV89UDWd6V8,1323
 databricks/bundles/pipelines/_models/pipeline_permission_level.py,sha256=ULGO2nLL1Z1vXiJMZ9d9rWZIQTe-Ghk7XrOn3uJmVKA,318
 databricks/bundles/pipelines/_models/pipelines_environment.py,sha256=UDiizKOkHTqflghT0lFnoXI1npRGXIW-Ji5dwQ7_GHk,1898
+databricks/bundles/pipelines/_models/postgres_catalog_config.py,sha256=Tuh2H8b0WrZbf7qsBUQcBCl_oEdiArbFxMdlrWzcg4A,1412
+databricks/bundles/pipelines/_models/postgres_slot_config.py,sha256=h5J4I3nNGA_IMnyFHqZ1BPTiWb_gn36OQeAoBcAF9ZI,1595
 databricks/bundles/pipelines/_models/report_spec.py,sha256=Yi6ReiD7zm2T8mCn0cdFCPke9VDKOosGhVTO4PBKXHg,2318
 databricks/bundles/pipelines/_models/restart_window.py,sha256=FefU_DTmOwVKCm6jBlcSyjqAWmc613_emmo0vuzET78,2408
 databricks/bundles/pipelines/_models/run_as.py,sha256=iKIqp6PMtVQGWM5HzNPf3EF9NKZB0RYV-6RDCEmJVDA,1719
 databricks/bundles/pipelines/_models/s3_storage_info.py,sha256=9DVWOFKrxGXijUnctwuB0_kANXRazPUPNSfmugJVuio,4595
 databricks/bundles/pipelines/_models/schema_spec.py,sha256=nNXx-JK2jTPDWJ490yy8DG7gB0_b6My2G3ZhlgGf8zY,2690
+databricks/bundles/pipelines/_models/source_catalog_config.py,sha256=x3f5f8FoPJQ2tiBWGriodH7a5Z3c9bDszz9PA0gWyKk,1670
+databricks/bundles/pipelines/_models/source_config.py,sha256=dv6PigMxxWz5WrIopZkNlJIh6SqfynlPiuolmd4qsRQ,1236
 databricks/bundles/pipelines/_models/table_spec.py,sha256=3w9nTGzOKDhUgEtfx04i6tN3c4UDCsSaXW-zlwXgqGQ,3033
 databricks/bundles/pipelines/_models/table_specific_config.py,sha256=oKdZMNeBpgYCQaise6c0tInAqdPvDN925yRlQhDhAHw,5022
 databricks/bundles/pipelines/_models/table_specific_config_scd_type.py,sha256=_RO5oXr_b4ibygpeWXmkil24TnRQZKxbpjTx-g5qc2Q,404
@@ -161,7 +166,7 @@ databricks/bundles/volumes/_models/volume.py,sha256=TGobGer4e_122jaeyuTwjDAVIrvT
 databricks/bundles/volumes/_models/volume_grant.py,sha256=U_-4-KL8LM3n5xJBLHj_wjPsqiVjCDRj8ttiUYqFRmI,1083
 databricks/bundles/volumes/_models/volume_grant_privilege.py,sha256=fCA0LVE9Q3sbHvTAj7e62E9ASq9jH5oK1iREQdp1TxQ,384
 databricks/bundles/volumes/_models/volume_type.py,sha256=fttRjiYj8qXp7qqs_IhMopATBoU4izOEXLODY2rIkik,511
-databricks_bundles-0.266.0.dist-info/licenses/LICENSE,sha256=QKOZO8KtzbS_Qt3Tbl0dfGnidaeilKe0UiIjnEq1tjc,3790
-databricks_bundles-0.266.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-databricks_bundles-0.266.0.dist-info/METADATA,sha256=uD8aYMZxYAdxeK4WdVN3KiDmNhfSpF1GOZLngPKeYnw,1541
-databricks_bundles-0.266.0.dist-info/RECORD,,
+databricks_bundles-0.267.0.dist-info/licenses/LICENSE,sha256=QKOZO8KtzbS_Qt3Tbl0dfGnidaeilKe0UiIjnEq1tjc,3790
+databricks_bundles-0.267.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+databricks_bundles-0.267.0.dist-info/METADATA,sha256=FNRQuS7ohc-bXb7vjtCnoPrEuayoEEk6q4I3ztC3lsk,1541
+databricks_bundles-0.267.0.dist-info/RECORD,,