databricks-bundles 0.270.0-py3-none-any.whl → 0.272.0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
@@ -528,11 +528,23 @@ def _relativize_location(location: Location) -> Location:
 
 
 def _relativize_path(path: str) -> str:
+    """
+    Attempt to relativize an absolute path to the current working directory.
+
+    If the path is not absolute or cannot be relativized, return it as is.
+    Used to relativize paths in locations to show shorter paths in diagnostics.
+    """
+
     if not os.path.isabs(path):
         return path
 
     cwd = os.getcwd()
-    common = os.path.commonpath([os.getcwd(), path])
+
+    try:
+        common = os.path.commonpath([cwd, path])
+    except ValueError:
+        # On Windows, paths on different drives don't have a common path
+        return path
 
     if common == cwd:
         return os.path.relpath(path, cwd)
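For context, the new `try`/`except ValueError` guards a documented failure mode of Python's `os.path.commonpath`, which raises when its inputs share no common prefix (for example, paths on different Windows drives, or a mix of absolute and relative paths). A minimal standalone sketch of the case the patch now handles, using only the standard library:

```python
import ntpath  # os.path is ntpath on Windows; calling ntpath directly keeps the demo portable

try:
    ntpath.commonpath([r"C:\work\bundle", r"D:\other\file.py"])
except ValueError as exc:
    # Raised because the paths live on different drives, which is exactly the
    # situation in which the patched _relativize_path now returns the path unchanged.
    print(f"commonpath failed: {exc}")
```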
@@ -131,6 +131,8 @@ __all__ = [
     "JobsHealthRules",
     "JobsHealthRulesDict",
     "JobsHealthRulesParam",
+    "Kind",
+    "KindParam",
     "Library",
     "LibraryDict",
     "LibraryParam",
@@ -484,6 +486,7 @@ from databricks.bundles.jobs._models.jobs_health_rules import (
     JobsHealthRulesDict,
     JobsHealthRulesParam,
 )
+from databricks.bundles.jobs._models.kind import Kind, KindParam
 from databricks.bundles.jobs._models.library import Library, LibraryDict, LibraryParam
 from databricks.bundles.jobs._models.lifecycle import (
     Lifecycle,
@@ -37,6 +37,7 @@ from databricks.bundles.jobs._models.init_script_info import (
     InitScriptInfo,
     InitScriptInfoParam,
 )
+from databricks.bundles.jobs._models.kind import Kind, KindParam
 from databricks.bundles.jobs._models.runtime_engine import (
     RuntimeEngine,
     RuntimeEngineParam,
@@ -171,6 +172,8 @@ class ClusterSpec:
     When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`
     """
 
+    kind: VariableOrOptional[Kind] = None
+
     node_type_id: VariableOrOptional[str] = None
     """
     This field encodes, through a single value, the resources available to each of
@@ -384,6 +387,8 @@ class ClusterSpecDict(TypedDict, total=False):
     When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`
     """
 
+    kind: VariableOrOptional[KindParam]
+
     node_type_id: VariableOrOptional[str]
     """
     This field encodes, through a single value, the resources available to each of
@@ -33,7 +33,7 @@ class Environment:
     The version is a string, consisting of an integer.
     """
 
-    jar_dependencies: VariableOrList[str] = field(default_factory=list)
+    java_dependencies: VariableOrList[str] = field(default_factory=list)
     """
     :meta private: [EXPERIMENTAL]
 
@@ -68,7 +68,7 @@ class EnvironmentDict(TypedDict, total=False):
     The version is a string, consisting of an integer.
     """
 
-    jar_dependencies: VariableOrList[str]
+    java_dependencies: VariableOrList[str]
     """
     :meta private: [EXPERIMENTAL]
 
@@ -0,0 +1,9 @@
+from enum import Enum
+from typing import Literal
+
+
+class Kind(Enum):
+    CLASSIC_PREVIEW = "CLASSIC_PREVIEW"
+
+
+KindParam = Literal["CLASSIC_PREVIEW"] | Kind
@@ -11,7 +11,9 @@ if TYPE_CHECKING:
 
 @dataclass(kw_only=True)
 class SparkSubmitTask:
-    """"""
+    """
+    [DEPRECATED]
+    """
 
     parameters: VariableOrList[str] = field(default_factory=list)
     """
@@ -104,7 +104,7 @@ class Task:
 
     clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTask] = None
     """
-    The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook
+    The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
     when the `clean_rooms_notebook_task` field is present.
     """
 
@@ -145,6 +145,13 @@ class Task:
     An option to disable auto optimization in serverless
     """
 
+    disabled: VariableOrOptional[bool] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+    """
+
     email_notifications: VariableOrOptional[TaskEmailNotifications] = None
     """
     An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
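A short sketch of the new flag in a job definition, using the dict form accepted by the bundle loader; the task key and notebook path are hypothetical, and the field is marked experimental in the hunk above:

```python
from databricks.bundles.jobs import TaskDict  # assumed re-export

task: TaskDict = {
    "task_key": "nightly_refresh",  # hypothetical task
    "notebook_task": {"notebook_path": "./notebooks/refresh.py"},
    "disabled": True,  # new in 0.272.0: the task is skipped when the job runs
}
```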
@@ -261,15 +268,7 @@ class Task:
 
     spark_submit_task: VariableOrOptional[SparkSubmitTask] = None
     """
-    (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+    [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
     """
 
     sql_task: VariableOrOptional[SqlTask] = None
@@ -307,7 +306,7 @@ class TaskDict(TypedDict, total=False):
 
     clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTaskParam]
     """
-    The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook
+    The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook
     when the `clean_rooms_notebook_task` field is present.
     """
 
@@ -348,6 +347,13 @@ class TaskDict(TypedDict, total=False):
     An option to disable auto optimization in serverless
     """
 
+    disabled: VariableOrOptional[bool]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.
+    """
+
     email_notifications: VariableOrOptional[TaskEmailNotificationsParam]
     """
     An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
@@ -464,15 +470,7 @@ class TaskDict(TypedDict, total=False):
 
     spark_submit_task: VariableOrOptional[SparkSubmitTaskParam]
     """
-    (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.
+    [DEPRECATED] (Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).
     """
 
     sql_task: VariableOrOptional[SqlTaskParam]
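The rewritten docstring points users at `spark_jar_task` as the replacement. A hedged sketch of that replacement in the dict form; the task key, class name, and jar location are hypothetical:

```python
from databricks.bundles.jobs import TaskDict  # assumed re-export

# Instead of a spark_submit_task, the deprecation note suggests a spark_jar_task:
jar_task: TaskDict = {
    "task_key": "run_etl_jar",  # hypothetical
    "spark_jar_task": {"main_class_name": "com.example.etl.Main"},
    "libraries": [{"jar": "/Volumes/main/libs/etl.jar"}],  # hypothetical jar location
}
```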
@@ -51,6 +51,12 @@ __all__ = [
     "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig",
     "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict",
     "IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam",
+    "IngestionPipelineDefinitionWorkdayReportParameters",
+    "IngestionPipelineDefinitionWorkdayReportParametersDict",
+    "IngestionPipelineDefinitionWorkdayReportParametersParam",
+    "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue",
+    "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict",
+    "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam",
     "IngestionSourceType",
     "IngestionSourceTypeParam",
     "InitScriptInfo",
@@ -230,6 +236,16 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_sp
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigDict,
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
 )
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import (
+    IngestionPipelineDefinitionWorkdayReportParameters,
+    IngestionPipelineDefinitionWorkdayReportParametersDict,
+    IngestionPipelineDefinitionWorkdayReportParametersParam,
+)
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import (
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue,
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict,
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam,
+)
 from databricks.bundles.pipelines._models.ingestion_source_type import (
     IngestionSourceType,
     IngestionSourceTypeParam,
@@ -39,6 +39,15 @@ class IngestionPipelineDefinition:
     Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
     """
 
+    netsuite_jar_path: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Netsuite only configuration. When the field is set for a netsuite connector,
+    the jar stored in the field will be validated and added to the classpath of
+    pipeline's cluster.
+    """
+
     objects: VariableOrList[IngestionConfig] = field(default_factory=list)
     """
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
@@ -84,6 +93,15 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.
     """
 
+    netsuite_jar_path: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Netsuite only configuration. When the field is set for a netsuite connector,
+    the jar stored in the field will be validated and added to the classpath of
+    pipeline's cluster.
+    """
+
     objects: VariableOrList[IngestionConfigParam]
     """
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
@@ -0,0 +1,95 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import (
+    VariableOrDict,
+    VariableOrList,
+    VariableOrOptional,
+)
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters_query_key_value import (
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue,
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class IngestionPipelineDefinitionWorkdayReportParameters:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    incremental: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] (Optional) Marks the report as incremental.
+    This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+    controlled by the `parameters` field.
+    """
+
+    parameters: VariableOrDict[str] = field(default_factory=dict)
+    """
+    Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+    and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+    Example:
+    {
+        "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+        "end_date": "{ current_date() - INTERVAL 1 DAY }"
+    }
+    """
+
+    report_parameters: VariableOrList[
+        IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue
+    ] = field(default_factory=list)
+    """
+    [DEPRECATED] (Optional) Additional custom parameters for Workday Report
+    This field is deprecated and should not be used. Use `parameters` instead.
+    """
+
+    @classmethod
+    def from_dict(
+        cls, value: "IngestionPipelineDefinitionWorkdayReportParametersDict"
+    ) -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "IngestionPipelineDefinitionWorkdayReportParametersDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class IngestionPipelineDefinitionWorkdayReportParametersDict(TypedDict, total=False):
+    """"""
+
+    incremental: VariableOrOptional[bool]
+    """
+    [DEPRECATED] (Optional) Marks the report as incremental.
+    This field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now
+    controlled by the `parameters` field.
+    """
+
+    parameters: VariableOrDict[str]
+    """
+    Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", "end_date"),
+    and the corresponding value is a SQL-like expression used to compute the parameter value at runtime.
+    Example:
+    {
+        "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) }",
+        "end_date": "{ current_date() - INTERVAL 1 DAY }"
+    }
+    """
+
+    report_parameters: VariableOrList[
+        IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam
+    ]
+    """
+    [DEPRECATED] (Optional) Additional custom parameters for Workday Report
+    This field is deprecated and should not be used. Use `parameters` instead.
+    """
+
+
+IngestionPipelineDefinitionWorkdayReportParametersParam = (
+    IngestionPipelineDefinitionWorkdayReportParametersDict
+    | IngestionPipelineDefinitionWorkdayReportParameters
+)
@@ -0,0 +1,70 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    [DEPRECATED]
+    """
+
+    key: VariableOrOptional[str] = None
+    """
+    Key for the report parameter, can be a column name or other metadata
+    """
+
+    value: VariableOrOptional[str] = None
+    """
+    Value for the report parameter.
+    Possible values it can take are these sql functions:
+    1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+    2. current_date()
+    3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
+    """
+
+    @classmethod
+    def from_dict(
+        cls,
+        value: "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict",
+    ) -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(
+        self,
+    ) -> "IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict(
+    TypedDict, total=False
+):
+    """"""
+
+    key: VariableOrOptional[str]
+    """
+    Key for the report parameter, can be a column name or other metadata
+    """
+
+    value: VariableOrOptional[str]
+    """
+    Value for the report parameter.
+    Possible values it can take are these sql functions:
+    1. coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed date, else current_offset()
+    2. current_date()
+    3. date_sub(current_date(), x) -> subtract x (some non-negative integer) days from current date
+    """
+
+
+IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueParam = (
+    IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValueDict
+    | IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue
+)
@@ -21,6 +21,7 @@ class IngestionSourceType(Enum):
     DYNAMICS365 = "DYNAMICS365"
     CONFLUENCE = "CONFLUENCE"
     META_MARKETING = "META_MARKETING"
+    FOREIGN_CATALOG = "FOREIGN_CATALOG"
 
 
 IngestionSourceTypeParam = (
@@ -43,6 +44,7 @@ IngestionSourceTypeParam = (
         "DYNAMICS365",
         "CONFLUENCE",
         "META_MARKETING",
+        "FOREIGN_CATALOG",
     ]
     | IngestionSourceType
 )
@@ -184,9 +184,6 @@ class Pipeline(Resource):
     """
 
     run_as: VariableOrOptional[RunAs] = None
-    """
-    :meta private: [EXPERIMENTAL]
-    """
 
     schema: VariableOrOptional[str] = None
     """
@@ -347,9 +344,6 @@ class PipelineDict(TypedDict, total=False):
     """
 
     run_as: VariableOrOptional[RunAsParam]
-    """
-    :meta private: [EXPERIMENTAL]
-    """
 
     schema: VariableOrOptional[str]
     """
@@ -12,8 +12,6 @@ if TYPE_CHECKING:
 @dataclass(kw_only=True)
 class RunAs:
     """
-    :meta private: [EXPERIMENTAL]
-
     Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
 
     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.
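Dropping the `:meta private: [EXPERIMENTAL]` marker means `run_as` now surfaces in the generated docs as a regular pipeline setting. A hedged sketch, assuming `Pipeline` and `RunAs` are importable from `databricks.bundles.pipelines` and using a hypothetical name and service principal:

```python
from databricks.bundles.pipelines import Pipeline, RunAs  # assumed re-exports

pipeline = Pipeline(
    name="ingest_orders",  # hypothetical pipeline name
    # Exactly one of user_name or service_principal_name may be set.
    run_as=RunAs(service_principal_name="8d3f1a2b-hypothetical-sp"),
)
```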
@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition_table_sp
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
     IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfigParam,
 )
+from databricks.bundles.pipelines._models.ingestion_pipeline_definition_workday_report_parameters import (
+    IngestionPipelineDefinitionWorkdayReportParameters,
+    IngestionPipelineDefinitionWorkdayReportParametersParam,
+)
 from databricks.bundles.pipelines._models.table_specific_config_scd_type import (
     TableSpecificConfigScdType,
     TableSpecificConfigScdTypeParam,
@@ -71,6 +75,15 @@ class TableSpecificConfig:
     The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
     """
 
+    workday_report_parameters: VariableOrOptional[
+        IngestionPipelineDefinitionWorkdayReportParameters
+    ] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    (Optional) Additional custom parameters for Workday Report
+    """
+
     @classmethod
     def from_dict(cls, value: "TableSpecificConfigDict") -> "Self":
         return _transform(cls, value)
@@ -132,5 +145,14 @@ class TableSpecificConfigDict(TypedDict, total=False):
     The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
     """
 
+    workday_report_parameters: VariableOrOptional[
+        IngestionPipelineDefinitionWorkdayReportParametersParam
+    ]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    (Optional) Additional custom parameters for Workday Report
+    """
+
 
 TableSpecificConfigParam = TableSpecificConfigDict | TableSpecificConfig
@@ -1 +1 @@
-__version__ = "0.270.0"
+__version__ = "0.272.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.270.0
+Version: 0.272.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.270.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.272.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
 ```bash
@@ -1,8 +1,8 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
 databricks/bundles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-databricks/bundles/build.py,sha256=mdYLkUyzErQrXIaw_AdLclIJyheh-lNoPkGaRkWXYDc,17458
+databricks/bundles/build.py,sha256=Qx40q9qEtIKK7e6OyQHUZGdY045JBchjjvOPBB8TgbM,17830
 databricks/bundles/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
-databricks/bundles/version.py,sha256=EIz-qO4FPt40xSI6qRnIo_w1_qqPttPXWDiNyKL8AVc,24
+databricks/bundles/version.py,sha256=2go-ela2DnYWKyaCseRoM72vhutwhGTGUvCINuO0UWk,24
 databricks/bundles/core/__init__.py,sha256=eivW2LIitfT-doDfPWac26vhdhkSACqoRi_48ZJE8zQ,1357
 databricks/bundles/core/_bundle.py,sha256=B5qINwnoRDitkofFaoCWDCS1tbB3a7nufqK0iDRfC6g,3287
 databricks/bundles/core/_diagnostics.py,sha256=arhPuRpjfOUjr8_T91-b-YM-ZtpkrLWeJv0BcLfTIOo,5879
@@ -15,7 +15,7 @@ databricks/bundles/core/_resources.py,sha256=mJvqyjnOAG1GniRwKYhDSpRnsdqf2QNFA6r
 databricks/bundles/core/_transform.py,sha256=ISupC7du3JnqrQQMgHf5Mt7fpZKEcUNZ5qwgMWZQfAE,8836
 databricks/bundles/core/_transform_to_json.py,sha256=aGiIBVx7pSO9LiJZ5CPYsTA5U6XhCbn1Ew15dhqC0yc,1890
 databricks/bundles/core/_variable.py,sha256=16g6vSLs5J-Ek2u2LNSnbqK-vpgbeirCddWjS-AiLGU,3617
-databricks/bundles/jobs/__init__.py,sha256=uMALr3wDO_BXtgOizjmvj4ZUz7w5g-Ijmz2ESwB_Rw4,19329
+databricks/bundles/jobs/__init__.py,sha256=401hE3tONdr56nGBl7eistqidtf-Z-dtuk4qzsqWmXI,19423
 databricks/bundles/jobs/_models/adlsgen2_info.py,sha256=_eGe6ivi2VmokxKxKUji9-fSZLBubAr43uBDnN7vvlY,1104
 databricks/bundles/jobs/_models/authentication_method.py,sha256=XI8hU5fhPlGTsZdePZtR6FIjyT4iEt2URb61q1MsgNI,198
 databricks/bundles/jobs/_models/auto_scale.py,sha256=Z5vZa0bZi6LJ_Ac-VJfqqCJAtI-zY1_auTGhsV8khvA,1624
@@ -26,7 +26,7 @@ databricks/bundles/jobs/_models/azure_availability.py,sha256=KY8dt1vWA8RHBY3nq5C
 databricks/bundles/jobs/_models/clean_rooms_notebook_task.py,sha256=zgzDes0v3AEhd_2WXaq1xUo_hKtG927x4qzFzwvfbaY,2016
 databricks/bundles/jobs/_models/clients_types.py,sha256=awwSptxl3zVXgTyoa_lO4JSp3QGVdRaxJBnxEUsmGwU,1199
 databricks/bundles/jobs/_models/cluster_log_conf.py,sha256=CP6C3nTXugvSm9-NlMPfENEcHn6Wl1q_9wqAFesos5o,2682
-databricks/bundles/jobs/_models/cluster_spec.py,sha256=gByL24VeW4MTZAnXc4X2jLU6DygQCW1ssJqEtDzIi3M,19446
+databricks/bundles/jobs/_models/cluster_spec.py,sha256=u2u4xxksKD7qS552DgFUoKLH_AL2h91ayEN2inUfHcQ,19595
 databricks/bundles/jobs/_models/compute_config.py,sha256=61-BdovRQ48n93GlPc3bjfy_U5TJ6ERQrViQzTnAg9s,1267
 databricks/bundles/jobs/_models/condition.py,sha256=79S2RIdrWFY9lwIu8T0mJPuFALFqv1H0JNtM9gSwL4k,258
 databricks/bundles/jobs/_models/condition_task.py,sha256=4A71BNRZNk9_iS8IFiHr69RLgx_qmabnQUwcy_g7e0E,2814
@@ -41,7 +41,7 @@ databricks/bundles/jobs/_models/dbt_task.py,sha256=3OT0GoU1y1DffwXE_YAXXU807Hj2D
 databricks/bundles/jobs/_models/docker_basic_auth.py,sha256=jEbSE8CvnTceOm405NA18IvB1lLCu-Wfe3SPSlsSBG4,1084
 databricks/bundles/jobs/_models/docker_image.py,sha256=h2hp3vnfh_wXxMg6RzNHPfjfb-FMsyABe83XIaX5fA8,1126
 databricks/bundles/jobs/_models/ebs_volume_type.py,sha256=-93BcybklhLyQEZvF8C1BhnHMeCjfDwI2qwimZ6X5eU,415
-databricks/bundles/jobs/_models/environment.py,sha256=9SIxvp8h0Y5Dxp9MsE11VBU6assb3-VdChT8-wXXfjI,2535
+databricks/bundles/jobs/_models/environment.py,sha256=xQc9ktI0SVDBNTn-GoUUecfSrskoYUqMaVrhqem3Q54,2537
 databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py,sha256=VO5bHmN-hywRfg3zfqTdc0YI_5FQRpGLZdc6f6_GKnA,2378
 databricks/bundles/jobs/_models/for_each_task.py,sha256=MP-6c7zUaRcl5EaM-3IfjhX1Jc840GlnRdjj_SlhypI,1810
 databricks/bundles/jobs/_models/gcp_attributes.py,sha256=DdKsEfEIGTtKJFdC_h8JukAQgDhE1yluwWn2X2MIiOk,5786
@@ -64,6 +64,7 @@ databricks/bundles/jobs/_models/jobs_health_metric.py,sha256=PuBoMBQunQ0P3qwIwMb
 databricks/bundles/jobs/_models/jobs_health_operator.py,sha256=rLuNBch8awFmclOck97hbvnaIQ5Q3IZe0VGTD70AF-g,304
 databricks/bundles/jobs/_models/jobs_health_rule.py,sha256=Shtngs9qtCFkbF3pX_xsyo4aPYJbbid7HZNxvejC2uo,1437
 databricks/bundles/jobs/_models/jobs_health_rules.py,sha256=Csu88zyYGGpyml0dBIB-QJ3GgTOTXdtHtVY_JPKHte0,1067
+databricks/bundles/jobs/_models/kind.py,sha256=9glQ0XRn-pcGv5Jyu7-SFecQM_INibbb__KpKd-9CnU,157
 databricks/bundles/jobs/_models/library.py,sha256=P8vGEWiLInw2Mj3dVWYadvsTpH-0EqtANWdxccqX778,5063
 databricks/bundles/jobs/_models/lifecycle.py,sha256=_0h3QZ8LPYTnKC8YDRmW0NbQBxwukG1WsqBu8wZrtO4,985
 databricks/bundles/jobs/_models/local_file_info.py,sha256=yH12cJKjPrFQxG69DgAdp87PIYVGfjECvabbRPqKZjI,952
@@ -90,7 +91,7 @@ databricks/bundles/jobs/_models/s3_storage_info.py,sha256=9DVWOFKrxGXijUnctwuB0_
 databricks/bundles/jobs/_models/source.py,sha256=qsL2OJ6wTiWgY_iuOt6pB0HgsA2ASSyslv8ktZ-cTuM,654
 databricks/bundles/jobs/_models/spark_jar_task.py,sha256=UNt1whPeMu3XpZ2H0iGyHzB6YLB6hkVQT2TIl1L-Qgg,2589
 databricks/bundles/jobs/_models/spark_python_task.py,sha256=IPBPR0RFfGVwsIUqzMj7ZYREPG0T_zDv4EX5hDKRlgg,3283
-databricks/bundles/jobs/_models/spark_submit_task.py,sha256=wqtXcaOJ8-_aKkJVTj-NZtSSwifbKy_rBddOPeQbrBA,1313
+databricks/bundles/jobs/_models/spark_submit_task.py,sha256=R5oWCWsZwHTfyl1KPC_RnY2tYXN1RSFPQ3Ck7QFh3E8,1335
 databricks/bundles/jobs/_models/sql_task.py,sha256=XlMc_V5QQDUz6jhR5QhpQEd7g13ia1GksymWcoyK2eU,3206
 databricks/bundles/jobs/_models/sql_task_alert.py,sha256=zYYXlzYnLoagVjtRnqPlhKG23mAufrd-4UIhtycxwlM,1680
 databricks/bundles/jobs/_models/sql_task_dashboard.py,sha256=LUHQR8zfGE_t9rOTRj3VMPGD164h2CDsc_XQCxxBtdg,2023
@@ -101,7 +102,7 @@ databricks/bundles/jobs/_models/storage_mode.py,sha256=-lccr-rOMw_OZYiivuoBBbOj6
 databricks/bundles/jobs/_models/subscription.py,sha256=ab_GFMlO_9CtKV4yqMk1_UDHNZQ4-ElE3cOddFVPN98,1590
 databricks/bundles/jobs/_models/subscription_subscriber.py,sha256=aD9IKIwqE0LeTchgFNCQypFJGFSngq9SI6kT74fkWiM,991
 databricks/bundles/jobs/_models/table_update_trigger_configuration.py,sha256=du2LCBsM7iGrxNet_eCgw592-A1iCsEWwG8KR8DI4I8,2808
-databricks/bundles/jobs/_models/task.py,sha256=9N3MbYkbnEXbwH5kAQTTMF4EhSk77ggrkdNEfRtRc2E,19332
+databricks/bundles/jobs/_models/task.py,sha256=bhxWk7stToaG6T7MSmfO0om__LP8M1uBpl58dFGwt1s,18473
 databricks/bundles/jobs/_models/task_dependency.py,sha256=aDo85ulTS46OduT357cLP4G013zojY9IAJUCbJk85RA,1328
 databricks/bundles/jobs/_models/task_email_notifications.py,sha256=3s7JnOY2ZMhDiPVk8Da0m3e5URijCTncRlnTH19XMEs,5315
 databricks/bundles/jobs/_models/task_notification_settings.py,sha256=CzMzpjLDR1oWEjPArI2S4RMM5k7QkBk5yZVrbB43LMo,2086
@@ -112,7 +113,7 @@ databricks/bundles/jobs/_models/webhook.py,sha256=S209r8QqufJLRoACU6a0MnTzuKOvn3
 databricks/bundles/jobs/_models/webhook_notifications.py,sha256=4FrMTYy4tDeMe3VqSbn9jjNYISTAmYTzENpGvnQGju4,4349
 databricks/bundles/jobs/_models/workload_type.py,sha256=A8KViUIB4x_gEXVS2p4KTGZ9Lr50Z3LLzIYxyE676xw,1162
 databricks/bundles/jobs/_models/workspace_storage_info.py,sha256=Qnm6lsw9rwXB7Te_Um0c7TvIH4Vv7ndKKYYV0pxJ6q8,1100
-databricks/bundles/pipelines/__init__.py,sha256=Bioe-Mrrq8KJHwBUv1EJUQ2KxuuAbpWlhaOHw2zvLMY,11035
+databricks/bundles/pipelines/__init__.py,sha256=QoEMqHXXj87R5kobtE2GJv7GRu-1nYhsSUC3HsxKQRY,12067
 databricks/bundles/pipelines/_models/adlsgen2_info.py,sha256=_eGe6ivi2VmokxKxKUji9-fSZLBubAr43uBDnN7vvlY,1104
 databricks/bundles/pipelines/_models/aws_attributes.py,sha256=sKKsOBfsg21soMJrdv3ETHIKg40LVGZWirlqSlQ2n8o,10191
 databricks/bundles/pipelines/_models/aws_availability.py,sha256=C4yzZLt_CSIxlZ3MoCV4MPdQRjVRgRU_vkrLIkL6hkQ,477
@@ -130,9 +131,11 @@ databricks/bundles/pipelines/_models/gcp_availability.py,sha256=a2ayWsyEQDpIDx-m
 databricks/bundles/pipelines/_models/gcs_storage_info.py,sha256=hwOowyNKCBhzsUiCQSrtmQPxrMINEq5jg2EefkrE2fQ,1020
 databricks/bundles/pipelines/_models/ingestion_config.py,sha256=ERT5ySyVIVJ0T3r3tqjuCFzV5FEIJsfWZvbMmRTHNMk,1679
 databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py,sha256=UkpHUaelJYcEZ-3r8FRwEpUcQxHrvQoSVsmadmlQaBU,3030
-databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py,sha256=cafOoHyCon63lBBVGzXGv7Us8csToU-mMhIf48wZfcA,4020
+databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py,sha256=vTzpSDIHfmp5U8s1yjX56VLwkqHzN5ynZEQkYAkIPZY,4603
 databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py,sha256=IXJqQjQ2aORHJZy1PZ3ST4hx4HVh7O4b-JoKoxYR9QE,5181
-databricks/bundles/pipelines/_models/ingestion_source_type.py,sha256=IlLe3o4y0bfBLjk9wdMrzcNrp3t4XpuyGdQ2P1_FPo4,1080
+databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py,sha256=3foG3H-6eOTpQbH0JqK3EY27isg6ZCm8w88cufh6HRU,3434
+databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py,sha256=mC5r259VRZKnd0WKi1Toeb1qG2VZglumzVos4Wv8BoQ,2261
+databricks/bundles/pipelines/_models/ingestion_source_type.py,sha256=WjyeHeyW4iNOXlMsoXGoO57qKLuXYg6BkSvB5d5mLxA,1147
 databricks/bundles/pipelines/_models/init_script_info.py,sha256=PM1qnkkC_fR97DSYML2-UJLGFKl1bGvttESfKosFSM8,4546
 databricks/bundles/pipelines/_models/lifecycle.py,sha256=_0h3QZ8LPYTnKC8YDRmW0NbQBxwukG1WsqBu8wZrtO4,985
 databricks/bundles/pipelines/_models/local_file_info.py,sha256=yH12cJKjPrFQxG69DgAdp87PIYVGfjECvabbRPqKZjI,952
@@ -141,7 +144,7 @@ databricks/bundles/pipelines/_models/maven_library.py,sha256=xz7BIo3XZ4xfp9S3sov
 databricks/bundles/pipelines/_models/notebook_library.py,sha256=YFEBdlvoNfB3oLPz-w-n_HBQrRVzFD9pbu-BPza88Rk,951
 databricks/bundles/pipelines/_models/notifications.py,sha256=Q7xHA5Bii9Zhgr2TISYF9mWKqu-6RzGO76gLexLMM3c,1987
 databricks/bundles/pipelines/_models/path_pattern.py,sha256=X3DRx7GiZzaUFC_lHRcZFGdbmUB3YyZfrgu9TWZH3LM,935
-databricks/bundles/pipelines/_models/pipeline.py,sha256=i95ReLydG1uZK5xDHTdMMDrBRQS-HxHDTdAFxAqAcYw,11471
+databricks/bundles/pipelines/_models/pipeline.py,sha256=596tfHmrSiR5s3SwEw3ocS2-dNXWjPo6dPu5pAbc3hg,11371
 databricks/bundles/pipelines/_models/pipeline_cluster.py,sha256=O7orq6-TZex8UpHrQLozM8Eq55bGlcDw2byB7wV4p_k,13306
 databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py,sha256=tKrqppzu25MVaPo5tSPjhHuTGXBYwvXLJ6pSGcTUaes,2685
 databricks/bundles/pipelines/_models/pipeline_cluster_autoscale_mode.py,sha256=WhhfyIZEI4jlqoj9ks9lLYeTxRxLVLcP6I30Wqx6p8A,619
@@ -153,13 +156,13 @@ databricks/bundles/pipelines/_models/postgres_catalog_config.py,sha256=Tuh2H8b0W
 databricks/bundles/pipelines/_models/postgres_slot_config.py,sha256=h5J4I3nNGA_IMnyFHqZ1BPTiWb_gn36OQeAoBcAF9ZI,1595
 databricks/bundles/pipelines/_models/report_spec.py,sha256=Yi6ReiD7zm2T8mCn0cdFCPke9VDKOosGhVTO4PBKXHg,2318
 databricks/bundles/pipelines/_models/restart_window.py,sha256=FefU_DTmOwVKCm6jBlcSyjqAWmc613_emmo0vuzET78,2408
-databricks/bundles/pipelines/_models/run_as.py,sha256=iKIqp6PMtVQGWM5HzNPf3EF9NKZB0RYV-6RDCEmJVDA,1719
+databricks/bundles/pipelines/_models/run_as.py,sha256=rZLJgEIvvX-sZmcCUbgDyLoFu00DEOg6mkRkZb2okZI,1684
 databricks/bundles/pipelines/_models/s3_storage_info.py,sha256=9DVWOFKrxGXijUnctwuB0_kANXRazPUPNSfmugJVuio,4595
 databricks/bundles/pipelines/_models/schema_spec.py,sha256=nNXx-JK2jTPDWJ490yy8DG7gB0_b6My2G3ZhlgGf8zY,2690
 databricks/bundles/pipelines/_models/source_catalog_config.py,sha256=x3f5f8FoPJQ2tiBWGriodH7a5Z3c9bDszz9PA0gWyKk,1670
 databricks/bundles/pipelines/_models/source_config.py,sha256=dv6PigMxxWz5WrIopZkNlJIh6SqfynlPiuolmd4qsRQ,1236
 databricks/bundles/pipelines/_models/table_spec.py,sha256=3w9nTGzOKDhUgEtfx04i6tN3c4UDCsSaXW-zlwXgqGQ,3033
-databricks/bundles/pipelines/_models/table_specific_config.py,sha256=oKdZMNeBpgYCQaise6c0tInAqdPvDN925yRlQhDhAHw,5022
+databricks/bundles/pipelines/_models/table_specific_config.py,sha256=nnAX3YQS0JlVq1ySxYVTvIbpAiAtQOHX1Z0c_G_p4sE,5730
 databricks/bundles/pipelines/_models/table_specific_config_scd_type.py,sha256=_RO5oXr_b4ibygpeWXmkil24TnRQZKxbpjTx-g5qc2Q,404
 databricks/bundles/pipelines/_models/volumes_storage_info.py,sha256=31pQ9fnqQGhT2mD_ScjEhy-dm0307ne7iP_gxfcJXDY,1253
 databricks/bundles/pipelines/_models/workspace_storage_info.py,sha256=Qnm6lsw9rwXB7Te_Um0c7TvIH4Vv7ndKKYYV0pxJ6q8,1100
@@ -174,7 +177,7 @@ databricks/bundles/volumes/_models/volume.py,sha256=ALGmeXW3rGH424pp6SaXPT1I87XX
 databricks/bundles/volumes/_models/volume_grant.py,sha256=U_-4-KL8LM3n5xJBLHj_wjPsqiVjCDRj8ttiUYqFRmI,1083
 databricks/bundles/volumes/_models/volume_grant_privilege.py,sha256=fCA0LVE9Q3sbHvTAj7e62E9ASq9jH5oK1iREQdp1TxQ,384
 databricks/bundles/volumes/_models/volume_type.py,sha256=fttRjiYj8qXp7qqs_IhMopATBoU4izOEXLODY2rIkik,511
-databricks_bundles-0.270.0.dist-info/licenses/LICENSE,sha256=QKOZO8KtzbS_Qt3Tbl0dfGnidaeilKe0UiIjnEq1tjc,3790
-databricks_bundles-0.270.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-databricks_bundles-0.270.0.dist-info/METADATA,sha256=RpPKHmKvoppWhN44Gq5Z5ndz4lAiin5B5U1f_xIKJc8,1541
-databricks_bundles-0.270.0.dist-info/RECORD,,
+databricks_bundles-0.272.0.dist-info/licenses/LICENSE,sha256=QKOZO8KtzbS_Qt3Tbl0dfGnidaeilKe0UiIjnEq1tjc,3790
+databricks_bundles-0.272.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+databricks_bundles-0.272.0.dist-info/METADATA,sha256=kDPrtpzyYnNnmqykg51SamegyMF9A4fsX4uHwp5U8lI,1541
+databricks_bundles-0.272.0.dist-info/RECORD,,