databricks-bundles 0.278.0-py3-none-any.whl → 0.279.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. databricks/bundles/jobs/__init__.py +14 -0
  2. databricks/bundles/jobs/_models/aws_attributes.py +2 -2
  3. databricks/bundles/jobs/_models/dashboard_task.py +4 -1
  4. databricks/bundles/jobs/_models/environment.py +0 -10
  5. databricks/bundles/jobs/_models/job.py +2 -4
  6. databricks/bundles/jobs/_models/model_trigger_configuration.py +98 -0
  7. databricks/bundles/jobs/_models/model_trigger_configuration_condition.py +18 -0
  8. databricks/bundles/jobs/_models/trigger_settings.py +14 -0
  9. databricks/bundles/pipelines/__init__.py +8 -0
  10. databricks/bundles/pipelines/_models/aws_attributes.py +2 -2
  11. databricks/bundles/pipelines/_models/connection_parameters.py +50 -0
  12. databricks/bundles/pipelines/_models/ingestion_config.py +1 -4
  13. databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +20 -2
  14. databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +20 -12
  15. databricks/bundles/pipelines/_models/postgres_catalog_config.py +0 -6
  16. databricks/bundles/pipelines/_models/postgres_slot_config.py +0 -10
  17. databricks/bundles/pipelines/_models/source_catalog_config.py +0 -10
  18. databricks/bundles/pipelines/_models/source_config.py +1 -7
  19. databricks/bundles/pipelines/_models/table_specific_config.py +2 -6
  20. databricks/bundles/version.py +1 -1
  21. {databricks_bundles-0.278.0.dist-info → databricks_bundles-0.279.0.dist-info}/METADATA +2 -2
  22. {databricks_bundles-0.278.0.dist-info → databricks_bundles-0.279.0.dist-info}/RECORD +24 -21
  23. {databricks_bundles-0.278.0.dist-info → databricks_bundles-0.279.0.dist-info}/WHEEL +0 -0
  24. {databricks_bundles-0.278.0.dist-info → databricks_bundles-0.279.0.dist-info}/licenses/LICENSE +0 -0

databricks/bundles/jobs/__init__.py

@@ -148,6 +148,11 @@ __all__ = [
  "MavenLibrary",
  "MavenLibraryDict",
  "MavenLibraryParam",
+ "ModelTriggerConfiguration",
+ "ModelTriggerConfigurationCondition",
+ "ModelTriggerConfigurationConditionParam",
+ "ModelTriggerConfigurationDict",
+ "ModelTriggerConfigurationParam",
  "NotebookTask",
  "NotebookTaskDict",
  "NotebookTaskParam",

@@ -508,6 +513,15 @@ from databricks.bundles.jobs._models.maven_library import (
  MavenLibraryDict,
  MavenLibraryParam,
  )
+ from databricks.bundles.jobs._models.model_trigger_configuration import (
+ ModelTriggerConfiguration,
+ ModelTriggerConfigurationDict,
+ ModelTriggerConfigurationParam,
+ )
+ from databricks.bundles.jobs._models.model_trigger_configuration_condition import (
+ ModelTriggerConfigurationCondition,
+ ModelTriggerConfigurationConditionParam,
+ )
  from databricks.bundles.jobs._models.notebook_task import (
  NotebookTask,
  NotebookTaskDict,
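
The five new model-trigger names are re-exported at the package level, so bundle code can import them from `databricks.bundles.jobs` rather than the private `_models` modules. A minimal sketch of the new import surface (the condition enum and its literal `Param` alias are defined later in this diff):

```python
from databricks.bundles.jobs import (
    ModelTriggerConfigurationCondition,
    ModelTriggerConfigurationConditionParam,
)

# The Param alias accepts either the enum member or its literal string form.
as_enum: ModelTriggerConfigurationConditionParam = (
    ModelTriggerConfigurationCondition.MODEL_VERSION_READY
)
as_literal: ModelTriggerConfigurationConditionParam = "MODEL_VERSION_READY"
```
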

databricks/bundles/jobs/_models/aws_attributes.py

@@ -104,7 +104,7 @@ class AwsAttributes:
  This string will be of a form like "us-west-2a". The provided availability
  zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
  is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
- This is an optional field at cluster creation, and if not specified, a default zone will be used.
+ This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
  If the zone specified is "auto", will try to place cluster in a zone with high availability,
  and will retry placement in a different AZ if there is not enough capacity.

@@ -204,7 +204,7 @@ class AwsAttributesDict(TypedDict, total=False):
  This string will be of a form like "us-west-2a". The provided availability
  zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
  is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
- This is an optional field at cluster creation, and if not specified, a default zone will be used.
+ This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
  If the zone specified is "auto", will try to place cluster in a zone with high availability,
  and will retry placement in a different AZ if there is not enough capacity.


databricks/bundles/jobs/_models/dashboard_task.py

@@ -4,7 +4,10 @@ from typing import TYPE_CHECKING, TypedDict
  from databricks.bundles.core._transform import _transform
  from databricks.bundles.core._transform_to_json import _transform_to_json_value
  from databricks.bundles.core._variable import VariableOrOptional
- from databricks.bundles.jobs._models.subscription import Subscription, SubscriptionParam
+ from databricks.bundles.jobs._models.subscription import (
+ Subscription,
+ SubscriptionParam,
+ )

  if TYPE_CHECKING:
  from typing_extensions import Self

databricks/bundles/jobs/_models/environment.py

@@ -34,11 +34,6 @@ class Environment:
  """

  java_dependencies: VariableOrList[str] = field(default_factory=list)
- """
- :meta private: [EXPERIMENTAL]
-
- List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`.
- """

  @classmethod
  def from_dict(cls, value: "EnvironmentDict") -> "Self":

@@ -69,11 +64,6 @@ class EnvironmentDict(TypedDict, total=False):
  """

  java_dependencies: VariableOrList[str]
- """
- :meta private: [EXPERIMENTAL]
-
- List of java dependencies. Each dependency is a string representing a java library path. For example: `/Volumes/path/to/test.jar`.
- """


  EnvironmentParam = EnvironmentDict | Environment

databricks/bundles/jobs/_models/job.py

@@ -96,8 +96,7 @@ class Job(Resource):
  environments: VariableOrList[JobEnvironment] = field(default_factory=list)
  """
  A list of task execution environment specifications that can be referenced by serverless tasks of this job.
- An environment is required to be present for serverless tasks.
- For serverless notebook tasks, the environment is accessible in the notebook environment panel.
+ For serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.
  For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.
  """

@@ -241,8 +240,7 @@ class JobDict(TypedDict, total=False):
  environments: VariableOrList[JobEnvironmentParam]
  """
  A list of task execution environment specifications that can be referenced by serverless tasks of this job.
- An environment is required to be present for serverless tasks.
- For serverless notebook tasks, the environment is accessible in the notebook environment panel.
+ For serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.
  For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.
  """
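
The reworded docstring describes a precedence rule for serverless notebook tasks: with no `environment_key`, the notebook's own environment is used, and a referenced job environment overrides it. A hedged sketch in dict form (the task and environment field names are taken from the Jobs API and are not part of this hunk):

```python
from databricks.bundles.jobs import Job

job = Job.from_dict(
    {
        "name": "serverless_notebook_job",
        "environments": [
            {
                "environment_key": "default",
                "spec": {"dependencies": ["pandas"]},
            }
        ],
        "tasks": [
            {
                "task_key": "notebook",
                "notebook_task": {"notebook_path": "/Workspace/Users/me/nb"},
                # With environment_key set, the job environment above overrides
                # the environment stored in the notebook; omit it to fall back
                # to the notebook environment, if present.
                "environment_key": "default",
            }
        ],
    }
)
```
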
 

databricks/bundles/jobs/_models/model_trigger_configuration.py (new file)

@@ -0,0 +1,98 @@
+ from dataclasses import dataclass, field
+ from typing import TYPE_CHECKING, TypedDict
+
+ from databricks.bundles.core._transform import _transform
+ from databricks.bundles.core._transform_to_json import _transform_to_json_value
+ from databricks.bundles.core._variable import (
+ VariableOr,
+ VariableOrList,
+ VariableOrOptional,
+ )
+ from databricks.bundles.jobs._models.model_trigger_configuration_condition import (
+ ModelTriggerConfigurationCondition,
+ ModelTriggerConfigurationConditionParam,
+ )
+
+ if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
+ @dataclass(kw_only=True)
+ class ModelTriggerConfiguration:
+ """
+ :meta private: [EXPERIMENTAL]
+ """
+
+ condition: VariableOr[ModelTriggerConfigurationCondition]
+ """
+ The condition based on which to trigger a job run.
+ """
+
+ aliases: VariableOrList[str] = field(default_factory=list)
+ """
+ Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.
+ """
+
+ min_time_between_triggers_seconds: VariableOrOptional[int] = None
+ """
+ If set, the trigger starts a run only after the specified amount of time has passed since
+ the last time the trigger fired. The minimum allowed value is 60 seconds.
+ """
+
+ securable_name: VariableOrOptional[str] = None
+ """
+ Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level triggers,
+ "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of metastore-level triggers.
+ """
+
+ wait_after_last_change_seconds: VariableOrOptional[int] = None
+ """
+ If set, the trigger starts a run only after no model updates have occurred for the specified time
+ and can be used to wait for a series of model updates before triggering a run. The
+ minimum allowed value is 60 seconds.
+ """
+
+ @classmethod
+ def from_dict(cls, value: "ModelTriggerConfigurationDict") -> "Self":
+ return _transform(cls, value)
+
+ def as_dict(self) -> "ModelTriggerConfigurationDict":
+ return _transform_to_json_value(self) # type:ignore
+
+
+ class ModelTriggerConfigurationDict(TypedDict, total=False):
+ """"""
+
+ condition: VariableOr[ModelTriggerConfigurationConditionParam]
+ """
+ The condition based on which to trigger a job run.
+ """
+
+ aliases: VariableOrList[str]
+ """
+ Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.
+ """
+
+ min_time_between_triggers_seconds: VariableOrOptional[int]
+ """
+ If set, the trigger starts a run only after the specified amount of time has passed since
+ the last time the trigger fired. The minimum allowed value is 60 seconds.
+ """
+
+ securable_name: VariableOrOptional[str]
+ """
+ Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level triggers,
+ "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of metastore-level triggers.
+ """
+
+ wait_after_last_change_seconds: VariableOrOptional[int]
+ """
+ If set, the trigger starts a run only after no model updates have occurred for the specified time
+ and can be used to wait for a series of model updates before triggering a run. The
+ minimum allowed value is 60 seconds.
+ """
+
+
+ ModelTriggerConfigurationParam = (
+ ModelTriggerConfigurationDict | ModelTriggerConfiguration
+ )
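
Like the other generated models, the new trigger configuration exists in dataclass, TypedDict, and `*Param` forms and round-trips through `from_dict`/`as_dict`. A brief sketch with illustrative values (the model name is hypothetical):

```python
from databricks.bundles.jobs import (
    ModelTriggerConfiguration,
    ModelTriggerConfigurationCondition,
)

cfg = ModelTriggerConfiguration(
    condition=ModelTriggerConfigurationCondition.MODEL_ALIAS_SET,
    aliases=["champion"],                  # only meaningful with MODEL_ALIAS_SET
    securable_name="main.ml.churn_model",  # hypothetical registered model
    min_time_between_triggers_seconds=3600,
)

# as_dict() yields the ModelTriggerConfigurationDict shape; from_dict() accepts it back.
round_tripped = ModelTriggerConfiguration.from_dict(cfg.as_dict())
```
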

databricks/bundles/jobs/_models/model_trigger_configuration_condition.py (new file)

@@ -0,0 +1,18 @@
+ from enum import Enum
+ from typing import Literal
+
+
+ class ModelTriggerConfigurationCondition(Enum):
+ """
+ :meta private: [EXPERIMENTAL]
+ """
+
+ MODEL_CREATED = "MODEL_CREATED"
+ MODEL_VERSION_READY = "MODEL_VERSION_READY"
+ MODEL_ALIAS_SET = "MODEL_ALIAS_SET"
+
+
+ ModelTriggerConfigurationConditionParam = (
+ Literal["MODEL_CREATED", "MODEL_VERSION_READY", "MODEL_ALIAS_SET"]
+ | ModelTriggerConfigurationCondition
+ )

databricks/bundles/jobs/_models/trigger_settings.py

@@ -8,6 +8,10 @@ from databricks.bundles.jobs._models.file_arrival_trigger_configuration import (
  FileArrivalTriggerConfiguration,
  FileArrivalTriggerConfigurationParam,
  )
+ from databricks.bundles.jobs._models.model_trigger_configuration import (
+ ModelTriggerConfiguration,
+ ModelTriggerConfigurationParam,
+ )
  from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam
  from databricks.bundles.jobs._models.periodic_trigger_configuration import (
  PeriodicTriggerConfiguration,

@@ -31,6 +35,11 @@ class TriggerSettings:
  File arrival trigger settings.
  """

+ model: VariableOrOptional[ModelTriggerConfiguration] = None
+ """
+ :meta private: [EXPERIMENTAL]
+ """
+
  pause_status: VariableOrOptional[PauseStatus] = None
  """
  Whether this trigger is paused or not.

@@ -59,6 +68,11 @@ class TriggerSettingsDict(TypedDict, total=False):
  File arrival trigger settings.
  """

+ model: VariableOrOptional[ModelTriggerConfigurationParam]
+ """
+ :meta private: [EXPERIMENTAL]
+ """
+
  pause_status: VariableOrOptional[PauseStatusParam]
  """
  Whether this trigger is paused or not.
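
With the new `model` field, a trigger can be driven by model registry events instead of file arrival or a schedule. A hedged sketch in dict form (the `Job.trigger` field itself is not shown in this hunk and is assumed from the Jobs API):

```python
from databricks.bundles.jobs import Job, TriggerSettings

trigger = TriggerSettings.from_dict(
    {
        "pause_status": "UNPAUSED",
        "model": {
            "condition": "MODEL_VERSION_READY",
            "securable_name": "main.ml.churn_model",  # hypothetical model
            "min_time_between_triggers_seconds": 900,
        },
    }
)

# Assumed: Job exposes a trigger field, mirroring the Jobs API.
job = Job(name="score_on_new_model_version", trigger=trigger)
```
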

databricks/bundles/pipelines/__init__.py

@@ -15,6 +15,9 @@ __all__ = [
  "ClusterLogConf",
  "ClusterLogConfDict",
  "ClusterLogConfParam",
+ "ConnectionParameters",
+ "ConnectionParametersDict",
+ "ConnectionParametersParam",
  "DayOfWeek",
  "DayOfWeekParam",
  "DbfsStorageInfo",

@@ -175,6 +178,11 @@ from databricks.bundles.pipelines._models.cluster_log_conf import (
  ClusterLogConfDict,
  ClusterLogConfParam,
  )
+ from databricks.bundles.pipelines._models.connection_parameters import (
+ ConnectionParameters,
+ ConnectionParametersDict,
+ ConnectionParametersParam,
+ )
  from databricks.bundles.pipelines._models.day_of_week import DayOfWeek, DayOfWeekParam
  from databricks.bundles.pipelines._models.dbfs_storage_info import (
  DbfsStorageInfo,

databricks/bundles/pipelines/_models/aws_attributes.py

@@ -104,7 +104,7 @@ class AwsAttributes:
  This string will be of a form like "us-west-2a". The provided availability
  zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
  is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
- This is an optional field at cluster creation, and if not specified, a default zone will be used.
+ This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
  If the zone specified is "auto", will try to place cluster in a zone with high availability,
  and will retry placement in a different AZ if there is not enough capacity.

@@ -204,7 +204,7 @@ class AwsAttributesDict(TypedDict, total=False):
  This string will be of a form like "us-west-2a". The provided availability
  zone must be in the same region as the Databricks deployment. For example, "us-west-2a"
  is not a valid zone id if the Databricks deployment resides in the "us-east-1" region.
- This is an optional field at cluster creation, and if not specified, a default zone will be used.
+ This is an optional field at cluster creation, and if not specified, the zone "auto" will be used.
  If the zone specified is "auto", will try to place cluster in a zone with high availability,
  and will retry placement in a different AZ if there is not enough capacity.


databricks/bundles/pipelines/_models/connection_parameters.py (new file)

@@ -0,0 +1,50 @@
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, TypedDict
+
+ from databricks.bundles.core._transform import _transform
+ from databricks.bundles.core._transform_to_json import _transform_to_json_value
+ from databricks.bundles.core._variable import VariableOrOptional
+
+ if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
+ @dataclass(kw_only=True)
+ class ConnectionParameters:
+ """
+ :meta private: [EXPERIMENTAL]
+ """
+
+ source_catalog: VariableOrOptional[str] = None
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Source catalog for initial connection.
+ This is necessary for schema exploration in some database systems like Oracle, and optional but nice-to-have
+ in some other database systems like Postgres.
+ For Oracle databases, this maps to a service name.
+ """
+
+ @classmethod
+ def from_dict(cls, value: "ConnectionParametersDict") -> "Self":
+ return _transform(cls, value)
+
+ def as_dict(self) -> "ConnectionParametersDict":
+ return _transform_to_json_value(self) # type:ignore
+
+
+ class ConnectionParametersDict(TypedDict, total=False):
+ """"""
+
+ source_catalog: VariableOrOptional[str]
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Source catalog for initial connection.
+ This is necessary for schema exploration in some database systems like Oracle, and optional but nice-to-have
+ in some other database systems like Postgres.
+ For Oracle databases, this maps to a service name.
+ """
+
+
+ ConnectionParametersParam = ConnectionParametersDict | ConnectionParameters

databricks/bundles/pipelines/_models/ingestion_config.py

@@ -4,10 +4,7 @@ from typing import TYPE_CHECKING, TypedDict
  from databricks.bundles.core._transform import _transform
  from databricks.bundles.core._transform_to_json import _transform_to_json_value
  from databricks.bundles.core._variable import VariableOrOptional
- from databricks.bundles.pipelines._models.report_spec import (
- ReportSpec,
- ReportSpecParam,
- )
+ from databricks.bundles.pipelines._models.report_spec import ReportSpec, ReportSpecParam
  from databricks.bundles.pipelines._models.schema_spec import SchemaSpec, SchemaSpecParam
  from databricks.bundles.pipelines._models.table_spec import TableSpec, TableSpecParam


databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py

@@ -4,6 +4,10 @@ from typing import TYPE_CHECKING, TypedDict
  from databricks.bundles.core._transform import _transform
  from databricks.bundles.core._transform_to_json import _transform_to_json_value
  from databricks.bundles.core._variable import VariableOr, VariableOrOptional
+ from databricks.bundles.pipelines._models.connection_parameters import (
+ ConnectionParameters,
+ ConnectionParametersParam,
+ )

  if TYPE_CHECKING:
  from typing_extensions import Self

@@ -35,11 +39,18 @@ class IngestionGatewayPipelineDefinition:
  [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
  """

+ connection_parameters: VariableOrOptional[ConnectionParameters] = None
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Optional, Internal. Parameters required to establish an initial connection with the source.
+ """
+
  gateway_storage_name: VariableOrOptional[str] = None
  """
  Optional. The Unity Catalog-compatible name for the gateway storage location.
  This is the destination to use for the data that is extracted by the gateway.
- Delta Live Tables system will automatically create the storage location under the catalog and schema.
+ Spark Declarative Pipelines system will automatically create the storage location under the catalog and schema.
  """

  @classmethod

@@ -73,11 +84,18 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
  [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
  """

+ connection_parameters: VariableOrOptional[ConnectionParametersParam]
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Optional, Internal. Parameters required to establish an initial connection with the source.
+ """
+
  gateway_storage_name: VariableOrOptional[str]
  """
  Optional. The Unity Catalog-compatible name for the gateway storage location.
  This is the destination to use for the data that is extracted by the gateway.
- Delta Live Tables system will automatically create the storage location under the catalog and schema.
+ Spark Declarative Pipelines system will automatically create the storage location under the catalog and schema.
  """
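
The gateway definition gains an experimental `connection_parameters` block whose `source_catalog` seeds the initial connection (an Oracle service name, for example). A rough sketch; `connection_name` and `gateway_storage_name` appear in the existing model, while the other storage fields and all values here are assumptions for illustration:

```python
from databricks.bundles.pipelines import (
    ConnectionParameters,
    IngestionGatewayPipelineDefinition,
)

gateway = IngestionGatewayPipelineDefinition(
    connection_name="oracle_conn",            # hypothetical UC connection
    connection_parameters=ConnectionParameters(
        source_catalog="ORCLPDB1",            # Oracle service name for the first connection
    ),
    gateway_storage_catalog="main",           # assumed pre-existing fields,
    gateway_storage_schema="ingest_gateway",  # values are illustrative
    gateway_storage_name="oracle_gateway_storage",
)
```
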
 

databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py

@@ -30,6 +30,16 @@ class IngestionPipelineDefinition:
  Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
  """

+ ingest_from_uc_foreign_catalog: VariableOrOptional[bool] = None
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Immutable. If set to true, the pipeline will ingest tables from the
+ UC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.
+ The `source_catalog` fields in objects of IngestionConfig are interpreted as
+ the UC foreign catalogs to ingest from.
+ """
+
  ingestion_gateway_id: VariableOrOptional[str] = None
  """
  Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.

@@ -38,10 +48,6 @@ class IngestionPipelineDefinition:
  netsuite_jar_path: VariableOrOptional[str] = None
  """
  :meta private: [EXPERIMENTAL]
-
- Netsuite only configuration. When the field is set for a netsuite connector,
- the jar stored in the field will be validated and added to the classpath of
- pipeline's cluster.
  """

  objects: VariableOrList[IngestionConfig] = field(default_factory=list)

@@ -51,8 +57,6 @@ class IngestionPipelineDefinition:

  source_configurations: VariableOrList[SourceConfig] = field(default_factory=list)
  """
- :meta private: [EXPERIMENTAL]
-
  Top-level source configurations
  """

@@ -77,6 +81,16 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
  Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.
  """

+ ingest_from_uc_foreign_catalog: VariableOrOptional[bool]
+ """
+ :meta private: [EXPERIMENTAL]
+
+ Immutable. If set to true, the pipeline will ingest tables from the
+ UC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.
+ The `source_catalog` fields in objects of IngestionConfig are interpreted as
+ the UC foreign catalogs to ingest from.
+ """
+
  ingestion_gateway_id: VariableOrOptional[str]
  """
  Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.

@@ -85,10 +99,6 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
  netsuite_jar_path: VariableOrOptional[str]
  """
  :meta private: [EXPERIMENTAL]
-
- Netsuite only configuration. When the field is set for a netsuite connector,
- the jar stored in the field will be validated and added to the classpath of
- pipeline's cluster.
  """

  objects: VariableOrList[IngestionConfigParam]

@@ -98,8 +108,6 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):

  source_configurations: VariableOrList[SourceConfigParam]
  """
- :meta private: [EXPERIMENTAL]
-
  Top-level source configurations
  """
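
The new `ingest_from_uc_foreign_catalog` flag removes the need for a UC connection or gateway and reinterprets each object's `source_catalog` as a UC foreign catalog. A hedged dict-form sketch (the table-spec field names come from the existing ingestion models, not from this hunk, and the values are illustrative):

```python
from databricks.bundles.pipelines import IngestionPipelineDefinition

ingestion = IngestionPipelineDefinition.from_dict(
    {
        # With the flag on, source_catalog below is read as a UC foreign catalog.
        "ingest_from_uc_foreign_catalog": True,
        "objects": [
            {
                "table": {
                    "source_catalog": "pg_foreign",   # hypothetical foreign catalog
                    "source_schema": "public",
                    "source_table": "orders",
                    "destination_catalog": "main",
                    "destination_schema": "bronze",
                }
            }
        ],
    }
)
```
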
 

databricks/bundles/pipelines/_models/postgres_catalog_config.py

@@ -16,15 +16,11 @@ if TYPE_CHECKING:
  @dataclass(kw_only=True)
  class PostgresCatalogConfig:
  """
- :meta private: [EXPERIMENTAL]
-
  PG-specific catalog-level configuration parameters
  """

  slot_config: VariableOrOptional[PostgresSlotConfig] = None
  """
- :meta private: [EXPERIMENTAL]
-
  Optional. The Postgres slot configuration to use for logical replication
  """

@@ -41,8 +37,6 @@ class PostgresCatalogConfigDict(TypedDict, total=False):

  slot_config: VariableOrOptional[PostgresSlotConfigParam]
  """
- :meta private: [EXPERIMENTAL]
-
  Optional. The Postgres slot configuration to use for logical replication
  """


databricks/bundles/pipelines/_models/postgres_slot_config.py

@@ -12,22 +12,16 @@ if TYPE_CHECKING:
  @dataclass(kw_only=True)
  class PostgresSlotConfig:
  """
- :meta private: [EXPERIMENTAL]
-
  PostgresSlotConfig contains the configuration for a Postgres logical replication slot
  """

  publication_name: VariableOrOptional[str] = None
  """
- :meta private: [EXPERIMENTAL]
-
  The name of the publication to use for the Postgres source
  """

  slot_name: VariableOrOptional[str] = None
  """
- :meta private: [EXPERIMENTAL]
-
  The name of the logical replication slot to use for the Postgres source
  """

@@ -44,15 +38,11 @@ class PostgresSlotConfigDict(TypedDict, total=False):

  publication_name: VariableOrOptional[str]
  """
- :meta private: [EXPERIMENTAL]
-
  The name of the publication to use for the Postgres source
  """

  slot_name: VariableOrOptional[str]
  """
- :meta private: [EXPERIMENTAL]
-
  The name of the logical replication slot to use for the Postgres source
  """


databricks/bundles/pipelines/_models/source_catalog_config.py

@@ -16,22 +16,16 @@ if TYPE_CHECKING:
  @dataclass(kw_only=True)
  class SourceCatalogConfig:
  """
- :meta private: [EXPERIMENTAL]
-
  SourceCatalogConfig contains catalog-level custom configuration parameters for each source
  """

  postgres: VariableOrOptional[PostgresCatalogConfig] = None
  """
- :meta private: [EXPERIMENTAL]
-
  Postgres-specific catalog-level configuration parameters
  """

  source_catalog: VariableOrOptional[str] = None
  """
- :meta private: [EXPERIMENTAL]
-
  Source catalog name
  """

@@ -48,15 +42,11 @@ class SourceCatalogConfigDict(TypedDict, total=False):

  postgres: VariableOrOptional[PostgresCatalogConfigParam]
  """
- :meta private: [EXPERIMENTAL]
-
  Postgres-specific catalog-level configuration parameters
  """

  source_catalog: VariableOrOptional[str]
  """
- :meta private: [EXPERIMENTAL]
-
  Source catalog name
  """


databricks/bundles/pipelines/_models/source_config.py

@@ -15,14 +15,10 @@ if TYPE_CHECKING:

  @dataclass(kw_only=True)
  class SourceConfig:
- """
- :meta private: [EXPERIMENTAL]
- """
+ """"""

  catalog: VariableOrOptional[SourceCatalogConfig] = None
  """
- :meta private: [EXPERIMENTAL]
-
  Catalog-level source configuration parameters
  """

@@ -39,8 +35,6 @@ class SourceConfigDict(TypedDict, total=False):

  catalog: VariableOrOptional[SourceCatalogConfigParam]
  """
- :meta private: [EXPERIMENTAL]
-
  Catalog-level source configuration parameters
  """
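
These source-configuration models only lose their experimental markers in this release; the nesting between them is unchanged. A small sketch of the full chain, assuming the classes are re-exported from `databricks.bundles.pipelines` like the other models (values are illustrative):

```python
from databricks.bundles.pipelines import (
    PostgresCatalogConfig,
    PostgresSlotConfig,
    SourceCatalogConfig,
    SourceConfig,
)

# Top-level source configuration -> catalog-level config -> Postgres slot settings.
source_config = SourceConfig(
    catalog=SourceCatalogConfig(
        source_catalog="postgres_prod",
        postgres=PostgresCatalogConfig(
            slot_config=PostgresSlotConfig(
                slot_name="databricks_slot",
                publication_name="databricks_pub",
            )
        ),
    )
)
```
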
 

databricks/bundles/pipelines/_models/table_specific_config.py

@@ -72,7 +72,7 @@ class TableSpecificConfig:

  sequence_by: VariableOrList[str] = field(default_factory=list)
  """
- The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
+ The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.
  """

  workday_report_parameters: VariableOrOptional[

@@ -80,8 +80,6 @@ class TableSpecificConfig:
  ] = None
  """
  :meta private: [EXPERIMENTAL]
-
- (Optional) Additional custom parameters for Workday Report
  """

  @classmethod

@@ -142,7 +140,7 @@ class TableSpecificConfigDict(TypedDict, total=False):

  sequence_by: VariableOrList[str]
  """
- The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.
+ The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.
  """

  workday_report_parameters: VariableOrOptional[

@@ -150,8 +148,6 @@ class TableSpecificConfigDict(TypedDict, total=False):
  ]
  """
  :meta private: [EXPERIMENTAL]
-
- (Optional) Additional custom parameters for Workday Report
  """
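
Only the product name in the `sequence_by` docstring changes; the field still orders out-of-order change events. A brief dict-form sketch, assuming `TableSpecificConfig` is re-exported from `databricks.bundles.pipelines` and that the `scd_type` value comes from the accompanying `table_specific_config_scd_type` module:

```python
from databricks.bundles.pipelines import TableSpecificConfig

config = TableSpecificConfig.from_dict(
    {
        "scd_type": "SCD_TYPE_2",        # assumed enum value
        "sequence_by": ["updated_at"],   # columns that order late-arriving changes
    }
)
```
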
 

databricks/bundles/version.py

@@ -1 +1 @@
- __version__ = "0.278.0"
+ __version__ = "0.279.0"

{databricks_bundles-0.278.0.dist-info → databricks_bundles-0.279.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: databricks-bundles
- Version: 0.278.0
+ Version: 0.279.0
  Summary: Python support for Databricks Asset Bundles
  Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
  Requires-Python: >=3.10

@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/python/

  To use `databricks-bundles`, you must first:

- 1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.278.0 or above
+ 1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.279.0 or above
  2. Authenticate to your Databricks workspace if you have not done so already:

  ```bash

{databricks_bundles-0.278.0.dist-info → databricks_bundles-0.279.0.dist-info}/RECORD

@@ -2,7 +2,7 @@ databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
  databricks/bundles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  databricks/bundles/build.py,sha256=WpMZiPcyYqmUeimmYlXlV3RLTUFFUQrUzG83q4Zb4xA,17817
  databricks/bundles/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
- databricks/bundles/version.py,sha256=7qWgZKv1CnXSa7pUQgcTxHu6tn1oepoBYc3wf3flDlM,24
+ databricks/bundles/version.py,sha256=_qFhYFTq_G8SSKFRVisLaZ4IdaGke560Mrqo6RXs05c,24
  databricks/bundles/core/__init__.py,sha256=eivW2LIitfT-doDfPWac26vhdhkSACqoRi_48ZJE8zQ,1357
  databricks/bundles/core/_bundle.py,sha256=B5qINwnoRDitkofFaoCWDCS1tbB3a7nufqK0iDRfC6g,3287
  databricks/bundles/core/_diagnostics.py,sha256=arhPuRpjfOUjr8_T91-b-YM-ZtpkrLWeJv0BcLfTIOo,5879

@@ -15,11 +15,11 @@ databricks/bundles/core/_resources.py,sha256=cvW_QbKeazTEJeD1FvmafPvWk29iqx5ruB3
  databricks/bundles/core/_transform.py,sha256=ISupC7du3JnqrQQMgHf5Mt7fpZKEcUNZ5qwgMWZQfAE,8836
  databricks/bundles/core/_transform_to_json.py,sha256=aGiIBVx7pSO9LiJZ5CPYsTA5U6XhCbn1Ew15dhqC0yc,1890
  databricks/bundles/core/_variable.py,sha256=16g6vSLs5J-Ek2u2LNSnbqK-vpgbeirCddWjS-AiLGU,3617
- databricks/bundles/jobs/__init__.py,sha256=401hE3tONdr56nGBl7eistqidtf-Z-dtuk4qzsqWmXI,19423
+ databricks/bundles/jobs/__init__.py,sha256=iYkL1EP3lcaXLrFkAe6EkZ1a1MhbEViL_E2IMU6sOPg,19969
  databricks/bundles/jobs/_models/adlsgen2_info.py,sha256=_eGe6ivi2VmokxKxKUji9-fSZLBubAr43uBDnN7vvlY,1104
  databricks/bundles/jobs/_models/authentication_method.py,sha256=XI8hU5fhPlGTsZdePZtR6FIjyT4iEt2URb61q1MsgNI,198
  databricks/bundles/jobs/_models/auto_scale.py,sha256=Z5vZa0bZi6LJ_Ac-VJfqqCJAtI-zY1_auTGhsV8khvA,1624
- databricks/bundles/jobs/_models/aws_attributes.py,sha256=pfZIHLVty93bMtfwkMKLPuUqMmRcDxom6zhmiglCq2Y,10181
+ databricks/bundles/jobs/_models/aws_attributes.py,sha256=_iD4ppogK3lYsOmbcGQ9PZ9lWKnuF3Qc14QlHSkyh7k,10183
  databricks/bundles/jobs/_models/aws_availability.py,sha256=C4yzZLt_CSIxlZ3MoCV4MPdQRjVRgRU_vkrLIkL6hkQ,477
  databricks/bundles/jobs/_models/azure_attributes.py,sha256=0w-lwBbr-6Xo2i5Ok6-9cwBoetwmzI3DA7kG81XRv6U,3764
  databricks/bundles/jobs/_models/azure_availability.py,sha256=KY8dt1vWA8RHBY3nq5CeqPjqhLyn5RvRfoHQ-Y2vlbk,540

@@ -33,7 +33,7 @@ databricks/bundles/jobs/_models/condition_task.py,sha256=4A71BNRZNk9_iS8IFiHr69R
  databricks/bundles/jobs/_models/condition_task_op.py,sha256=mj5tx5qmvUCIkDvFYbo1-6fvVP_Q1zYzxv011ufHbx4,1141
  databricks/bundles/jobs/_models/continuous.py,sha256=ILsXONBdaHnXoNofLJj5CWu4Hk3NGpvS4izBKOwZHL0,1609
  databricks/bundles/jobs/_models/cron_schedule.py,sha256=_dUQ8vdfnZvIPvNyB-qdn_9cKKGrys249Wv_SWDdP7Q,2186
- databricks/bundles/jobs/_models/dashboard_task.py,sha256=HTbz7Tra_gvnPNW2Skug-k-bxjwbY7xu0cq5nlGRYh8,1533
+ databricks/bundles/jobs/_models/dashboard_task.py,sha256=sh-yHCUoDEj-WAe1w5gDz6luRNg2dgKTK42DlKiNZbo,1546
  databricks/bundles/jobs/_models/data_security_mode.py,sha256=fuelzF06CfyhG9OmK-2orH5EifQnEtPTEmh7NZcL7zA,2660
  databricks/bundles/jobs/_models/dbfs_storage_info.py,sha256=JLc9yrL3YOOVlYbQdA07tqF2wxkJdDvAKhQmAKURxaQ,972
  databricks/bundles/jobs/_models/dbt_platform_task.py,sha256=a_cR8fCEIpN0rAcoLEAy_UCJQAgu1q0XkPZlVhy8PAQ,1491

@@ -41,7 +41,7 @@ databricks/bundles/jobs/_models/dbt_task.py,sha256=3OT0GoU1y1DffwXE_YAXXU807Hj2D
  databricks/bundles/jobs/_models/docker_basic_auth.py,sha256=jEbSE8CvnTceOm405NA18IvB1lLCu-Wfe3SPSlsSBG4,1084
  databricks/bundles/jobs/_models/docker_image.py,sha256=h2hp3vnfh_wXxMg6RzNHPfjfb-FMsyABe83XIaX5fA8,1126
  databricks/bundles/jobs/_models/ebs_volume_type.py,sha256=-93BcybklhLyQEZvF8C1BhnHMeCjfDwI2qwimZ6X5eU,415
- databricks/bundles/jobs/_models/environment.py,sha256=jc3mnD08KK9lL__4BbnaKa4sKaXpThp1zqooGwgPlI0,2575
+ databricks/bundles/jobs/_models/environment.py,sha256=HZs0uzRLtagbcRLaNjyLUWVZ4I3Qkq_S6G223dQqib4,2195
  databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py,sha256=VO5bHmN-hywRfg3zfqTdc0YI_5FQRpGLZdc6f6_GKnA,2378
  databricks/bundles/jobs/_models/for_each_task.py,sha256=MP-6c7zUaRcl5EaM-3IfjhX1Jc840GlnRdjj_SlhypI,1810
  databricks/bundles/jobs/_models/gcp_attributes.py,sha256=DdKsEfEIGTtKJFdC_h8JukAQgDhE1yluwWn2X2MIiOk,5786

@@ -51,7 +51,7 @@ databricks/bundles/jobs/_models/gen_ai_compute_task.py,sha256=WNjYn_s4bVlB-msh7V
  databricks/bundles/jobs/_models/git_provider.py,sha256=VzNKsIrOJauZjCTCeq3zVUumrnZoamYXxpgSz0kPsEI,664
  databricks/bundles/jobs/_models/git_source.py,sha256=0WVNzvbdMRvFC4N-fH2Jw1sh-kuNCt7WVkO5E1Jy5a8,3031
  databricks/bundles/jobs/_models/init_script_info.py,sha256=Skv-u-naj3GRdmwvnxbMIDdOj3YU9PARTBMCbTNKfN8,4511
- databricks/bundles/jobs/_models/job.py,sha256=Wzk_HuU3wTf7ZdAFUOJf31Clua9gsgQjapSaL_UqwGU,15819
+ databricks/bundles/jobs/_models/job.py,sha256=O5Fv8ZTO1tXihCTG0LgdZeQFMldmj837Loq2Ci_RZn8,15895
  databricks/bundles/jobs/_models/job_cluster.py,sha256=b9mQ993-tmY_KZem8vRCT7MLcw07KhKEfvmyWxoGav0,1595
  databricks/bundles/jobs/_models/job_email_notifications.py,sha256=rnkjkZ06YTsHLkbjSDGV9Lm5o9r6fNSG9KwGA2rlyjY,5308
  databricks/bundles/jobs/_models/job_environment.py,sha256=ekMKwai2LtB9Dzg3LyGmoVjLCC6ShTWBjW54_B-qOBo,1185

@@ -70,6 +70,8 @@ databricks/bundles/jobs/_models/lifecycle.py,sha256=_0h3QZ8LPYTnKC8YDRmW0NbQBxwu
  databricks/bundles/jobs/_models/local_file_info.py,sha256=yH12cJKjPrFQxG69DgAdp87PIYVGfjECvabbRPqKZjI,952
  databricks/bundles/jobs/_models/log_analytics_info.py,sha256=JeEeyNcldckin93yy4xCzWwcgN9iMxGwVyNOP0gpVys,1343
  databricks/bundles/jobs/_models/maven_library.py,sha256=xz7BIo3XZ4xfp9S3sovADLxDY_DVcZbqduS8VTo1La4,2002
+ databricks/bundles/jobs/_models/model_trigger_configuration.py,sha256=_eIyPPfBfHQkQPN7MpQOaA3_EvUIFDc8vmvg7TVF6oI,3416
+ databricks/bundles/jobs/_models/model_trigger_configuration_condition.py,sha256=QdaXDlfMtuEV5K8Wh2hSn5AHfzdMsemz2InFQCq_2zw,434
  databricks/bundles/jobs/_models/notebook_task.py,sha256=e1u7uNHTgW8Y9fAA-tcrAeo__bIN_KyWaca41DYzEOc,5097
  databricks/bundles/jobs/_models/pause_status.py,sha256=4Zsd3vXwRXLBXO1LLauX6TkdbrfRJYzJxVeTSfaapNk,189
  databricks/bundles/jobs/_models/performance_target.py,sha256=pSefsgLvGSZ723xQEmHDHt-scofmsrQQGCHhr_vAddI,572

@@ -107,19 +109,20 @@ databricks/bundles/jobs/_models/task_dependency.py,sha256=aDo85ulTS46OduT357cLP4
  databricks/bundles/jobs/_models/task_email_notifications.py,sha256=3s7JnOY2ZMhDiPVk8Da0m3e5URijCTncRlnTH19XMEs,5315
  databricks/bundles/jobs/_models/task_notification_settings.py,sha256=CzMzpjLDR1oWEjPArI2S4RMM5k7QkBk5yZVrbB43LMo,2086
  databricks/bundles/jobs/_models/task_retry_mode.py,sha256=Ds9-8lvJYAdaO-iLubaUTMq06UC-WCsKH6td9KbMO-s,542
- databricks/bundles/jobs/_models/trigger_settings.py,sha256=199oDuHeznqhnd0QIKFnzyzupJpHswjw0v3ltsBHuks,2197
+ databricks/bundles/jobs/_models/trigger_settings.py,sha256=v2acrKtbucuQe9Ey4OgnEwNKgj_Sq0yjKPkzS1ad_VM,2568
  databricks/bundles/jobs/_models/volumes_storage_info.py,sha256=31pQ9fnqQGhT2mD_ScjEhy-dm0307ne7iP_gxfcJXDY,1253
  databricks/bundles/jobs/_models/webhook.py,sha256=S209r8QqufJLRoACU6a0MnTzuKOvn3r6p91u-7nbFhQ,744
  databricks/bundles/jobs/_models/webhook_notifications.py,sha256=4FrMTYy4tDeMe3VqSbn9jjNYISTAmYTzENpGvnQGju4,4349
  databricks/bundles/jobs/_models/workload_type.py,sha256=A8KViUIB4x_gEXVS2p4KTGZ9Lr50Z3LLzIYxyE676xw,1162
  databricks/bundles/jobs/_models/workspace_storage_info.py,sha256=Qnm6lsw9rwXB7Te_Um0c7TvIH4Vv7ndKKYYV0pxJ6q8,1100
- databricks/bundles/pipelines/__init__.py,sha256=J9t4B5vd6iGPlyNfHqplwlH2rDi4liCEOesFAqnBiUM,11878
+ databricks/bundles/pipelines/__init__.py,sha256=_BOWtW0nBP1ndeBH08uTOvMCEJEtAiIj-MWba2ZThRk,12133
  databricks/bundles/pipelines/_models/adlsgen2_info.py,sha256=_eGe6ivi2VmokxKxKUji9-fSZLBubAr43uBDnN7vvlY,1104
- databricks/bundles/pipelines/_models/aws_attributes.py,sha256=sKKsOBfsg21soMJrdv3ETHIKg40LVGZWirlqSlQ2n8o,10191
+ databricks/bundles/pipelines/_models/aws_attributes.py,sha256=_YVduqrylaoZr_JQh15VIKtu5b9rzG1-1f5sAvxElbA,10193
  databricks/bundles/pipelines/_models/aws_availability.py,sha256=C4yzZLt_CSIxlZ3MoCV4MPdQRjVRgRU_vkrLIkL6hkQ,477
  databricks/bundles/pipelines/_models/azure_attributes.py,sha256=lh4epnkK9_8issxpxyLpnJ3uNwMNCsr9wBdaX8NfMlQ,3774
  databricks/bundles/pipelines/_models/azure_availability.py,sha256=KY8dt1vWA8RHBY3nq5CeqPjqhLyn5RvRfoHQ-Y2vlbk,540
  databricks/bundles/pipelines/_models/cluster_log_conf.py,sha256=EvCwe_ohAdv-mC5Veiqp28VFUaU9S5BeQA3KLLfEQGg,2697
+ databricks/bundles/pipelines/_models/connection_parameters.py,sha256=235CXyN25AVcBxzMEWtxNeIb1voVqqhASVNlRv9MVSw,1561
  databricks/bundles/pipelines/_models/day_of_week.py,sha256=dtri8UkWRZeuZm7o82pxFlJMPMcyYj-BhKzxWhYEJ60,554
  databricks/bundles/pipelines/_models/dbfs_storage_info.py,sha256=JLc9yrL3YOOVlYbQdA07tqF2wxkJdDvAKhQmAKURxaQ,972
  databricks/bundles/pipelines/_models/ebs_volume_type.py,sha256=-93BcybklhLyQEZvF8C1BhnHMeCjfDwI2qwimZ6X5eU,415

@@ -129,9 +132,9 @@ databricks/bundles/pipelines/_models/filters.py,sha256=9MDrS5ZRDwiu7a3l47Cw1IMNj
  databricks/bundles/pipelines/_models/gcp_attributes.py,sha256=XzJQ9RXRvNitnP3QHpYf4Az8gOCe6__qnD6OTXJab1U,5791
  databricks/bundles/pipelines/_models/gcp_availability.py,sha256=a2ayWsyEQDpIDx-mwDIx_p1VJpcPaUme4ndqbpc4uNs,556
  databricks/bundles/pipelines/_models/gcs_storage_info.py,sha256=hwOowyNKCBhzsUiCQSrtmQPxrMINEq5jg2EefkrE2fQ,1020
- databricks/bundles/pipelines/_models/ingestion_config.py,sha256=ERT5ySyVIVJ0T3r3tqjuCFzV5FEIJsfWZvbMmRTHNMk,1679
- databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py,sha256=UkpHUaelJYcEZ-3r8FRwEpUcQxHrvQoSVsmadmlQaBU,3030
- databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py,sha256=KHYododL_q3Z8yDvog793nyOJG6Q1ID9odFakRO6SVs,3943
+ databricks/bundles/pipelines/_models/ingestion_config.py,sha256=78ZNABAt7du-ri05y4fumS1HrwsThggZMkYrTga0cuQ,1666
+ databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py,sha256=6_rqYTz-gn9JUdRHbnfFbbLrr37RrPxS_9Y2pYCVmUQ,3634
+ databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py,sha256=536lJt42KWdsnObEsc8vm1wGYqTpljixIOR_QtV8mAw,4318
  databricks/bundles/pipelines/_models/ingestion_pipeline_definition_table_specific_config_query_based_connector_config.py,sha256=IXJqQjQ2aORHJZy1PZ3ST4hx4HVh7O4b-JoKoxYR9QE,5181
  databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters.py,sha256=3foG3H-6eOTpQbH0JqK3EY27isg6ZCm8w88cufh6HRU,3434
  databricks/bundles/pipelines/_models/ingestion_pipeline_definition_workday_report_parameters_query_key_value.py,sha256=mC5r259VRZKnd0WKi1Toeb1qG2VZglumzVos4Wv8BoQ,2261

@@ -151,17 +154,17 @@ databricks/bundles/pipelines/_models/pipeline_library.py,sha256=fkp4382MTXtMrRsf
  databricks/bundles/pipelines/_models/pipeline_permission.py,sha256=PMK6c0qHliiDjPRN4etPBYlitp_hRETtIV89UDWd6V8,1323
  databricks/bundles/pipelines/_models/pipeline_permission_level.py,sha256=ULGO2nLL1Z1vXiJMZ9d9rWZIQTe-Ghk7XrOn3uJmVKA,318
  databricks/bundles/pipelines/_models/pipelines_environment.py,sha256=UDiizKOkHTqflghT0lFnoXI1npRGXIW-Ji5dwQ7_GHk,1898
- databricks/bundles/pipelines/_models/postgres_catalog_config.py,sha256=Tuh2H8b0WrZbf7qsBUQcBCl_oEdiArbFxMdlrWzcg4A,1412
- databricks/bundles/pipelines/_models/postgres_slot_config.py,sha256=h5J4I3nNGA_IMnyFHqZ1BPTiWb_gn36OQeAoBcAF9ZI,1595
+ databricks/bundles/pipelines/_models/postgres_catalog_config.py,sha256=LWTVsmSAVfV7K62PjlpQRJRnPxvSD-gbmvva70FRYq4,1299
+ databricks/bundles/pipelines/_models/postgres_slot_config.py,sha256=_xeTYX0mnnhwDFq07BCtRtNoEh78HB2qaHyM4LaoLVE,1404
  databricks/bundles/pipelines/_models/report_spec.py,sha256=Yi6ReiD7zm2T8mCn0cdFCPke9VDKOosGhVTO4PBKXHg,2318
  databricks/bundles/pipelines/_models/restart_window.py,sha256=FefU_DTmOwVKCm6jBlcSyjqAWmc613_emmo0vuzET78,2408
  databricks/bundles/pipelines/_models/run_as.py,sha256=rZLJgEIvvX-sZmcCUbgDyLoFu00DEOg6mkRkZb2okZI,1684
  databricks/bundles/pipelines/_models/s3_storage_info.py,sha256=9DVWOFKrxGXijUnctwuB0_kANXRazPUPNSfmugJVuio,4595
  databricks/bundles/pipelines/_models/schema_spec.py,sha256=nNXx-JK2jTPDWJ490yy8DG7gB0_b6My2G3ZhlgGf8zY,2690
- databricks/bundles/pipelines/_models/source_catalog_config.py,sha256=x3f5f8FoPJQ2tiBWGriodH7a5Z3c9bDszz9PA0gWyKk,1670
- databricks/bundles/pipelines/_models/source_config.py,sha256=dv6PigMxxWz5WrIopZkNlJIh6SqfynlPiuolmd4qsRQ,1236
+ databricks/bundles/pipelines/_models/source_catalog_config.py,sha256=4WfC1DfLR8biXXRHSG-1Wbhvzi3TtcHt8p8ugZT-hzM,1479
+ databricks/bundles/pipelines/_models/source_config.py,sha256=Z29mIf0XL7TrreIEgzmc5EQBbmgX98saXK0lJo8GgoI,1119
  databricks/bundles/pipelines/_models/table_spec.py,sha256=3w9nTGzOKDhUgEtfx04i6tN3c4UDCsSaXW-zlwXgqGQ,3033
- databricks/bundles/pipelines/_models/table_specific_config.py,sha256=nnAX3YQS0JlVq1ySxYVTvIbpAiAtQOHX1Z0c_G_p4sE,5730
+ databricks/bundles/pipelines/_models/table_specific_config.py,sha256=IgNJeYOsCIRvB-6zc4WxUTAUMJ9ePPjgAYygJNVun7o,5614
  databricks/bundles/pipelines/_models/table_specific_config_scd_type.py,sha256=_RO5oXr_b4ibygpeWXmkil24TnRQZKxbpjTx-g5qc2Q,404
  databricks/bundles/pipelines/_models/volumes_storage_info.py,sha256=31pQ9fnqQGhT2mD_ScjEhy-dm0307ne7iP_gxfcJXDY,1253
  databricks/bundles/pipelines/_models/workspace_storage_info.py,sha256=Qnm6lsw9rwXB7Te_Um0c7TvIH4Vv7ndKKYYV0pxJ6q8,1100

@@ -176,7 +179,7 @@ databricks/bundles/volumes/_models/volume.py,sha256=ALGmeXW3rGH424pp6SaXPT1I87XX
  databricks/bundles/volumes/_models/volume_grant.py,sha256=U_-4-KL8LM3n5xJBLHj_wjPsqiVjCDRj8ttiUYqFRmI,1083
  databricks/bundles/volumes/_models/volume_grant_privilege.py,sha256=fCA0LVE9Q3sbHvTAj7e62E9ASq9jH5oK1iREQdp1TxQ,384
  databricks/bundles/volumes/_models/volume_type.py,sha256=kdczwT3EJ0n5ZBV75SgX-6766igJBLTh6ywaaxIbLDk,189
- databricks_bundles-0.278.0.dist-info/licenses/LICENSE,sha256=QKOZO8KtzbS_Qt3Tbl0dfGnidaeilKe0UiIjnEq1tjc,3790
- databricks_bundles-0.278.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- databricks_bundles-0.278.0.dist-info/METADATA,sha256=DVZpe2eBpLrc4SIRj9PIvtIOhkYt3Lv6m4fKqxji4Ug,1528
- databricks_bundles-0.278.0.dist-info/RECORD,,
+ databricks_bundles-0.279.0.dist-info/licenses/LICENSE,sha256=QKOZO8KtzbS_Qt3Tbl0dfGnidaeilKe0UiIjnEq1tjc,3790
+ databricks_bundles-0.279.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ databricks_bundles-0.279.0.dist-info/METADATA,sha256=ZS2z1LDfaWvLGlEqHQqY9er3iGf9UMFW3jzgqm8Ee9Y,1528
+ databricks_bundles-0.279.0.dist-info/RECORD,,