databricks-sdk 0.50.0__py3-none-any.whl → 0.52.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. See the package registry's advisory page for more details.

@@ -758,7 +758,7 @@ class EventLogSpec:
758
758
  @dataclass
759
759
  class FileLibrary:
760
760
  path: Optional[str] = None
761
- """The absolute path of the file."""
761
+ """The absolute path of the source code."""
762
762
 
763
763
  def as_dict(self) -> dict:
764
764
  """Serializes the FileLibrary into a dictionary suitable for use as a JSON request body."""
@@ -1029,25 +1029,25 @@ class IngestionConfig:
1029
1029
 
1030
1030
  @dataclass
1031
1031
  class IngestionGatewayPipelineDefinition:
1032
- connection_id: Optional[str] = None
1033
- """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
1034
- gateway pipeline uses to communicate with the source."""
1035
-
1036
- connection_name: Optional[str] = None
1032
+ connection_name: str
1037
1033
  """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
1038
1034
  source."""
1039
1035
 
1040
- gateway_storage_catalog: Optional[str] = None
1036
+ gateway_storage_catalog: str
1041
1037
  """Required, Immutable. The name of the catalog for the gateway pipeline's storage location."""
1042
1038
 
1039
+ gateway_storage_schema: str
1040
+ """Required, Immutable. The name of the schema for the gateway pipelines's storage location."""
1041
+
1042
+ connection_id: Optional[str] = None
1043
+ """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
1044
+ gateway pipeline uses to communicate with the source."""
1045
+
1043
1046
  gateway_storage_name: Optional[str] = None
1044
1047
  """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the
1045
1048
  destination to use for the data that is extracted by the gateway. Delta Live Tables system will
1046
1049
  automatically create the storage location under the catalog and schema."""
1047
1050
 
1048
- gateway_storage_schema: Optional[str] = None
1049
- """Required, Immutable. The name of the schema for the gateway pipelines's storage location."""
1050
-
1051
1051
  def as_dict(self) -> dict:
1052
1052
  """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
1053
1053
  body = {}
@@ -1293,7 +1293,7 @@ class MaturityLevel(Enum):
1293
1293
  @dataclass
1294
1294
  class NotebookLibrary:
1295
1295
  path: Optional[str] = None
1296
- """The absolute path of the notebook."""
1296
+ """The absolute path of the source code."""
1297
1297
 
1298
1298
  def as_dict(self) -> dict:
1299
1299
  """Serializes the NotebookLibrary into a dictionary suitable for use as a JSON request body."""
@@ -1892,7 +1892,7 @@ class PipelineClusterAutoscaleMode(Enum):
1892
1892
 
1893
1893
  @dataclass
1894
1894
  class PipelineDeployment:
1895
- kind: Optional[DeploymentKind] = None
1895
+ kind: DeploymentKind
1896
1896
  """The deployment method that manages the pipeline."""
1897
1897
 
1898
1898
  metadata_file_path: Optional[str] = None
@@ -2584,18 +2584,18 @@ class PipelineTrigger:
2584
2584
 
2585
2585
  @dataclass
2586
2586
  class ReportSpec:
2587
- destination_catalog: Optional[str] = None
2587
+ source_url: str
2588
+ """Required. Report URL in the source system."""
2589
+
2590
+ destination_catalog: str
2588
2591
  """Required. Destination catalog to store table."""
2589
2592
 
2590
- destination_schema: Optional[str] = None
2593
+ destination_schema: str
2591
2594
  """Required. Destination schema to store table."""
2592
2595
 
2593
2596
  destination_table: Optional[str] = None
2594
2597
  """Required. Destination table name. The pipeline fails if a table with that name already exists."""
2595
2598
 
2596
- source_url: Optional[str] = None
2597
- """Required. Report URL in the source system."""
2598
-
2599
2599
  table_configuration: Optional[TableSpecificConfig] = None
2600
2600
  """Configuration settings to control the ingestion of tables. These settings override the
2601
2601
  table_configuration defined in the IngestionPipelineDefinition object."""
@@ -2731,10 +2731,13 @@ class RunAs:
2731
2731
 
2732
2732
  @dataclass
2733
2733
  class SchemaSpec:
2734
- destination_catalog: Optional[str] = None
2734
+ source_schema: str
2735
+ """Required. Schema name in the source database."""
2736
+
2737
+ destination_catalog: str
2735
2738
  """Required. Destination catalog to store tables."""
2736
2739
 
2737
- destination_schema: Optional[str] = None
2740
+ destination_schema: str
2738
2741
  """Required. Destination schema to store tables in. Tables with the same name as the source tables
2739
2742
  are created in this destination schema. The pipeline fails If a table with the same name already
2740
2743
  exists."""
@@ -2742,9 +2745,6 @@ class SchemaSpec:
2742
2745
  source_catalog: Optional[str] = None
2743
2746
  """The source catalog name. Might be optional depending on the type of source."""
2744
2747
 
2745
- source_schema: Optional[str] = None
2746
- """Required. Schema name in the source database."""
2747
-
2748
2748
  table_configuration: Optional[TableSpecificConfig] = None
2749
2749
  """Configuration settings to control the ingestion of tables. These settings are applied to all
2750
2750
  tables in this schema and override the table_configuration defined in the
@@ -2924,6 +2924,7 @@ class StackFrame:
2924
2924
  @dataclass
2925
2925
  class StartUpdate:
2926
2926
  cause: Optional[StartUpdateCause] = None
2927
+ """What triggered this update."""
2927
2928
 
2928
2929
  full_refresh: Optional[bool] = None
2929
2930
  """If true, this update will reset all tables before running."""
@@ -2992,6 +2993,7 @@ class StartUpdate:
2992
2993
 
2993
2994
 
2994
2995
  class StartUpdateCause(Enum):
2996
+ """What triggered this update."""
2995
2997
 
2996
2998
  API_CALL = "API_CALL"
2997
2999
  JOB_TASK = "JOB_TASK"
@@ -3045,10 +3047,13 @@ class StopPipelineResponse:
3045
3047
 
3046
3048
  @dataclass
3047
3049
  class TableSpec:
3048
- destination_catalog: Optional[str] = None
3050
+ source_table: str
3051
+ """Required. Table name in the source database."""
3052
+
3053
+ destination_catalog: str
3049
3054
  """Required. Destination catalog to store table."""
3050
3055
 
3051
- destination_schema: Optional[str] = None
3056
+ destination_schema: str
3052
3057
  """Required. Destination schema to store table."""
3053
3058
 
3054
3059
  destination_table: Optional[str] = None
@@ -3061,9 +3066,6 @@ class TableSpec:
3061
3066
  source_schema: Optional[str] = None
3062
3067
  """Schema name in the source database. Might be optional depending on the type of source."""
3063
3068
 
3064
- source_table: Optional[str] = None
3065
- """Required. Table name in the source database."""
3066
-
3067
3069
  table_configuration: Optional[TableSpecificConfig] = None
3068
3070
  """Configuration settings to control the ingestion of tables. These settings override the
3069
3071
  table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec."""
@@ -3122,6 +3124,18 @@ class TableSpec:
3122
3124
 
3123
3125
  @dataclass
3124
3126
  class TableSpecificConfig:
3127
+ exclude_columns: Optional[List[str]] = None
3128
+ """A list of column names to be excluded for the ingestion. When not specified, include_columns
3129
+ fully controls what columns to be ingested. When specified, all other columns including future
3130
+ ones will be automatically included for ingestion. This field in mutually exclusive with
3131
+ `include_columns`."""
3132
+
3133
+ include_columns: Optional[List[str]] = None
3134
+ """A list of column names to be included for the ingestion. When not specified, all columns except
3135
+ ones in exclude_columns will be included. Future columns will be automatically included. When
3136
+ specified, all other future columns will be automatically excluded from ingestion. This field in
3137
+ mutually exclusive with `exclude_columns`."""
3138
+
3125
3139
  primary_keys: Optional[List[str]] = None
3126
3140
  """The primary key of the table used to apply changes."""
3127
3141
 
@@ -3139,6 +3153,10 @@ class TableSpecificConfig:
3139
3153
  def as_dict(self) -> dict:
3140
3154
  """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
3141
3155
  body = {}
3156
+ if self.exclude_columns:
3157
+ body["exclude_columns"] = [v for v in self.exclude_columns]
3158
+ if self.include_columns:
3159
+ body["include_columns"] = [v for v in self.include_columns]
3142
3160
  if self.primary_keys:
3143
3161
  body["primary_keys"] = [v for v in self.primary_keys]
3144
3162
  if self.salesforce_include_formula_fields is not None:
@@ -3152,6 +3170,10 @@ class TableSpecificConfig:
3152
3170
  def as_shallow_dict(self) -> dict:
3153
3171
  """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
3154
3172
  body = {}
3173
+ if self.exclude_columns:
3174
+ body["exclude_columns"] = self.exclude_columns
3175
+ if self.include_columns:
3176
+ body["include_columns"] = self.include_columns
3155
3177
  if self.primary_keys:
3156
3178
  body["primary_keys"] = self.primary_keys
3157
3179
  if self.salesforce_include_formula_fields is not None:
@@ -3166,6 +3188,8 @@ class TableSpecificConfig:
3166
3188
  def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig:
3167
3189
  """Deserializes the TableSpecificConfig from a dictionary."""
3168
3190
  return cls(
3191
+ exclude_columns=d.get("exclude_columns", None),
3192
+ include_columns=d.get("include_columns", None),
3169
3193
  primary_keys=d.get("primary_keys", None),
3170
3194
  salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
3171
3195
  scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
@@ -3325,6 +3349,7 @@ class UpdateStateInfo:
3325
3349
  creation_time: Optional[str] = None
3326
3350
 
3327
3351
  state: Optional[UpdateStateInfoState] = None
3352
+ """The update state."""
3328
3353
 
3329
3354
  update_id: Optional[str] = None
3330
3355
 
@@ -3361,6 +3386,7 @@ class UpdateStateInfo:
3361
3386
 
3362
3387
 
3363
3388
  class UpdateStateInfoState(Enum):
3389
+ """The update state."""
3364
3390
 
3365
3391
  CANCELED = "CANCELED"
3366
3392
  COMPLETED = "COMPLETED"
@@ -3687,6 +3713,7 @@ class PipelinesAPI:
3687
3713
  Retrieves events for a pipeline.
3688
3714
 
3689
3715
  :param pipeline_id: str
3716
+ The pipeline to return events for.
3690
3717
  :param filter: str (optional)
3691
3718
  Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
3692
3719
  are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp
@@ -3867,6 +3894,7 @@ class PipelinesAPI:
3867
3894
 
3868
3895
  :param pipeline_id: str
3869
3896
  :param cause: :class:`StartUpdateCause` (optional)
3897
+ What triggered this update.
3870
3898
  :param full_refresh: bool (optional)
3871
3899
  If true, this update will reset all tables before running.
3872
3900
  :param full_refresh_selection: List[str] (optional)
@@ -2874,7 +2874,8 @@ class ServedEntityInput:
2874
2874
  """The workload size of the served entity. The workload size corresponds to a range of provisioned
2875
2875
  concurrency that the compute autoscales between. A single unit of provisioned concurrency can
2876
2876
  process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
2877
- "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
2877
+ "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
2878
+ Additional custom workload sizes can also be used when available in the workspace. If
2878
2879
  scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
2879
2880
  is 0."""
2880
2881
 
@@ -3014,7 +3015,8 @@ class ServedEntityOutput:
3014
3015
  """The workload size of the served entity. The workload size corresponds to a range of provisioned
3015
3016
  concurrency that the compute autoscales between. A single unit of provisioned concurrency can
3016
3017
  process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
3017
- "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
3018
+ "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
3019
+ Additional custom workload sizes can also be used when available in the workspace. If
3018
3020
  scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
3019
3021
  is 0."""
3020
3022
 
@@ -3204,11 +3206,12 @@ class ServedModelInput:
3204
3206
  model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
3205
3207
  not specified for other entities, it defaults to entity_name-entity_version."""
3206
3208
 
3207
- workload_size: Optional[ServedModelInputWorkloadSize] = None
3209
+ workload_size: Optional[str] = None
3208
3210
  """The workload size of the served entity. The workload size corresponds to a range of provisioned
3209
3211
  concurrency that the compute autoscales between. A single unit of provisioned concurrency can
3210
3212
  process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
3211
- "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
3213
+ "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
3214
+ Additional custom workload sizes can also be used when available in the workspace. If
3212
3215
  scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
3213
3216
  is 0."""
3214
3217
 
@@ -3240,7 +3243,7 @@ class ServedModelInput:
3240
3243
  if self.scale_to_zero_enabled is not None:
3241
3244
  body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
3242
3245
  if self.workload_size is not None:
3243
- body["workload_size"] = self.workload_size.value
3246
+ body["workload_size"] = self.workload_size
3244
3247
  if self.workload_type is not None:
3245
3248
  body["workload_type"] = self.workload_type.value
3246
3249
  return body
@@ -3282,18 +3285,11 @@ class ServedModelInput:
3282
3285
  model_version=d.get("model_version", None),
3283
3286
  name=d.get("name", None),
3284
3287
  scale_to_zero_enabled=d.get("scale_to_zero_enabled", None),
3285
- workload_size=_enum(d, "workload_size", ServedModelInputWorkloadSize),
3288
+ workload_size=d.get("workload_size", None),
3286
3289
  workload_type=_enum(d, "workload_type", ServedModelInputWorkloadType),
3287
3290
  )
3288
3291
 
3289
3292
 
3290
- class ServedModelInputWorkloadSize(Enum):
3291
-
3292
- LARGE = "Large"
3293
- MEDIUM = "Medium"
3294
- SMALL = "Small"
3295
-
3296
-
3297
3293
  class ServedModelInputWorkloadType(Enum):
3298
3294
  """Please keep this in sync with with workload types in InferenceEndpointEntities.scala"""
3299
3295
 
@@ -3338,7 +3334,8 @@ class ServedModelOutput:
3338
3334
  """The workload size of the served entity. The workload size corresponds to a range of provisioned
3339
3335
  concurrency that the compute autoscales between. A single unit of provisioned concurrency can
3340
3336
  process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
3341
- "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
3337
+ "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
3338
+ Additional custom workload sizes can also be used when available in the workspace. If
3342
3339
  scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
3343
3340
  is 0."""
3344
3341