databricks-sdk 0.51.0__py3-none-any.whl → 0.52.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

databricks/sdk/service/compute.py CHANGED
@@ -9167,6 +9167,12 @@ class TerminationReasonCode(Enum):
     METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY"
     NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT"
     NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT"
+    NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE"
+    NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE"
+    NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE"
+    NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE"
+    NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE"
+    NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE"
     NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE"
     NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE"
     NO_MATCHED_K8S = "NO_MATCHED_K8S"
@@ -9179,6 +9185,7 @@ class TerminationReasonCode(Enum):
     REQUEST_THROTTLED = "REQUEST_THROTTLED"
     RESOURCE_USAGE_BLOCKED = "RESOURCE_USAGE_BLOCKED"
     SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE"
+    SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED"
     SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR"
     SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION"
     SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE"
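The six new NETWORK_CHECK_* codes and SECRET_PERMISSION_DENIED extend the enum surfaced in a cluster's termination_reason. A minimal sketch of branching on the new codes, assuming a workspace client and a placeholder cluster ID (the handling itself is illustrative, not part of this release):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import TerminationReasonCode

    # The six network-check failure codes added in 0.52.0.
    NETWORK_CHECK_CODES = {
        TerminationReasonCode.NETWORK_CHECK_CONTROL_PLANE_FAILURE,
        TerminationReasonCode.NETWORK_CHECK_DNS_SERVER_FAILURE,
        TerminationReasonCode.NETWORK_CHECK_METADATA_ENDPOINT_FAILURE,
        TerminationReasonCode.NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE,
        TerminationReasonCode.NETWORK_CHECK_NIC_FAILURE,
        TerminationReasonCode.NETWORK_CHECK_STORAGE_FAILURE,
    }

    w = WorkspaceClient()
    cluster = w.clusters.get(cluster_id="0123-456789-abcdefgh")  # hypothetical cluster ID
    reason = cluster.termination_reason
    if reason is not None and reason.code in NETWORK_CHECK_CODES:
        print(f"Cluster terminated by a network check failure: {reason.code.value}")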
databricks/sdk/service/dashboards.py CHANGED
@@ -2097,7 +2097,7 @@ class GenieAPI:
             self.wait_get_message_genie_completed,
             response=GenieMessage.from_dict(op_response),
             conversation_id=conversation_id,
-            message_id=op_response["id"],
+            message_id=op_response["message_id"],
             space_id=space_id,
         )
 
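This is a bug fix: the waiter built by GenieAPI read the message ID from the wrong response key ("id" instead of "message_id"), which could break waiting on the returned message. A hedged sketch of the call path now exercised correctly (space ID and prompt are placeholders):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # The returned waiter's message_id is now taken from op_response["message_id"].
    waiter = w.genie.start_conversation(
        space_id="01ef0000000000000000000000000000",  # hypothetical space ID
        content="How many rows are in the sales table?",
    )
    message = waiter.result()  # polls wait_get_message_genie_completed
    print(message.status)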
databricks/sdk/service/files.py CHANGED
@@ -1251,7 +1251,8 @@ class FilesAPI:
           The absolute path of the file.
         :param contents: BinaryIO
         :param overwrite: bool (optional)
-          If true, an existing file will be overwritten.
+          If true or unspecified, an existing file will be overwritten. If false, an error will be returned if
+          the path points to an existing file.
 
 
         """
databricks/sdk/service/ml.py CHANGED
@@ -592,7 +592,7 @@ class CreateExperimentResponse:
 @dataclass
 class CreateForecastingExperimentRequest:
     train_data_path: str
-    """The fully qualified name of a Unity Catalog table, formatted as
+    """The fully qualified path of a Unity Catalog table, formatted as
     catalog_name.schema_name.table_name, used as training data for the forecasting model."""
 
     target_column: str
@@ -617,6 +617,10 @@ class CreateForecastingExperimentRequest:
     experiment_path: Optional[str] = None
     """The path in the workspace to store the created experiment."""
 
+    future_feature_data_path: Optional[str] = None
+    """The fully qualified path of a Unity Catalog table, formatted as
+    catalog_name.schema_name.table_name, used to store future feature data for predictions."""
+
     holiday_regions: Optional[List[str]] = None
     """The region code(s) to automatically add holiday features. Currently supports only one region."""
 
@@ -665,6 +669,8 @@ class CreateForecastingExperimentRequest:
             body["forecast_granularity"] = self.forecast_granularity
         if self.forecast_horizon is not None:
             body["forecast_horizon"] = self.forecast_horizon
+        if self.future_feature_data_path is not None:
+            body["future_feature_data_path"] = self.future_feature_data_path
         if self.holiday_regions:
             body["holiday_regions"] = [v for v in self.holiday_regions]
         if self.include_features:
@@ -702,6 +708,8 @@ class CreateForecastingExperimentRequest:
             body["forecast_granularity"] = self.forecast_granularity
         if self.forecast_horizon is not None:
             body["forecast_horizon"] = self.forecast_horizon
+        if self.future_feature_data_path is not None:
+            body["future_feature_data_path"] = self.future_feature_data_path
         if self.holiday_regions:
             body["holiday_regions"] = self.holiday_regions
         if self.include_features:
@@ -736,6 +744,7 @@ class CreateForecastingExperimentRequest:
             experiment_path=d.get("experiment_path", None),
             forecast_granularity=d.get("forecast_granularity", None),
             forecast_horizon=d.get("forecast_horizon", None),
+            future_feature_data_path=d.get("future_feature_data_path", None),
             holiday_regions=d.get("holiday_regions", None),
             include_features=d.get("include_features", None),
             max_runtime=d.get("max_runtime", None),
@@ -7322,6 +7331,7 @@ class ForecastingAPI:
         *,
         custom_weights_column: Optional[str] = None,
         experiment_path: Optional[str] = None,
+        future_feature_data_path: Optional[str] = None,
         holiday_regions: Optional[List[str]] = None,
         include_features: Optional[List[str]] = None,
         max_runtime: Optional[int] = None,
@@ -7337,7 +7347,7 @@ class ForecastingAPI:
         Creates a serverless forecasting experiment. Returns the experiment ID.
 
         :param train_data_path: str
-          The fully qualified name of a Unity Catalog table, formatted as catalog_name.schema_name.table_name,
+          The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name,
           used as training data for the forecasting model.
         :param target_column: str
           The column in the input training table used as the prediction target for model training. The values
@@ -7355,6 +7365,9 @@ class ForecastingAPI:
           The column in the training table used to customize weights for each time series.
         :param experiment_path: str (optional)
           The path in the workspace to store the created experiment.
+        :param future_feature_data_path: str (optional)
+          The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name,
+          used to store future feature data for predictions.
         :param holiday_regions: List[str] (optional)
          The region code(s) to automatically add holiday features. Currently supports only one region.
         :param include_features: List[str] (optional)
@@ -7395,6 +7408,8 @@ class ForecastingAPI:
             body["forecast_granularity"] = forecast_granularity
         if forecast_horizon is not None:
             body["forecast_horizon"] = forecast_horizon
+        if future_feature_data_path is not None:
+            body["future_feature_data_path"] = future_feature_data_path
         if holiday_regions is not None:
             body["holiday_regions"] = [v for v in holiday_regions]
         if include_features is not None:
@@ -7441,6 +7456,7 @@ class ForecastingAPI:
         *,
         custom_weights_column: Optional[str] = None,
         experiment_path: Optional[str] = None,
+        future_feature_data_path: Optional[str] = None,
         holiday_regions: Optional[List[str]] = None,
         include_features: Optional[List[str]] = None,
         max_runtime: Optional[int] = None,
@@ -7457,6 +7473,7 @@ class ForecastingAPI:
             experiment_path=experiment_path,
             forecast_granularity=forecast_granularity,
             forecast_horizon=forecast_horizon,
+            future_feature_data_path=future_feature_data_path,
             holiday_regions=holiday_regions,
             include_features=include_features,
             max_runtime=max_runtime,
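Together these hunks thread the new future_feature_data_path parameter from the request dataclass through both create_experiment overloads and their docstrings. A hedged usage sketch, assuming the ForecastingAPI is exposed as w.forecasting (table and column names are placeholders, and the granularity string is illustrative):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    response = w.forecasting.create_experiment(
        train_data_path="main.forecasting.sales_history",  # hypothetical UC table
        target_column="units_sold",
        time_column="sale_date",
        forecast_granularity="1 day",  # illustrative; use a granularity the API accepts
        forecast_horizon=30,
        # New in 0.52.0: a UC table of known-in-advance feature values for the
        # forecast window, e.g. planned promotions or pricing.
        future_feature_data_path="main.forecasting.future_promotions",
    )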
databricks/sdk/service/pipelines.py CHANGED
@@ -758,7 +758,7 @@ class EventLogSpec:
 @dataclass
 class FileLibrary:
     path: Optional[str] = None
-    """The absolute path of the file."""
+    """The absolute path of the source code."""
 
     def as_dict(self) -> dict:
         """Serializes the FileLibrary into a dictionary suitable for use as a JSON request body."""
@@ -1029,25 +1029,25 @@ class IngestionConfig:
 
 @dataclass
 class IngestionGatewayPipelineDefinition:
-    connection_id: Optional[str] = None
-    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
-    gateway pipeline uses to communicate with the source."""
-
-    connection_name: Optional[str] = None
+    connection_name: str
     """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
     source."""
 
-    gateway_storage_catalog: Optional[str] = None
+    gateway_storage_catalog: str
     """Required, Immutable. The name of the catalog for the gateway pipeline's storage location."""
 
+    gateway_storage_schema: str
+    """Required, Immutable. The name of the schema for the gateway pipelines's storage location."""
+
+    connection_id: Optional[str] = None
+    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
+    gateway pipeline uses to communicate with the source."""
+
     gateway_storage_name: Optional[str] = None
     """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the
     destination to use for the data that is extracted by the gateway. Delta Live Tables system will
     automatically create the storage location under the catalog and schema."""
 
-    gateway_storage_schema: Optional[str] = None
-    """Required, Immutable. The name of the schema for the gateway pipelines's storage location."""
-
     def as_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
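connection_name, gateway_storage_catalog, and gateway_storage_schema move from optional keywords to required fields, reordered ahead of the remaining optional ones; the deprecated connection_id stays optional. A sketch under that assumption (connection and storage names are placeholders):

    from databricks.sdk.service.pipelines import IngestionGatewayPipelineDefinition

    gateway = IngestionGatewayPipelineDefinition(
        connection_name="sqlserver-connection",     # hypothetical UC connection
        gateway_storage_catalog="main",
        gateway_storage_schema="ingestion_gateway",
        gateway_storage_name="sqlserver_gateway",   # still optional
    )
    print(gateway.as_dict())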
@@ -1293,7 +1293,7 @@ class MaturityLevel(Enum):
 @dataclass
 class NotebookLibrary:
     path: Optional[str] = None
-    """The absolute path of the notebook."""
+    """The absolute path of the source code."""
 
     def as_dict(self) -> dict:
         """Serializes the NotebookLibrary into a dictionary suitable for use as a JSON request body."""
@@ -1892,7 +1892,7 @@ class PipelineClusterAutoscaleMode(Enum):
 
 @dataclass
 class PipelineDeployment:
-    kind: Optional[DeploymentKind] = None
+    kind: DeploymentKind
     """The deployment method that manages the pipeline."""
 
     metadata_file_path: Optional[str] = None
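kind on PipelineDeployment likewise becomes required. A one-line sketch, assuming the BUNDLE member of DeploymentKind in the same module:

    from databricks.sdk.service.pipelines import DeploymentKind, PipelineDeployment

    # kind must now always be supplied; BUNDLE marks a bundle-managed pipeline.
    deployment = PipelineDeployment(kind=DeploymentKind.BUNDLE)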
@@ -2584,18 +2584,18 @@ class PipelineTrigger:
 
 @dataclass
 class ReportSpec:
-    destination_catalog: Optional[str] = None
+    source_url: str
+    """Required. Report URL in the source system."""
+
+    destination_catalog: str
     """Required. Destination catalog to store table."""
 
-    destination_schema: Optional[str] = None
+    destination_schema: str
     """Required. Destination schema to store table."""
 
     destination_table: Optional[str] = None
     """Required. Destination table name. The pipeline fails if a table with that name already exists."""
 
-    source_url: Optional[str] = None
-    """Required. Report URL in the source system."""
-
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings override the
     table_configuration defined in the IngestionPipelineDefinition object."""
@@ -2731,10 +2731,13 @@ class RunAs:
 
 @dataclass
 class SchemaSpec:
-    destination_catalog: Optional[str] = None
+    source_schema: str
+    """Required. Schema name in the source database."""
+
+    destination_catalog: str
     """Required. Destination catalog to store tables."""
 
-    destination_schema: Optional[str] = None
+    destination_schema: str
     """Required. Destination schema to store tables in. Tables with the same name as the source tables
     are created in this destination schema. The pipeline fails If a table with the same name already
     exists."""
@@ -2742,9 +2745,6 @@ class SchemaSpec:
     source_catalog: Optional[str] = None
     """The source catalog name. Might be optional depending on the type of source."""
 
-    source_schema: Optional[str] = None
-    """Required. Schema name in the source database."""
-
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings are applied to all
     tables in this schema and override the table_configuration defined in the
@@ -2924,6 +2924,7 @@ class StackFrame:
 @dataclass
 class StartUpdate:
     cause: Optional[StartUpdateCause] = None
+    """What triggered this update."""
 
     full_refresh: Optional[bool] = None
     """If true, this update will reset all tables before running."""
@@ -2992,6 +2993,7 @@ class StartUpdate:
 
 
 class StartUpdateCause(Enum):
+    """What triggered this update."""
 
     API_CALL = "API_CALL"
     JOB_TASK = "JOB_TASK"
@@ -3045,10 +3047,13 @@ class StopPipelineResponse:
 
 @dataclass
 class TableSpec:
-    destination_catalog: Optional[str] = None
+    source_table: str
+    """Required. Table name in the source database."""
+
+    destination_catalog: str
     """Required. Destination catalog to store table."""
 
-    destination_schema: Optional[str] = None
+    destination_schema: str
     """Required. Destination schema to store table."""
 
     destination_table: Optional[str] = None
@@ -3061,9 +3066,6 @@ class TableSpec:
     source_schema: Optional[str] = None
     """Schema name in the source database. Might be optional depending on the type of source."""
 
-    source_table: Optional[str] = None
-    """Required. Table name in the source database."""
-
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings override the
     table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec."""
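ReportSpec, SchemaSpec, and TableSpec all follow the same pattern: fields whose docstrings already said "Required." are promoted to required dataclass fields and reordered ahead of the optional ones. A sketch for SchemaSpec; the same shape applies to the other two (catalog and schema names are placeholders):

    from databricks.sdk.service.pipelines import SchemaSpec

    spec = SchemaSpec(
        source_schema="dbo",                   # hypothetical source schema
        destination_catalog="main",
        destination_schema="sqlserver_mirror",
    )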
@@ -3122,6 +3124,18 @@ class TableSpec:
 
 @dataclass
 class TableSpecificConfig:
+    exclude_columns: Optional[List[str]] = None
+    """A list of column names to be excluded for the ingestion. When not specified, include_columns
+    fully controls what columns to be ingested. When specified, all other columns including future
+    ones will be automatically included for ingestion. This field in mutually exclusive with
+    `include_columns`."""
+
+    include_columns: Optional[List[str]] = None
+    """A list of column names to be included for the ingestion. When not specified, all columns except
+    ones in exclude_columns will be included. Future columns will be automatically included. When
+    specified, all other future columns will be automatically excluded from ingestion. This field in
+    mutually exclusive with `exclude_columns`."""
+
     primary_keys: Optional[List[str]] = None
     """The primary key of the table used to apply changes."""
 
@@ -3139,6 +3153,10 @@ class TableSpecificConfig:
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.exclude_columns:
+            body["exclude_columns"] = [v for v in self.exclude_columns]
+        if self.include_columns:
+            body["include_columns"] = [v for v in self.include_columns]
         if self.primary_keys:
             body["primary_keys"] = [v for v in self.primary_keys]
         if self.salesforce_include_formula_fields is not None:
@@ -3152,6 +3170,10 @@ class TableSpecificConfig:
     def as_shallow_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.exclude_columns:
+            body["exclude_columns"] = self.exclude_columns
+        if self.include_columns:
+            body["include_columns"] = self.include_columns
         if self.primary_keys:
             body["primary_keys"] = self.primary_keys
         if self.salesforce_include_formula_fields is not None:
@@ -3166,6 +3188,8 @@ class TableSpecificConfig:
     def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
         return cls(
+            exclude_columns=d.get("exclude_columns", None),
+            include_columns=d.get("include_columns", None),
             primary_keys=d.get("primary_keys", None),
             salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
             scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
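The new include_columns/exclude_columns fields give per-table column filtering (the docstrings describe them as mutually exclusive), and the serialization methods round-trip them. A short sketch (column names are placeholders):

    from databricks.sdk.service.pipelines import TableSpecificConfig

    # Allowlist: ingest only these columns.
    narrow = TableSpecificConfig(include_columns=["id", "name", "updated_at"])

    # Denylist: ingest everything, including future columns, except these.
    wide = TableSpecificConfig(exclude_columns=["ssn", "credit_card"])

    # as_dict/from_dict now carry the new fields through a round trip.
    assert TableSpecificConfig.from_dict(narrow.as_dict()) == narrow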
@@ -3325,6 +3349,7 @@ class UpdateStateInfo:
     creation_time: Optional[str] = None
 
     state: Optional[UpdateStateInfoState] = None
+    """The update state."""
 
     update_id: Optional[str] = None
 
@@ -3361,6 +3386,7 @@ class UpdateStateInfo:
 
 
 class UpdateStateInfoState(Enum):
+    """The update state."""
 
     CANCELED = "CANCELED"
     COMPLETED = "COMPLETED"
@@ -3687,6 +3713,7 @@ class PipelinesAPI:
         Retrieves events for a pipeline.
 
         :param pipeline_id: str
+          The pipeline to return events for.
         :param filter: str (optional)
           Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
           are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp
@@ -3867,6 +3894,7 @@ class PipelinesAPI:
 
         :param pipeline_id: str
         :param cause: :class:`StartUpdateCause` (optional)
+          What triggered this update.
         :param full_refresh: bool (optional)
           If true, this update will reset all tables before running.
         :param full_refresh_selection: List[str] (optional)
databricks/sdk/service/sql.py CHANGED
@@ -4791,7 +4791,7 @@ class ListAlertsResponseAlert:
 class ListAlertsV2Response:
     next_page_token: Optional[str] = None
 
-    results: Optional[List[ListAlertsV2ResponseAlert]] = None
+    results: Optional[List[AlertV2]] = None
 
     def as_dict(self) -> dict:
         """Serializes the ListAlertsV2Response into a dictionary suitable for use as a JSON request body."""
@@ -4814,131 +4814,7 @@ class ListAlertsV2Response:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2Response:
         """Deserializes the ListAlertsV2Response from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None),
-            results=_repeated_dict(d, "results", ListAlertsV2ResponseAlert),
-        )
-
-
-@dataclass
-class ListAlertsV2ResponseAlert:
-    create_time: Optional[str] = None
-    """The timestamp indicating when the alert was created."""
-
-    custom_description: Optional[str] = None
-    """Custom description for the alert. support mustache template."""
-
-    custom_summary: Optional[str] = None
-    """Custom summary for the alert. support mustache template."""
-
-    display_name: Optional[str] = None
-    """The display name of the alert."""
-
-    evaluation: Optional[AlertV2Evaluation] = None
-
-    id: Optional[str] = None
-    """UUID identifying the alert."""
-
-    lifecycle_state: Optional[LifecycleState] = None
-    """Indicates whether the query is trashed."""
-
-    owner_user_name: Optional[str] = None
-    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
-
-    query_text: Optional[str] = None
-    """Text of the query to be run."""
-
-    run_as_user_name: Optional[str] = None
-    """The run as username. This field is set to "Unavailable" if the user has been deleted."""
-
-    schedule: Optional[CronSchedule] = None
-
-    update_time: Optional[str] = None
-    """The timestamp indicating when the alert was updated."""
-
-    warehouse_id: Optional[str] = None
-    """ID of the SQL warehouse attached to the alert."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ListAlertsV2ResponseAlert into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.create_time is not None:
-            body["create_time"] = self.create_time
-        if self.custom_description is not None:
-            body["custom_description"] = self.custom_description
-        if self.custom_summary is not None:
-            body["custom_summary"] = self.custom_summary
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.evaluation:
-            body["evaluation"] = self.evaluation.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
-        if self.lifecycle_state is not None:
-            body["lifecycle_state"] = self.lifecycle_state.value
-        if self.owner_user_name is not None:
-            body["owner_user_name"] = self.owner_user_name
-        if self.query_text is not None:
-            body["query_text"] = self.query_text
-        if self.run_as_user_name is not None:
-            body["run_as_user_name"] = self.run_as_user_name
-        if self.schedule:
-            body["schedule"] = self.schedule.as_dict()
-        if self.update_time is not None:
-            body["update_time"] = self.update_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ListAlertsV2ResponseAlert into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.create_time is not None:
-            body["create_time"] = self.create_time
-        if self.custom_description is not None:
-            body["custom_description"] = self.custom_description
-        if self.custom_summary is not None:
-            body["custom_summary"] = self.custom_summary
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.evaluation:
-            body["evaluation"] = self.evaluation
-        if self.id is not None:
-            body["id"] = self.id
-        if self.lifecycle_state is not None:
-            body["lifecycle_state"] = self.lifecycle_state
-        if self.owner_user_name is not None:
-            body["owner_user_name"] = self.owner_user_name
-        if self.query_text is not None:
-            body["query_text"] = self.query_text
-        if self.run_as_user_name is not None:
-            body["run_as_user_name"] = self.run_as_user_name
-        if self.schedule:
-            body["schedule"] = self.schedule
-        if self.update_time is not None:
-            body["update_time"] = self.update_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2ResponseAlert:
-        """Deserializes the ListAlertsV2ResponseAlert from a dictionary."""
-        return cls(
-            create_time=d.get("create_time", None),
-            custom_description=d.get("custom_description", None),
-            custom_summary=d.get("custom_summary", None),
-            display_name=d.get("display_name", None),
-            evaluation=_from_dict(d, "evaluation", AlertV2Evaluation),
-            id=d.get("id", None),
-            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
-            owner_user_name=d.get("owner_user_name", None),
-            query_text=d.get("query_text", None),
-            run_as_user_name=d.get("run_as_user_name", None),
-            schedule=_from_dict(d, "schedule", CronSchedule),
-            update_time=d.get("update_time", None),
-            warehouse_id=d.get("warehouse_id", None),
-        )
+        return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", AlertV2))
 
 
 class ListOrder(Enum):
@@ -8966,9 +8842,7 @@ class AlertsV2API:
         res = self._api.do("GET", f"/api/2.0/alerts/{id}", headers=headers)
         return AlertV2.from_dict(res)
 
-    def list_alerts(
-        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[ListAlertsV2ResponseAlert]:
+    def list_alerts(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]:
         """List alerts.
 
         Gets a list of alerts accessible to the user, ordered by creation time.
@@ -8976,7 +8850,7 @@ class AlertsV2API:
         :param page_size: int (optional)
         :param page_token: str (optional)
 
-        :returns: Iterator over :class:`ListAlertsV2ResponseAlert`
+        :returns: Iterator over :class:`AlertV2`
         """
 
         query = {}
@@ -8992,7 +8866,7 @@ class AlertsV2API:
             json = self._api.do("GET", "/api/2.0/alerts", query=query, headers=headers)
             if "results" in json:
                 for v in json["results"]:
-                    yield ListAlertsV2ResponseAlert.from_dict(v)
+                    yield AlertV2.from_dict(v)
             if "next_page_token" not in json or not json["next_page_token"]:
                 return
             query["page_token"] = json["next_page_token"]
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.51.0"
+__version__ = "0.52.0"
databricks_sdk-0.52.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-sdk
-Version: 0.51.0
+Version: 0.52.0
 Summary: Databricks SDK for Python (Beta)
 Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
 Keywords: databricks,sdk
databricks_sdk-0.52.0.dist-info/RECORD CHANGED
@@ -16,7 +16,7 @@ databricks/sdk/oidc_token_supplier.py,sha256=QrO6J0QY4yFfcdQDL5h2OfxMxvBZJPtPmPe
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=7k2kEexGqGKXHNAWHbPFSZSugU8UIU0qtyly_hix22Q,2581
 databricks/sdk/useragent.py,sha256=boEgzTv-Zmo6boipZKjSopNy0CXg4GShC1_lTKpJgqs,7361
-databricks/sdk/version.py,sha256=5j_riw1v3MMRXNOOd6h7WlISOYlrHrS61_8RReiM4yA,23
+databricks/sdk/version.py,sha256=uXy9jtvBCb6cwZyNng8d7tehPtkx2Xcdr7Lm3SEzEF8,23
 databricks/sdk/_widgets/__init__.py,sha256=VhI-VvLlr3rKUT1nbROslHJIbmZX_tPJ9rRhrdFsYUA,2811
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=_hwCbptLbRzWEmknco0H1wQNAYcuy2pjFO9NiRbvFeo,1127
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=mg3rEPG9z76e0yVjGgcLybUvd_zSuN5ziGeKiZ-c8Ew,2927
@@ -47,25 +47,25 @@ databricks/sdk/service/apps.py,sha256=-SO7KhJXg-kRATaDb-Fa0SdIAwRd3FtMoNYDiQY0N4
 databricks/sdk/service/billing.py,sha256=2_6QVp6KPTKqFHgoaa8dGSOrD1P8i-UjKaupWOxpMW8,99177
 databricks/sdk/service/catalog.py,sha256=Rf6rX7JPAwmFCry92AnDvMdDyxZR2GCFbkjnYH-j2jY,605734
 databricks/sdk/service/cleanrooms.py,sha256=jljFgEqNppTlHIEzmjnpe1GtH_x9a8ZXT7OSGIsIi40,58444
-databricks/sdk/service/compute.py,sha256=cpxBSh-GU9U9pD88toyPj3tRFHUmy5RjpPX-ye-HhME,572688
-databricks/sdk/service/dashboards.py,sha256=y-W0AsHiQNrN9Xr9FcIRPz1XWTte8xYIKrlz4VpQ0Fg,117899
-databricks/sdk/service/files.py,sha256=gWjtORuRe7B1VZol0rr_dcSHq1gMmuKszGHaMqunslU,46234
+databricks/sdk/service/compute.py,sha256=X3ZWan4su9xhkvm3bUvKxpwEDflog3O2-RSbaGWb158,573208
+databricks/sdk/service/dashboards.py,sha256=v0tnXhlD-yuIqlW_9IVfyWVQBZct7aZ0kW2gA2m-quo,117907
+databricks/sdk/service/files.py,sha256=nAbN1US56vJYk-YDPShfT_mgk_KIhgbidtSGTraCqPo,46335
 databricks/sdk/service/iam.py,sha256=23mpLuwFwNAwK632y11dI7TH3jXmShJLiDx_KxKt6JQ,173744
 databricks/sdk/service/jobs.py,sha256=HyHeFWJ_hpAL9lZL34yRPPxQjxbPHlKud59BkAoewXU,466429
 databricks/sdk/service/marketplace.py,sha256=NLCJ7zb0Oj5nhLth9R1Szuz6fSSoxAnsKvFhKgx994s,175741
-databricks/sdk/service/ml.py,sha256=ToIiQYYc-V4h9VnYO52C2THu1CH7YvKe22bjmL7HYVw,328351
+databricks/sdk/service/ml.py,sha256=5u1HBPkHRdQIkIL4W4yIFt_aQpF4wUwzi3oLbdN1w6E,329433
 databricks/sdk/service/oauth2.py,sha256=cBTkX-uFUqnFyfC8_tMOjJoohB9ew-1cLSfPJ4lx1lk,80314
-databricks/sdk/service/pipelines.py,sha256=DN2PfcsavsV8Bs5hl7coGLXcfhEkk0lcJhSXpfSpyHQ,168165
+databricks/sdk/service/pipelines.py,sha256=SOBIXs311UOwe5tBPHER9xUwLGdhAsqLNokdaz47dgQ,169451
 databricks/sdk/service/provisioning.py,sha256=-Ly2o02i-jhNmiP9zLPeYF8H2usoB-oTG0RLF5gkIpc,169311
 databricks/sdk/service/serving.py,sha256=ey_fMpKC1OoOMDxnLAuDqDjrNgAqiCW3CgTbkq3hSVs,213477
 databricks/sdk/service/settings.py,sha256=M-noQkNmGU7zUR9bFMt_C8GSUn0rzZ3ZAQwDqnlXZKk,347400
 databricks/sdk/service/sharing.py,sha256=IV-rws5EzsHFfyjSBlbPEDQKZvbH4OgDp9Yxg9Hyrwk,142557
-databricks/sdk/service/sql.py,sha256=15l4tkLtvbobWzhljVrTRtoBytNhPe4Gs0EcRsh1Jsw,448655
+databricks/sdk/service/sql.py,sha256=9LswXMc8tO9amPtRoPzEovdFMo51Gur-Evoj8-iqIu0,443485
 databricks/sdk/service/vectorsearch.py,sha256=1SYRPCJHZ3rVxRFFIPMKpFwKpSHA05ffbFqdnjz6JAU,88620
 databricks/sdk/service/workspace.py,sha256=T0ZbnG1qcPjKysGO_tBzl5x1PyalydeYJRBZbooYNm0,130893
-databricks_sdk-0.51.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
-databricks_sdk-0.51.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
-databricks_sdk-0.51.0.dist-info/METADATA,sha256=icR1FRAu2UN3kddUW6m4rZoI_Xw9M97-hD2UPJ3IecE,39397
-databricks_sdk-0.51.0.dist-info/WHEEL,sha256=ooBFpIzZCPdw3uqIQsOo4qqbA4ZRPxHnOH7peeONza0,91
-databricks_sdk-0.51.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
-databricks_sdk-0.51.0.dist-info/RECORD,,
+databricks_sdk-0.52.0.dist-info/licenses/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.52.0.dist-info/licenses/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
+databricks_sdk-0.52.0.dist-info/METADATA,sha256=qr03b5p6OiFNP49K2DxzzOy0b4Q5aVFAmOhIKb-91dM,39397
+databricks_sdk-0.52.0.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
+databricks_sdk-0.52.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.52.0.dist-info/RECORD,,
databricks_sdk-0.52.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.0.1)
+Generator: setuptools (80.1.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 