databricks-sdk 0.65.0__py3-none-any.whl → 0.67.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -1651,9 +1651,7 @@ class ExportRunOutput:
 
     views: Optional[List[ViewItem]] = None
     """The exported content in HTML format (one for every view item). To extract the HTML notebook from
-    the JSON response, download and run this [Python script].
-
-    [Python script]: https://docs.databricks.com/en/_static/examples/extract.py"""
+    the JSON response, download and run this [Python script](/_static/examples/extract.py)."""
 
     def as_dict(self) -> dict:
         """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body."""
@@ -5659,7 +5657,7 @@ class RunTask:
     clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
     """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
 
-    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+    [clean rooms]: https://docs.databricks.com/clean-rooms/index.html"""
 
     cleanup_duration: Optional[int] = None
     """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts.
@@ -5696,9 +5694,6 @@ class RunTask:
     description: Optional[str] = None
    """An optional description for this task."""
 
-    disabled: Optional[bool] = None
-    """Deprecated, field was never used in production."""
-
     effective_performance_target: Optional[PerformanceTarget] = None
     """The actual performance target used by the serverless run during execution. This can differ from
     the client-set performance target on the request depending on whether the performance mode is
@@ -5810,21 +5805,9 @@ class RunTask:
     """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
-    This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+    """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present.
+    Databricks recommends using the spark_jar_task instead; see [Spark Submit task for
+    jobs](/jobs/spark-submit)."""
 
     sql_task: Optional[SqlTask] = None
     """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
@@ -5873,8 +5856,6 @@ class RunTask:
             body["depends_on"] = [v.as_dict() for v in self.depends_on]
         if self.description is not None:
             body["description"] = self.description
-        if self.disabled is not None:
-            body["disabled"] = self.disabled
         if self.effective_performance_target is not None:
             body["effective_performance_target"] = self.effective_performance_target.value
         if self.email_notifications:
@@ -5972,8 +5953,6 @@ class RunTask:
             body["depends_on"] = self.depends_on
         if self.description is not None:
             body["description"] = self.description
-        if self.disabled is not None:
-            body["disabled"] = self.disabled
         if self.effective_performance_target is not None:
             body["effective_performance_target"] = self.effective_performance_target
         if self.email_notifications:
@@ -6061,7 +6040,6 @@ class RunTask:
             dbt_task=_from_dict(d, "dbt_task", DbtTask),
             depends_on=_repeated_dict(d, "depends_on", TaskDependency),
             description=d.get("description", None),
-            disabled=d.get("disabled", None),
             effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
             email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
             end_time=d.get("end_time", None),
@@ -6916,7 +6894,7 @@ class SubmitTask:
     clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
     """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
 
-    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+    [clean rooms]: https://docs.databricks.com/clean-rooms/index.html"""
 
     condition_task: Optional[ConditionTask] = None
     """The task evaluates a condition that can be used to control the execution of other tasks when the
@@ -7003,21 +6981,9 @@ class SubmitTask:
     """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
-    This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+    """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present.
+    Databricks recommends using the spark_jar_task instead; see [Spark Submit task for
+    jobs](/jobs/spark-submit)."""
 
     sql_task: Optional[SqlTask] = None
     """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
@@ -7407,7 +7373,7 @@ class Task:
     clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
     """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
 
-    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+    [clean rooms]: https://docs.databricks.com/clean-rooms/index.html"""
 
     condition_task: Optional[ConditionTask] = None
     """The task evaluates a condition that can be used to control the execution of other tasks when the
@@ -7438,6 +7404,10 @@ class Task:
     disable_auto_optimization: Optional[bool] = None
     """An option to disable auto optimization in serverless"""
 
+    disabled: Optional[bool] = None
+    """An optional flag to disable the task. If set to true, the task will not run even if it is part
+    of a job."""
+
     email_notifications: Optional[TaskEmailNotifications] = None
     """An optional set of email addresses that is notified when runs of this task begin or complete as
     well as when this task is deleted. The default behavior is to not send any emails."""
@@ -7520,21 +7490,9 @@ class Task:
     """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
-    This task can run only on new clusters and is not compatible with serverless compute.
-
-    In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
-    `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
-    configurations.
-
-    `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you
-    _cannot_ specify them in parameters.
-
-    By default, the Spark submit job uses all available memory (excluding reserved memory for
-    Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value
-    to leave some room for off-heap usage.
-
-    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
+    """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present.
+    Databricks recommends using the spark_jar_task instead; see [Spark Submit task for
+    jobs](/jobs/spark-submit)."""
 
     sql_task: Optional[SqlTask] = None
     """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
@@ -7568,6 +7526,8 @@ class Task:
             body["description"] = self.description
         if self.disable_auto_optimization is not None:
             body["disable_auto_optimization"] = self.disable_auto_optimization
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
         if self.email_notifications:
             body["email_notifications"] = self.email_notifications.as_dict()
         if self.environment_key is not None:
@@ -7643,6 +7603,8 @@ class Task:
             body["description"] = self.description
         if self.disable_auto_optimization is not None:
             body["disable_auto_optimization"] = self.disable_auto_optimization
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
         if self.email_notifications:
             body["email_notifications"] = self.email_notifications
         if self.environment_key is not None:
@@ -7710,6 +7672,7 @@ class Task:
             depends_on=_repeated_dict(d, "depends_on", TaskDependency),
             description=d.get("description", None),
             disable_auto_optimization=d.get("disable_auto_optimization", None),
+            disabled=d.get("disabled", None),
             email_notifications=_from_dict(d, "email_notifications", TaskEmailNotifications),
             environment_key=d.get("environment_key", None),
             existing_cluster_id=d.get("existing_cluster_id", None),
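
Taken together, the jobs hunks relocate `disabled`: it is removed from RunTask, where it was deprecated and never populated, and added to Task as a settable flag. A minimal sketch, assuming 0.67.0:

from databricks.sdk.service import jobs

# A task that stays defined in the job but is skipped at run time.
task = jobs.Task(
    task_key="backfill",
    notebook_task=jobs.NotebookTask(notebook_path="/Repos/etl/backfill"),
    disabled=True,
)

# as_dict() emits the flag only when it is set.
assert task.as_dict()["disabled"] is True

The remaining hunks leave the jobs module: 0.67.0 introduces a feature-engineering surface in the ML service, built from the dataclasses below.
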
@@ -512,6 +512,30 @@ class CreateWebhookResponse:
         return cls(webhook=_from_dict(d, "webhook", RegistryWebhook))
 
 
+@dataclass
+class DataSource:
+    delta_table_source: Optional[DeltaTableSource] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the DataSource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.delta_table_source:
+            body["delta_table_source"] = self.delta_table_source.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_table_source:
+            body["delta_table_source"] = self.delta_table_source
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DataSource:
+        """Deserializes the DataSource from a dictionary."""
+        return cls(delta_table_source=_from_dict(d, "delta_table_source", DeltaTableSource))
+
+
 @dataclass
 class Dataset:
     """Dataset. Represents a reference to data used for training, testing, or evaluation during the
@@ -868,6 +892,49 @@ class DeleteWebhookResponse:
         return cls()
 
 
+@dataclass
+class DeltaTableSource:
+    full_name: str
+    """The full three-part (catalog, schema, table) name of the Delta table."""
+
+    entity_columns: List[str]
+    """The entity columns of the Delta table."""
+
+    timeseries_column: str
+    """The timeseries column of the Delta table."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeltaTableSource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.entity_columns:
+            body["entity_columns"] = [v for v in self.entity_columns]
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.timeseries_column is not None:
+            body["timeseries_column"] = self.timeseries_column
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaTableSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entity_columns:
+            body["entity_columns"] = self.entity_columns
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.timeseries_column is not None:
+            body["timeseries_column"] = self.timeseries_column
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSource:
+        """Deserializes the DeltaTableSource from a dictionary."""
+        return cls(
+            entity_columns=d.get("entity_columns", None),
+            full_name=d.get("full_name", None),
+            timeseries_column=d.get("timeseries_column", None),
+        )
+
+
 @dataclass
 class Experiment:
     """An experiment and its metadata."""
@@ -1210,6 +1277,73 @@ class ExperimentTag:
         return cls(key=d.get("key", None), value=d.get("value", None))
 
 
+@dataclass
+class Feature:
+    full_name: str
+    """The full three-part name (catalog, schema, name) of the feature."""
+
+    source: DataSource
+    """The data source of the feature."""
+
+    inputs: List[str]
+    """The input columns from which the feature is computed."""
+
+    function: Function
+    """The function by which the feature is computed."""
+
+    time_window: TimeWindow
+    """The time window in which the feature is computed."""
+
+    description: Optional[str] = None
+    """The description of the feature."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Feature into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.description is not None:
+            body["description"] = self.description
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.function:
+            body["function"] = self.function.as_dict()
+        if self.inputs:
+            body["inputs"] = [v for v in self.inputs]
+        if self.source:
+            body["source"] = self.source.as_dict()
+        if self.time_window:
+            body["time_window"] = self.time_window.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Feature into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None:
+            body["description"] = self.description
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.function:
+            body["function"] = self.function
+        if self.inputs:
+            body["inputs"] = self.inputs
+        if self.source:
+            body["source"] = self.source
+        if self.time_window:
+            body["time_window"] = self.time_window
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> Feature:
+        """Deserializes the Feature from a dictionary."""
+        return cls(
+            description=d.get("description", None),
+            full_name=d.get("full_name", None),
+            function=_from_dict(d, "function", Function),
+            inputs=d.get("inputs", None),
+            source=_from_dict(d, "source", DataSource),
+            time_window=_from_dict(d, "time_window", TimeWindow),
+        )
+
+
 @dataclass
 class FeatureLineage:
     feature_specs: Optional[List[FeatureLineageFeatureSpec]] = None
@@ -1520,6 +1654,90 @@ class ForecastingExperimentState(Enum):
     SUCCEEDED = "SUCCEEDED"
 
 
+@dataclass
+class Function:
+    function_type: FunctionFunctionType
+    """The type of the function."""
+
+    extra_parameters: Optional[List[FunctionExtraParameter]] = None
+    """Extra parameters for parameterized functions."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Function into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.extra_parameters:
+            body["extra_parameters"] = [v.as_dict() for v in self.extra_parameters]
+        if self.function_type is not None:
+            body["function_type"] = self.function_type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Function into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.extra_parameters:
+            body["extra_parameters"] = self.extra_parameters
+        if self.function_type is not None:
+            body["function_type"] = self.function_type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> Function:
+        """Deserializes the Function from a dictionary."""
+        return cls(
+            extra_parameters=_repeated_dict(d, "extra_parameters", FunctionExtraParameter),
+            function_type=_enum(d, "function_type", FunctionFunctionType),
+        )
+
+
+@dataclass
+class FunctionExtraParameter:
+    key: str
+    """The name of the parameter."""
+
+    value: str
+    """The value of the parameter."""
+
+    def as_dict(self) -> dict:
+        """Serializes the FunctionExtraParameter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionExtraParameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> FunctionExtraParameter:
+        """Deserializes the FunctionExtraParameter from a dictionary."""
+        return cls(key=d.get("key", None), value=d.get("value", None))
+
+
+class FunctionFunctionType(Enum):
+
+    APPROX_COUNT_DISTINCT = "APPROX_COUNT_DISTINCT"
+    APPROX_PERCENTILE = "APPROX_PERCENTILE"
+    AVG = "AVG"
+    COUNT = "COUNT"
+    FIRST = "FIRST"
+    LAST = "LAST"
+    MAX = "MAX"
+    MIN = "MIN"
+    STDDEV_POP = "STDDEV_POP"
+    STDDEV_SAMP = "STDDEV_SAMP"
+    SUM = "SUM"
+    VAR_POP = "VAR_POP"
+    VAR_SAMP = "VAR_SAMP"
+
+
 @dataclass
 class GetExperimentByNameResponse:
     experiment: Optional[Experiment] = None
@@ -2167,6 +2385,38 @@ class ListFeatureTagsResponse:
         )
 
 
+@dataclass
+class ListFeaturesResponse:
+    features: Optional[List[Feature]] = None
+    """List of features."""
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of results for this query."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListFeaturesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.features:
+            body["features"] = [v.as_dict() for v in self.features]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFeaturesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.features:
+            body["features"] = self.features
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListFeaturesResponse:
+        """Deserializes the ListFeaturesResponse from a dictionary."""
+        return cls(features=_repeated_dict(d, "features", Feature), next_page_token=d.get("next_page_token", None))
+
+
 @dataclass
 class ListModelsResponse:
     next_page_token: Optional[str] = None
@@ -4734,6 +4984,38 @@ class TestRegistryWebhookResponse:
         return cls(body=d.get("body", None), status_code=d.get("status_code", None))
 
 
+@dataclass
+class TimeWindow:
+    duration: str
+    """The duration of the time window."""
+
+    offset: Optional[str] = None
+    """The offset of the time window."""
+
+    def as_dict(self) -> dict:
+        """Serializes the TimeWindow into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.duration is not None:
+            body["duration"] = self.duration
+        if self.offset is not None:
+            body["offset"] = self.offset
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TimeWindow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.duration is not None:
+            body["duration"] = self.duration
+        if self.offset is not None:
+            body["offset"] = self.offset
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> TimeWindow:
+        """Deserializes the TimeWindow from a dictionary."""
+        return cls(duration=d.get("duration", None), offset=d.get("offset", None))
+
+
 @dataclass
 class TransitionRequest:
     """For activities, this contains the activity recorded for the action. For comments, this contains
@@ -6256,6 +6538,116 @@ class ExperimentsAPI:
         return UpdateRunResponse.from_dict(res)
 
 
+class FeatureEngineeringAPI:
+    """[description]"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create_feature(self, feature: Feature) -> Feature:
+        """Create a Feature.
+
+        :param feature: :class:`Feature`
+          Feature to create.
+
+        :returns: :class:`Feature`
+        """
+        body = feature.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/feature-engineering/features", body=body, headers=headers)
+        return Feature.from_dict(res)
+
+    def delete_feature(self, full_name: str):
+        """Delete a Feature.
+
+        :param full_name: str
+          Name of the feature to delete.
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/feature-engineering/features/{full_name}", headers=headers)
+
+    def get_feature(self, full_name: str) -> Feature:
+        """Get a Feature.
+
+        :param full_name: str
+          Name of the feature to get.
+
+        :returns: :class:`Feature`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/feature-engineering/features/{full_name}", headers=headers)
+        return Feature.from_dict(res)
+
+    def list_features(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Feature]:
+        """List Features.
+
+        :param page_size: int (optional)
+          The maximum number of results to return.
+        :param page_token: str (optional)
+          Pagination token to go to the next page based on a previous query.
+
+        :returns: Iterator over :class:`Feature`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do("GET", "/api/2.0/feature-engineering/features", query=query, headers=headers)
+            if "features" in json:
+                for v in json["features"]:
+                    yield Feature.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def update_feature(self, full_name: str, feature: Feature, update_mask: str) -> Feature:
+        """Update a Feature.
+
+        :param full_name: str
+          The full three-part name (catalog, schema, name) of the feature.
+        :param feature: :class:`Feature`
+          Feature to update.
+        :param update_mask: str
+          The list of fields to update.
+
+        :returns: :class:`Feature`
+        """
+        body = feature.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH", f"/api/2.0/feature-engineering/features/{full_name}", query=query, body=body, headers=headers
+        )
+        return Feature.from_dict(res)
+
+
 class FeatureStoreAPI:
     """A feature store is a centralized repository that enables data scientists to find and share features. Using
     a feature store also ensures that the code used to compute feature values is the same during model
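
How the new client reads in practice, as a hedged sketch: the `w.feature_engineering` accessor name is inferred from the SDK's usual class-to-attribute naming convention and is not shown in this diff, and `feature` is the dataclass built in the previous sketch.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

created = w.feature_engineering.create_feature(feature)

# list_features() hides the next_page_token loop behind an iterator.
for f in w.feature_engineering.list_features(page_size=100):
    print(f.full_name)

# Partial update: update_mask names the fields to take from the payload.
feature.description = "7-day rolling average of purchase amount"
w.feature_engineering.update_feature(created.full_name, feature, update_mask="description")

The final hunk is unrelated to jobs or ML: it tightens docstrings in the OAuth federation policy model.
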
@@ -232,11 +232,11 @@ class FederationPolicy:
     oidc_policy: Optional[OidcFederationPolicy] = None
 
     policy_id: Optional[str] = None
-    """The ID of the federation policy."""
+    """The ID of the federation policy. Output only."""
 
     service_principal_id: Optional[int] = None
-    """The service principal ID that this federation policy applies to. Only set for service principal
-    federation policies."""
+    """The service principal ID that this federation policy applies to. Output only. Only set for
+    service principal federation policies."""
 
     uid: Optional[str] = None
     """Unique, immutable id of the federation policy."""