databricks-sdk 0.56.0__py3-none-any.whl → 0.57.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective registries. It is provided for informational purposes only.

Potentially problematic release: this version of databricks-sdk might be problematic.

Files changed (31)
  1. databricks/sdk/__init__.py +15 -9
  2. databricks/sdk/service/aibuilder.py +157 -16
  3. databricks/sdk/service/apps.py +14 -42
  4. databricks/sdk/service/billing.py +17 -51
  5. databricks/sdk/service/catalog.py +198 -399
  6. databricks/sdk/service/cleanrooms.py +11 -33
  7. databricks/sdk/service/compute.py +63 -189
  8. databricks/sdk/service/dashboards.py +21 -63
  9. databricks/sdk/service/database.py +45 -30
  10. databricks/sdk/service/files.py +18 -54
  11. databricks/sdk/service/iam.py +55 -165
  12. databricks/sdk/service/jobs.py +232 -85
  13. databricks/sdk/service/marketplace.py +46 -146
  14. databricks/sdk/service/ml.py +455 -216
  15. databricks/sdk/service/oauth2.py +17 -45
  16. databricks/sdk/service/pipelines.py +81 -40
  17. databricks/sdk/service/provisioning.py +30 -90
  18. databricks/sdk/service/qualitymonitorv2.py +5 -15
  19. databricks/sdk/service/serving.py +30 -42
  20. databricks/sdk/service/settings.py +103 -314
  21. databricks/sdk/service/sharing.py +30 -86
  22. databricks/sdk/service/sql.py +74 -184
  23. databricks/sdk/service/vectorsearch.py +13 -43
  24. databricks/sdk/service/workspace.py +35 -105
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.57.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.57.0.dist-info}/RECORD +31 -31
  28. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.57.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.57.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/dashboards.py

@@ -1758,9 +1758,7 @@ class GenieAPI:
         raise TimeoutError(f"timed out after {timeout}: {status_message}")

     def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
-        """Create conversation message.
-
-        Create new message in a [conversation](:method:genie/startconversation). The AI response uses all
+        """Create new message in a [conversation](:method:genie/startconversation). The AI response uses all
         previously created messages in the conversation to respond.

         :param space_id: str
@@ -1806,9 +1804,7 @@ class GenieAPI:
     def execute_message_attachment_query(
         self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """Execute message attachment SQL query.
-
-        Execute the SQL for a message query attachment. Use this API when the query attachment has expired and
+        """Execute the SQL for a message query attachment. Use this API when the query attachment has expired and
         needs to be re-executed.

         :param space_id: str
@@ -1837,9 +1833,7 @@ class GenieAPI:
     def execute_message_query(
         self, space_id: str, conversation_id: str, message_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """[Deprecated] Execute SQL query in a conversation message.
-
-        Execute the SQL query in the message.
+        """Execute the SQL query in the message.

         :param space_id: str
           Genie space ID
@@ -1865,9 +1859,7 @@ class GenieAPI:
     def generate_download_full_query_result(
         self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
     ) -> GenieGenerateDownloadFullQueryResultResponse:
-        """Generate full query result download.
-
-        Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of
+        """Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of
         the download. The query result is stored in an external link and can be retrieved using the [Get
         Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks
         strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition.
@@ -1899,9 +1891,7 @@ class GenieAPI:
     def get_download_full_query_result(
         self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str
     ) -> GenieGetDownloadFullQueryResultResponse:
-        """Get download full query result.
-
-        After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and
+        """After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and
         successfully receiving a `download_id`, use this API to poll the download progress. When the download
         is complete, the API returns one or more external links to the query result files. Warning: Databricks
         strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition.
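
The two download methods above form a pair: `generate_download_full_query_result` kicks off a SQL execution and returns a `download_id`, and `get_download_full_query_result` polls it until external links are available. A minimal polling sketch against the signatures shown (the placeholder IDs, the five-second sleep, and the `download_id`/`statement_response` response fields are assumptions about the generated response classes, not part of this diff):

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # reads auth from the environment / .databrickscfg

ids = dict(
    space_id="<space-id>",
    conversation_id="<conversation-id>",
    message_id="<message-id>",
    attachment_id="<attachment-id>",
)

# Kick off the SQL execution; the response is assumed to carry a download_id.
download = w.genie.generate_download_full_query_result(**ids)

# Poll until the result is ready. statement_response is assumed to be populated
# once the external links exist; treat those URLs like credentials, per the
# warning in the docstring above.
while True:
    result = w.genie.get_download_full_query_result(**ids, download_id=download.download_id)
    if result.statement_response is not None:
        break
    time.sleep(5)
```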
@@ -1936,9 +1926,7 @@ class GenieAPI:
         return GenieGetDownloadFullQueryResultResponse.from_dict(res)

     def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
-        """Get conversation message.
-
-        Get message from conversation.
+        """Get message from conversation.

         :param space_id: str
           The ID associated with the Genie space where the target conversation is located.
@@ -1964,9 +1952,7 @@ class GenieAPI:
     def get_message_attachment_query_result(
         self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """Get message attachment SQL query result.
-
-        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        """Get the result of SQL query if the message has a query attachment. This is only available if a message
         has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.

         :param space_id: str
@@ -1995,9 +1981,7 @@ class GenieAPI:
     def get_message_query_result(
         self, space_id: str, conversation_id: str, message_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """[Deprecated] Get conversation message SQL query result.
-
-        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        """Get the result of SQL query if the message has a query attachment. This is only available if a message
         has a query attachment and the message status is `EXECUTING_QUERY`.

         :param space_id: str
@@ -2024,9 +2008,7 @@ class GenieAPI:
     def get_message_query_result_by_attachment(
         self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
     ) -> GenieGetMessageQueryResultResponse:
-        """[Deprecated] Get conversation message SQL query result.
-
-        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        """Get the result of SQL query if the message has a query attachment. This is only available if a message
         has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.

         :param space_id: str
@@ -2053,9 +2035,7 @@ class GenieAPI:
         return GenieGetMessageQueryResultResponse.from_dict(res)

     def get_space(self, space_id: str) -> GenieSpace:
-        """Get Genie Space.
-
-        Get details of a Genie Space.
+        """Get details of a Genie Space.

         :param space_id: str
           The ID associated with the Genie space
@@ -2073,9 +2053,7 @@ class GenieAPI:
     def list_spaces(
         self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> GenieListSpacesResponse:
-        """List Genie spaces.
-
-        Get list of Genie Spaces.
+        """Get list of Genie Spaces.

         :param page_size: int (optional)
           Maximum number of spaces to return per page
@@ -2098,9 +2076,7 @@ class GenieAPI:
         return GenieListSpacesResponse.from_dict(res)

     def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
-        """Start conversation.
-
-        Start a new conversation.
+        """Start a new conversation.

         :param space_id: str
           The ID associated with the Genie space where you want to start a conversation.
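
Both `start_conversation` and `create_message` return `Wait[GenieMessage]`, so the SDK's generator also emits blocking `*_and_wait` variants. A sketch of a two-turn exchange, assuming those conventional wrapper names and a placeholder space ID:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Block until Genie finishes processing the opening message.
first = w.genie.start_conversation_and_wait(
    space_id="<space-id>",
    content="Total revenue by month?",
)

# Follow-up messages reuse the conversation; the AI sees all prior turns.
reply = w.genie.create_message_and_wait(
    space_id="<space-id>",
    conversation_id=first.conversation_id,
    content="Now only for 2024.",
)
print(reply.status)
```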
@@ -2142,9 +2118,7 @@ class LakeviewAPI:
         self._api = api_client

     def create(self, dashboard: Dashboard) -> Dashboard:
-        """Create dashboard.
-
-        Create a draft dashboard.
+        """Create a draft dashboard.

         :param dashboard: :class:`Dashboard`

@@ -2263,9 +2237,7 @@ class LakeviewAPI:
         )

     def get(self, dashboard_id: str) -> Dashboard:
-        """Get dashboard.
-
-        Get a draft dashboard.
+        """Get a draft dashboard.

         :param dashboard_id: str
           UUID identifying the dashboard.
@@ -2281,9 +2253,7 @@ class LakeviewAPI:
         return Dashboard.from_dict(res)

     def get_published(self, dashboard_id: str) -> PublishedDashboard:
-        """Get published dashboard.
-
-        Get the current published dashboard.
+        """Get the current published dashboard.

         :param dashboard_id: str
           UUID identifying the published dashboard.
@@ -2473,9 +2443,7 @@ class LakeviewAPI:
         parent_path: Optional[str] = None,
         update_parameter_syntax: Optional[bool] = None,
     ) -> Dashboard:
-        """Migrate dashboard.
-
-        Migrates a classic SQL dashboard to Lakeview.
+        """Migrates a classic SQL dashboard to Lakeview.

         :param source_dashboard_id: str
           UUID of the dashboard to be migrated.
@@ -2509,9 +2477,7 @@ class LakeviewAPI:
     def publish(
         self, dashboard_id: str, *, embed_credentials: Optional[bool] = None, warehouse_id: Optional[str] = None
     ) -> PublishedDashboard:
-        """Publish dashboard.
-
-        Publish the current draft dashboard.
+        """Publish the current draft dashboard.

         :param dashboard_id: str
           UUID identifying the dashboard to be published.
@@ -2537,9 +2503,7 @@ class LakeviewAPI:
         return PublishedDashboard.from_dict(res)

     def trash(self, dashboard_id: str):
-        """Trash dashboard.
-
-        Trash a dashboard.
+        """Trash a dashboard.

         :param dashboard_id: str
           UUID identifying the dashboard.
@@ -2554,9 +2518,7 @@ class LakeviewAPI:
         self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}", headers=headers)

     def unpublish(self, dashboard_id: str):
-        """Unpublish dashboard.
-
-        Unpublish the dashboard.
+        """Unpublish the dashboard.

         :param dashboard_id: str
           UUID identifying the published dashboard.
@@ -2571,9 +2533,7 @@ class LakeviewAPI:
         self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", headers=headers)

     def update(self, dashboard_id: str, dashboard: Dashboard) -> Dashboard:
-        """Update dashboard.
-
-        Update a draft dashboard.
+        """Update a draft dashboard.

         :param dashboard_id: str
           UUID identifying the dashboard.
@@ -2622,9 +2582,7 @@ class LakeviewEmbeddedAPI:
     def get_published_dashboard_token_info(
         self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None
     ) -> GetPublishedDashboardTokenInfoResponse:
-        """Read an information of a published dashboard to mint an OAuth token.
-
-        Get a required authorization details and scopes of a published dashboard to mint an OAuth token. The
+        """Get a required authorization details and scopes of a published dashboard to mint an OAuth token. The
         `authorization_details` can be enriched to apply additional restriction.

         Example: Adding the following `authorization_details` object to downscope the viewer permission to
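
The Lakeview changes in this release are docstring trims only; the draft/publish lifecycle is unchanged. A minimal sketch of that lifecycle against the signatures above (the dashboard payload, parent path, and warehouse ID are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import Dashboard

w = WorkspaceClient()

# Create a draft dashboard, then publish it with embedded credentials.
draft = w.lakeview.create(
    dashboard=Dashboard(
        display_name="Sales overview",
        parent_path="/Workspace/Users/someone@example.com",
        serialized_dashboard='{"pages": []}',
    )
)
published = w.lakeview.publish(
    draft.dashboard_id,
    embed_credentials=True,
    warehouse_id="<warehouse-id>",
)

# Drafts can be fetched (and later trashed/unpublished) by ID.
print(w.lakeview.get(draft.dashboard_id).display_name)
```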
databricks/sdk/service/database.py

@@ -74,11 +74,15 @@ class DatabaseCatalog:

 @dataclass
 class DatabaseCredential:
+    expiration_time: Optional[str] = None
+
     token: Optional[str] = None

     def as_dict(self) -> dict:
         """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
         if self.token is not None:
             body["token"] = self.token
         return body
@@ -86,6 +90,8 @@ class DatabaseCredential:
     def as_shallow_dict(self) -> dict:
         """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
         if self.token is not None:
             body["token"] = self.token
         return body
@@ -93,7 +99,7 @@ class DatabaseCredential:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential:
         """Deserializes the DatabaseCredential from a dictionary."""
-        return cls(token=d.get("token", None))
+        return cls(expiration_time=d.get("expiration_time", None), token=d.get("token", None))


 @dataclass
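
The new `expiration_time` field threads through all three serialization helpers, so credential dicts round-trip losslessly. A small sketch using only the dataclass shown above (the timestamp value is illustrative):

```python
from databricks.sdk.service.database import DatabaseCredential

cred = DatabaseCredential(token="<redacted>", expiration_time="2025-07-01T00:00:00Z")

# as_dict() now includes expiration_time alongside token...
payload = cred.as_dict()
assert payload["expiration_time"] == "2025-07-01T00:00:00Z"

# ...and from_dict() restores both fields (dataclass equality is generated).
assert DatabaseCredential.from_dict(payload) == cred
```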
@@ -113,6 +119,12 @@ class DatabaseInstance:
     creator: Optional[str] = None
     """The email of the creator of the instance."""

+    effective_stopped: Optional[bool] = None
+    """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the
+    server. `stopped` will only be set in Create/Update response messages if and only if the user
+    provides the field via the request. `effective_stopped` on the other hand will always bet set in
+    all response messages (Create/Update/Get/List)."""
+
     pg_version: Optional[str] = None
     """The version of Postgres running on the instance."""
@@ -137,6 +149,8 @@ class DatabaseInstance:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.effective_stopped is not None:
+            body["effective_stopped"] = self.effective_stopped
         if self.name is not None:
             body["name"] = self.name
         if self.pg_version is not None:
@@ -160,6 +174,8 @@ class DatabaseInstance:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.effective_stopped is not None:
+            body["effective_stopped"] = self.effective_stopped
         if self.name is not None:
             body["name"] = self.name
         if self.pg_version is not None:
@@ -181,6 +197,7 @@ class DatabaseInstance:
             capacity=d.get("capacity", None),
             creation_time=d.get("creation_time", None),
             creator=d.get("creator", None),
+            effective_stopped=d.get("effective_stopped", None),
             name=d.get("name", None),
             pg_version=d.get("pg_version", None),
             read_write_dns=d.get("read_write_dns", None),
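
Given the AIP-129 split described in the new field comment, readers should branch on `effective_stopped` (always server-populated) rather than `stopped` (only echoed when the client sent it). A sketch, assuming the DatabaseAPI exposes a `get_database_instance(name=...)` accessor and a placeholder instance name:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

instance = w.database.get_database_instance(name="my-pg-instance")

# effective_stopped is always populated by the server; stopped may be unset.
if instance.effective_stopped:
    print(f"{instance.name} is stopped")
```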
@@ -227,9 +244,6 @@ class DatabaseTable:
     postgres database. Note that this has implications for the `create_database_objects_is_missing`
     field in `spec`."""

-    table_serving_url: Optional[str] = None
-    """Data serving REST API URL for this table"""
-
     def as_dict(self) -> dict:
         """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -239,8 +253,6 @@ class DatabaseTable:
             body["logical_database_name"] = self.logical_database_name
         if self.name is not None:
             body["name"] = self.name
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         return body

     def as_shallow_dict(self) -> dict:
@@ -252,8 +264,6 @@ class DatabaseTable:
             body["logical_database_name"] = self.logical_database_name
         if self.name is not None:
             body["name"] = self.name
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         return body

     @classmethod
@@ -263,7 +273,6 @@ class DatabaseTable:
             database_instance_name=d.get("database_instance_name", None),
             logical_database_name=d.get("logical_database_name", None),
             name=d.get("name", None),
-            table_serving_url=d.get("table_serving_url", None),
         )


@@ -487,9 +496,6 @@ class SyncedDatabaseTable:
     spec: Optional[SyncedTableSpec] = None
     """Specification of a synced database table."""

-    table_serving_url: Optional[str] = None
-    """Data serving REST API URL for this table"""
-
     unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
     """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
     state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
@@ -508,8 +514,6 @@ class SyncedDatabaseTable:
             body["name"] = self.name
         if self.spec:
             body["spec"] = self.spec.as_dict()
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
             body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
         return body
@@ -527,8 +531,6 @@ class SyncedDatabaseTable:
             body["name"] = self.name
         if self.spec:
             body["spec"] = self.spec
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
             body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
         return body
@@ -542,7 +544,6 @@ class SyncedDatabaseTable:
             logical_database_name=d.get("logical_database_name", None),
             name=d.get("name", None),
             spec=_from_dict(d, "spec", SyncedTableSpec),
-            table_serving_url=d.get("table_serving_url", None),
             unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
         )

@@ -744,11 +745,14 @@ class SyncedTableSpec:
     """If true, the synced table's logical database and schema resources in PG will be created if they
     do not already exist."""

-    new_pipeline_spec: Optional[NewPipelineSpec] = None
-    """Spec of new pipeline. Should be empty if pipeline_id is set"""
+    existing_pipeline_id: Optional[str] = None
+    """User-specified ID of a pre-existing pipeline to bin pack. This field is optional, and should be
+    empty if new_pipeline_spec is set. This field will only be set by the server in response
+    messages if it is specified in the request. The SyncedTableStatus message will always contain
+    the effective pipeline ID (either client provided or server generated), however."""

-    pipeline_id: Optional[str] = None
-    """ID of the associated pipeline. Should be empty if new_pipeline_spec is set"""
+    new_pipeline_spec: Optional[NewPipelineSpec] = None
+    """Spec of new pipeline. Should be empty if pipeline_id / existing_pipeline_id is set"""

     primary_key_columns: Optional[List[str]] = None
     """Primary Key columns to be used for data insert/update in the destination."""
@@ -767,10 +771,10 @@ class SyncedTableSpec:
         body = {}
         if self.create_database_objects_if_missing is not None:
             body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
+        if self.existing_pipeline_id is not None:
+            body["existing_pipeline_id"] = self.existing_pipeline_id
         if self.new_pipeline_spec:
             body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict()
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
         if self.primary_key_columns:
             body["primary_key_columns"] = [v for v in self.primary_key_columns]
         if self.scheduling_policy is not None:
@@ -786,10 +790,10 @@ class SyncedTableSpec:
         body = {}
         if self.create_database_objects_if_missing is not None:
             body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
+        if self.existing_pipeline_id is not None:
+            body["existing_pipeline_id"] = self.existing_pipeline_id
         if self.new_pipeline_spec:
             body["new_pipeline_spec"] = self.new_pipeline_spec
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
         if self.primary_key_columns:
             body["primary_key_columns"] = self.primary_key_columns
         if self.scheduling_policy is not None:
@@ -805,8 +809,8 @@ class SyncedTableSpec:
         """Deserializes the SyncedTableSpec from a dictionary."""
         return cls(
             create_database_objects_if_missing=d.get("create_database_objects_if_missing", None),
+            existing_pipeline_id=d.get("existing_pipeline_id", None),
             new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec),
-            pipeline_id=d.get("pipeline_id", None),
             primary_key_columns=d.get("primary_key_columns", None),
             scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy),
             source_table_full_name=d.get("source_table_full_name", None),
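
Renaming `pipeline_id` to `existing_pipeline_id` on `SyncedTableSpec` is a breaking change for callers that bin-pack several synced tables onto one pipeline: exactly one of `existing_pipeline_id` and `new_pipeline_spec` should be set. A before/after sketch (table and pipeline identifiers are placeholders, and the `TRIGGERED` member is an assumption about `SyncedTableSchedulingPolicy`):

```python
from databricks.sdk.service.database import SyncedTableSchedulingPolicy, SyncedTableSpec

# 0.56.x:  SyncedTableSpec(pipeline_id="<pipeline-id>", ...)
# 0.57.0:
spec = SyncedTableSpec(
    source_table_full_name="main.sales.orders",
    primary_key_columns=["order_id"],
    scheduling_policy=SyncedTableSchedulingPolicy.TRIGGERED,
    existing_pipeline_id="<pipeline-id>",  # bin-pack onto an existing pipeline
    create_database_objects_if_missing=True,
)

# The request body now carries the renamed key.
assert "existing_pipeline_id" in spec.as_dict()
```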
@@ -848,6 +852,10 @@ class SyncedTableStatus:
     message: Optional[str] = None
     """A text description of the current state of the synced table."""

+    pipeline_id: Optional[str] = None
+    """ID of the associated pipeline. The pipeline ID may have been provided by the client (in the case
+    of bin packing), or generated by the server (when creating a new pipeline)."""
+
     provisioning_status: Optional[SyncedTableProvisioningStatus] = None
     """Detailed status of a synced table. Shown if the synced table is in the
     PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
@@ -867,6 +875,8 @@ class SyncedTableStatus:
             body["failed_status"] = self.failed_status.as_dict()
         if self.message is not None:
             body["message"] = self.message
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         if self.provisioning_status:
             body["provisioning_status"] = self.provisioning_status.as_dict()
         if self.triggered_update_status:
@@ -884,6 +894,8 @@ class SyncedTableStatus:
             body["failed_status"] = self.failed_status
         if self.message is not None:
             body["message"] = self.message
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         if self.provisioning_status:
             body["provisioning_status"] = self.provisioning_status
         if self.triggered_update_status:
@@ -898,6 +910,7 @@ class SyncedTableStatus:
             detailed_state=_enum(d, "detailed_state", SyncedTableState),
             failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus),
             message=d.get("message", None),
+            pipeline_id=d.get("pipeline_id", None),
             provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus),
             triggered_update_status=_from_dict(d, "triggered_update_status", SyncedTableTriggeredUpdateStatus),
         )
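
With this move, the effective pipeline ID is read back from `SyncedTableStatus` whether the client supplied it or the server generated it. A hedged sketch (both the `get_synced_database_table` accessor and the `data_synchronization_status` field name are assumptions about the surrounding API):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

table = w.database.get_synced_database_table(name="main.sales.orders_synced")

# Assumed: the synced table carries a SyncedTableStatus under this field.
status = table.data_synchronization_status
if status is not None and status.pipeline_id is not None:
    print(f"synced by pipeline {status.pipeline_id}")
```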
@@ -1047,10 +1060,12 @@ class DatabaseAPI:
           By default, a instance cannot be deleted if it has descendant instances created via PITR. If this
           flag is specified as true, all descendent instances will be deleted as well.
         :param purge: bool (optional)
-          If false, the database instance is soft deleted. Soft deleted instances behave as if they are
-          deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by
-          calling the undelete API for a limited time. If true, the database instance is hard deleted and
-          cannot be undeleted.
+          Note purge=false is in development. If false, the database instance is soft deleted (implementation
+          pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD
+          operations nor connected to. However they can be undeleted by calling the undelete API for a limited
+          time (implementation pending). If true, the database instance is hard deleted and cannot be
+          undeleted. For the time being, setting this value to true is required to delete an instance (soft
+          delete is not yet supported).


         """
databricks/sdk/service/files.py

@@ -739,9 +739,7 @@ class DbfsAPI:
         self._api = api_client

     def add_block(self, handle: int, data: str):
-        """Append data block.
-
-        Appends a block of data to the stream specified by the input handle. If the handle does not exist,
+        """Appends a block of data to the stream specified by the input handle. If the handle does not exist,
         this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.

         If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``.
@@ -766,9 +764,7 @@ class DbfsAPI:
         self._api.do("POST", "/api/2.0/dbfs/add-block", body=body, headers=headers)

     def close(self, handle: int):
-        """Close the stream.
-
-        Closes the stream specified by the input handle. If the handle does not exist, this call throws an
+        """Closes the stream specified by the input handle. If the handle does not exist, this call throws an
         exception with ``RESOURCE_DOES_NOT_EXIST``.

         :param handle: int
@@ -787,9 +783,7 @@ class DbfsAPI:
         self._api.do("POST", "/api/2.0/dbfs/close", body=body, headers=headers)

     def create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateResponse:
-        """Open a stream.
-
-        Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
+        """Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
         timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is
         set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``.

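The three handle-based calls above (`create` → `add_block` → `close`) make up the DBFS streaming-upload protocol: open a handle, append base64-encoded blocks of at most 1 MB each, and close within the 10-minute idle window. A sketch of the raw protocol, assuming `w.dbfs` exposes these handle methods (the path is a placeholder; the SDK's higher-level DBFS helpers are usually the more convenient route):

```python
import base64

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Open a stream, append base64-encoded blocks, then close the handle.
handle = w.dbfs.create("/tmp/example.bin", overwrite=True).handle
try:
    for chunk in (b"hello ", b"world\n"):
        w.dbfs.add_block(handle, base64.b64encode(chunk).decode())
finally:
    w.dbfs.close(handle)
```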
@@ -819,9 +813,7 @@ class DbfsAPI:
         return CreateResponse.from_dict(res)

     def delete(self, path: str, *, recursive: Optional[bool] = None):
-        """Delete a file/directory.
-
-        Delete the file or directory (optionally recursively delete all files in the directory). This call
+        """Delete the file or directory (optionally recursively delete all files in the directory). This call
         throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive` is set to
         `false` or on other similar errors.

@@ -857,9 +849,7 @@ class DbfsAPI:
         self._api.do("POST", "/api/2.0/dbfs/delete", body=body, headers=headers)

     def get_status(self, path: str) -> FileInfo:
-        """Get the information of a file or directory.
-
-        Gets the file information for a file or directory. If the file or directory does not exist, this call
+        """Gets the file information for a file or directory. If the file or directory does not exist, this call
         throws an exception with `RESOURCE_DOES_NOT_EXIST`.

         :param path: str
@@ -879,9 +869,7 @@ class DbfsAPI:
         return FileInfo.from_dict(res)

     def list(self, path: str) -> Iterator[FileInfo]:
-        """List directory contents or file details.
-
-        List the contents of a directory, or details of the file. If the file or directory does not exist,
+        """List the contents of a directory, or details of the file. If the file or directory does not exist,
         this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.

         When calling list on a large directory, the list operation will time out after approximately 60
@@ -909,9 +897,7 @@ class DbfsAPI:
         return parsed if parsed is not None else []

     def mkdirs(self, path: str):
-        """Create a directory.
-
-        Creates the given directory and necessary parent directories if they do not exist. If a file (not a
+        """Creates the given directory and necessary parent directories if they do not exist. If a file (not a
         directory) exists at any prefix of the input path, this call throws an exception with
         `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in creating some
         of the necessary parent directories.
@@ -932,9 +918,7 @@ class DbfsAPI:
         self._api.do("POST", "/api/2.0/dbfs/mkdirs", body=body, headers=headers)

     def move(self, source_path: str, destination_path: str):
-        """Move a file.
-
-        Moves a file from one location to another location within DBFS. If the source file does not exist,
+        """Moves a file from one location to another location within DBFS. If the source file does not exist,
         this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the
         destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source
         path is a directory, this call always recursively moves all files.
@@ -959,9 +943,7 @@ class DbfsAPI:
         self._api.do("POST", "/api/2.0/dbfs/move", body=body, headers=headers)

     def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[bool] = None):
-        """Upload a file.
-
-        Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
+        """Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
         can also be used as a convenient single call for data upload.

         Alternatively you can pass contents as base64 string.
@@ -996,9 +978,7 @@ class DbfsAPI:
         self._api.do("POST", "/api/2.0/dbfs/put", body=body, headers=headers)

     def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse:
-        """Get the contents of a file.
-
-        Returns the contents of a file. If the file does not exist, this call throws an exception with
+        """Returns the contents of a file. If the file does not exist, this call throws an exception with
         `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset
         is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds
         1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
@@ -1057,9 +1037,7 @@ class FilesAPI:
         self._api = api_client

     def create_directory(self, directory_path: str):
-        """Create a directory.
-
-        Creates an empty directory. If necessary, also creates any parent directories of the new, empty
+        """Creates an empty directory. If necessary, also creates any parent directories of the new, empty
         directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success
         response; this method is idempotent (it will succeed if the directory already exists).

@@ -1076,9 +1054,7 @@ class FilesAPI:
         )

     def delete(self, file_path: str):
-        """Delete a file.
-
-        Deletes a file. If the request is successful, there is no response body.
+        """Deletes a file. If the request is successful, there is no response body.

         :param file_path: str
           The absolute path of the file.
@@ -1091,9 +1067,7 @@ class FilesAPI:
         self._api.do("DELETE", f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", headers=headers)

     def delete_directory(self, directory_path: str):
-        """Delete a directory.
-
-        Deletes an empty directory.
+        """Deletes an empty directory.

         To delete a non-empty directory, first delete all of its contents. This can be done by listing the
         directory contents and deleting each file and subdirectory recursively.
@@ -1111,9 +1085,7 @@ class FilesAPI:
         )

     def download(self, file_path: str) -> DownloadResponse:
-        """Download a file.
-
-        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+        """Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
         a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.

         :param file_path: str
@@ -1140,9 +1112,7 @@ class FilesAPI:
         return DownloadResponse.from_dict(res)

     def get_directory_metadata(self, directory_path: str):
-        """Get directory metadata.
-
-        Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
+        """Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
         body.

         This method is useful to check if a directory exists and the caller has access to it.
@@ -1163,9 +1133,7 @@ class FilesAPI:
         )

     def get_metadata(self, file_path: str) -> GetMetadataResponse:
-        """Get file metadata.
-
-        Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.
+        """Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.

         :param file_path: str
           The absolute path of the file.
@@ -1190,9 +1158,7 @@ class FilesAPI:
     def list_directory_contents(
         self, directory_path: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[DirectoryEntry]:
-        """List directory contents.
-
-        Returns the contents of a directory. If there is no directory at the specified path, the API returns a
+        """Returns the contents of a directory. If there is no directory at the specified path, the API returns a
         HTTP 404 error.

         :param directory_path: str
@@ -1242,9 +1208,7 @@ class FilesAPI:
             query["page_token"] = json["next_page_token"]

     def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool] = None):
-        """Upload a file.
-
-        Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
+        """Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
         octet stream); do not encode or otherwise modify the bytes before sending. The contents of the
         resulting file will be exactly the bytes sent in the request body. If the request is successful, there
         is no response body.
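
Unlike the base64-oriented DBFS endpoints above, `FilesAPI` moves raw bytes. A minimal upload/download round trip against the signatures shown (the Unity Catalog volume path is a placeholder, and reading the response via a `contents` stream is an assumption about `DownloadResponse`):

```python
import io

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

path = "/Volumes/main/default/my_volume/example.txt"

# upload() sends the stream as raw bytes (octet stream), up to 5 GiB.
w.files.upload(path, io.BytesIO(b"hello world\n"), overwrite=True)

# download() returns the file contents as the response body.
resp = w.files.download(path)
print(resp.contents.read())  # contents is assumed to be a binary stream
```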