databricks-sdk 0.55.0__py3-none-any.whl → 0.57.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (31) hide show
  1. databricks/sdk/__init__.py +41 -24
  2. databricks/sdk/service/aibuilder.py +505 -0
  3. databricks/sdk/service/apps.py +14 -42
  4. databricks/sdk/service/billing.py +167 -220
  5. databricks/sdk/service/catalog.py +462 -1235
  6. databricks/sdk/service/cleanrooms.py +26 -43
  7. databricks/sdk/service/compute.py +75 -211
  8. databricks/sdk/service/dashboards.py +77 -511
  9. databricks/sdk/service/database.py +1271 -0
  10. databricks/sdk/service/files.py +20 -54
  11. databricks/sdk/service/iam.py +61 -171
  12. databricks/sdk/service/jobs.py +453 -68
  13. databricks/sdk/service/marketplace.py +46 -146
  14. databricks/sdk/service/ml.py +453 -477
  15. databricks/sdk/service/oauth2.py +17 -45
  16. databricks/sdk/service/pipelines.py +125 -40
  17. databricks/sdk/service/provisioning.py +30 -93
  18. databricks/sdk/service/qualitymonitorv2.py +265 -0
  19. databricks/sdk/service/serving.py +106 -46
  20. databricks/sdk/service/settings.py +1062 -390
  21. databricks/sdk/service/sharing.py +33 -88
  22. databricks/sdk/service/sql.py +292 -185
  23. databricks/sdk/service/vectorsearch.py +13 -43
  24. databricks/sdk/service/workspace.py +35 -105
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/RECORD +31 -28
  28. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1271 @@
1
+ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
2
+
3
+ from __future__ import annotations
4
+
5
+ import logging
6
+ from dataclasses import dataclass
7
+ from enum import Enum
8
+ from typing import Any, Dict, Iterator, List, Optional
9
+
10
+ from ._internal import _enum, _from_dict, _repeated_dict
11
+
12
+ _LOG = logging.getLogger("databricks.sdk")
13
+
14
+
15
+ # all definitions in this file are in alphabetical order
16
+
17
+
18
@dataclass
class DatabaseCatalog:
    # The name of the catalog in UC.
    name: str

    # The name of the DatabaseInstance housing the database.
    database_instance_name: str

    # The name of the database (in an instance) associated with the catalog.
    database_name: str

    create_database_if_not_exists: Optional[bool] = None

    uid: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the DatabaseCatalog into a dictionary suitable for use as a JSON request body."""
        pairs = (
            ("create_database_if_not_exists", self.create_database_if_not_exists),
            ("database_instance_name", self.database_instance_name),
            ("database_name", self.database_name),
            ("name", self.name),
            ("uid", self.uid),
        )
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseCatalog into a shallow dictionary of its immediate attributes."""
        # Every field is a plain scalar, so the shallow form coincides with the deep form.
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog:
        """Deserializes the DatabaseCatalog from a dictionary."""
        return cls(
            create_database_if_not_exists=d.get("create_database_if_not_exists"),
            database_instance_name=d.get("database_instance_name"),
            database_name=d.get("database_name"),
            name=d.get("name"),
            uid=d.get("uid"),
        )
73
+
74
+
75
@dataclass
class DatabaseCredential:
    expiration_time: Optional[str] = None

    token: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body."""
        pairs = (("expiration_time", self.expiration_time), ("token", self.token))
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes."""
        # Both fields are plain strings, so shallow and deep serialization coincide.
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential:
        """Deserializes the DatabaseCredential from a dictionary."""
        return cls(expiration_time=d.get("expiration_time"), token=d.get("token"))
103
+
104
+
105
@dataclass
class DatabaseInstance:
    """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
    storage."""

    # The name of the instance. This is the unique identifier for the instance.
    name: str

    # The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8".
    capacity: Optional[str] = None

    # The timestamp when the instance was created.
    creation_time: Optional[str] = None

    # The email of the creator of the instance.
    creator: Optional[str] = None

    # xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the
    # server: `stopped` is only set in Create/Update responses when the user supplied the field
    # in the request, whereas `effective_stopped` is always set in all response messages
    # (Create/Update/Get/List).
    effective_stopped: Optional[bool] = None

    # The version of Postgres running on the instance.
    pg_version: Optional[str] = None

    # The DNS endpoint to connect to the instance for read+write access.
    read_write_dns: Optional[str] = None

    # The current state of the instance.
    state: Optional[DatabaseInstanceState] = None

    # Whether the instance is stopped.
    stopped: Optional[bool] = None

    # An immutable UUID identifier for the instance.
    uid: Optional[str] = None

    def _serialized_pairs(self, deep: bool):
        # Field values in serialization order. For the deep (JSON) form the `state`
        # enum is flattened to its string value; the shallow form keeps the member.
        state = self.state
        if deep and state is not None:
            state = state.value
        return (
            ("capacity", self.capacity),
            ("creation_time", self.creation_time),
            ("creator", self.creator),
            ("effective_stopped", self.effective_stopped),
            ("name", self.name),
            ("pg_version", self.pg_version),
            ("read_write_dns", self.read_write_dns),
            ("state", state),
            ("stopped", self.stopped),
            ("uid", self.uid),
        )

    def as_dict(self) -> dict:
        """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
        return {key: val for key, val in self._serialized_pairs(deep=True) if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes."""
        return {key: val for key, val in self._serialized_pairs(deep=False) if val is not None}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
        """Deserializes the DatabaseInstance from a dictionary."""
        return cls(
            capacity=d.get("capacity"),
            creation_time=d.get("creation_time"),
            creator=d.get("creator"),
            effective_stopped=d.get("effective_stopped"),
            name=d.get("name"),
            pg_version=d.get("pg_version"),
            read_write_dns=d.get("read_write_dns"),
            state=_enum(d, "state", DatabaseInstanceState),
            stopped=d.get("stopped"),
            uid=d.get("uid"),
        )
208
+
209
+
210
class DatabaseInstanceState(Enum):
    """Lifecycle state of a Postgres database instance, as reported in
    ``DatabaseInstance.state``."""

    AVAILABLE = "AVAILABLE"
    DELETING = "DELETING"
    FAILING_OVER = "FAILING_OVER"
    STARTING = "STARTING"
    STOPPED = "STOPPED"
    UPDATING = "UPDATING"
218
+
219
+
220
@dataclass
class DatabaseTable:
    """Next field marker: 13"""

    # Full three-part (catalog, schema, table) name of the table.
    name: str

    # Name of the target database instance. Required when creating database tables in standard
    # catalogs; optional in registered catalogs, where it MUST match the registered catalog's
    # instance if specified (or the request will be rejected).
    database_instance_name: Optional[str] = None

    # Target Postgres database object (logical database) name for this table; optional in all
    # scenarios. For registered Postgres catalogs it is inferred from (and must match) the
    # catalog. For standard catalogs it is inferred from the catalog name, and specifying it
    # allows targeting an arbitrary Postgres database — with implications for the
    # `create_database_objects_is_missing` field in `spec`.
    logical_database_name: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
        pairs = (
            ("database_instance_name", self.database_instance_name),
            ("logical_database_name", self.logical_database_name),
            ("name", self.name),
        )
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseTable into a shallow dictionary of its immediate attributes."""
        # All fields are strings, so shallow and deep serialization coincide.
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable:
        """Deserializes the DatabaseTable from a dictionary."""
        return cls(
            database_instance_name=d.get("database_instance_name"),
            logical_database_name=d.get("logical_database_name"),
            name=d.get("name"),
        )
277
+
278
+
279
@dataclass
class DeleteDatabaseCatalogResponse:
    # Empty response body: deleting a database catalog returns no payload.

    def as_dict(self) -> dict:
        """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body."""
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
        """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
        return cls()
295
+
296
+
297
@dataclass
class DeleteDatabaseInstanceResponse:
    # Empty response body: deleting a database instance returns no payload.

    def as_dict(self) -> dict:
        """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body."""
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
        """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
        return cls()
313
+
314
+
315
@dataclass
class DeleteDatabaseTableResponse:
    # Empty response body: deleting a database table returns no payload.

    def as_dict(self) -> dict:
        """Serializes the DeleteDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse:
        """Deserializes the DeleteDatabaseTableResponse from a dictionary."""
        return cls()
331
+
332
+
333
@dataclass
class DeleteSyncedDatabaseTableResponse:
    # Empty response body: deleting a synced database table returns no payload.

    def as_dict(self) -> dict:
        """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse:
        """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary."""
        return cls()
349
+
350
+
351
@dataclass
class GenerateDatabaseCredentialRequest:
    """Generates a credential that can be used to access database instances"""

    # Instances to which the token will be scoped.
    instance_names: Optional[List[str]] = None

    request_id: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body."""
        body: dict = {}
        # An empty list is treated the same as None here (truthiness check), and the
        # deep form carries a copy of the list rather than the attribute itself.
        if self.instance_names:
            body["instance_names"] = list(self.instance_names)
        if self.request_id is not None:
            body["request_id"] = self.request_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes."""
        body: dict = {}
        if self.instance_names:
            body["instance_names"] = self.instance_names
        if self.request_id is not None:
            body["request_id"] = self.request_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest:
        """Deserializes the GenerateDatabaseCredentialRequest from a dictionary."""
        return cls(instance_names=d.get("instance_names"), request_id=d.get("request_id"))
382
+
383
+
384
@dataclass
class ListDatabaseInstancesResponse:
    # List of instances.
    database_instances: Optional[List[DatabaseInstance]] = None

    # Pagination token to request the next page of instances.
    next_page_token: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body."""
        instances = self.database_instances
        # Empty/None instance lists are omitted; present instances are serialized recursively.
        serialized = [inst.as_dict() for inst in instances] if instances else None
        pairs = (("database_instances", serialized), ("next_page_token", self.next_page_token))
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes."""
        body: dict = {}
        if self.database_instances:
            body["database_instances"] = self.database_instances
        if self.next_page_token is not None:
            body["next_page_token"] = self.next_page_token
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse:
        """Deserializes the ListDatabaseInstancesResponse from a dictionary."""
        return cls(
            database_instances=_repeated_dict(d, "database_instances", DatabaseInstance),
            next_page_token=d.get("next_page_token"),
        )
417
+
418
+
419
@dataclass
class NewPipelineSpec:
    """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other
    fields of pipeline are still inferred by table def internally"""

    # UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc).
    # Must be a standard catalog where the user can create Delta tables.
    storage_catalog: Optional[str] = None

    # UC schema for the pipeline to store intermediate files (checkpoints, event logs etc).
    # Must live in the standard catalog where the user can create Delta tables.
    storage_schema: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body."""
        pairs = (("storage_catalog", self.storage_catalog), ("storage_schema", self.storage_schema))
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes."""
        # Both fields are plain strings, so shallow and deep serialization coincide.
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec:
        """Deserializes the NewPipelineSpec from a dictionary."""
        return cls(storage_catalog=d.get("storage_catalog"), storage_schema=d.get("storage_schema"))
454
+
455
+
456
class ProvisioningInfoState(Enum):
    """Provisioning state of an entity in Unity Catalog; used for
    ``SyncedDatabaseTable.unity_catalog_provisioning_state``."""

    ACTIVE = "ACTIVE"
    DEGRADED = "DEGRADED"
    DELETING = "DELETING"
    FAILED = "FAILED"
    PROVISIONING = "PROVISIONING"
    UPDATING = "UPDATING"
464
+
465
+
466
@dataclass
class SyncedDatabaseTable:
    """Next field marker: 12"""

    # Full three-part (catalog, schema, table) name of the table.
    name: str

    # Synced Table data synchronization status.
    data_synchronization_status: Optional[SyncedTableStatus] = None

    # Name of the target database instance. Required when creating synced database tables in
    # standard catalogs; optional in registered catalogs, where it MUST match the registered
    # catalog's instance if specified (or the request will be rejected).
    database_instance_name: Optional[str] = None

    # Target Postgres database object (logical database) name for this table; optional in all
    # scenarios. For registered Postgres catalogs it is inferred from (and must match) the
    # catalog. For standard catalogs it is inferred from the catalog name, and specifying it
    # allows targeting an arbitrary Postgres database.
    logical_database_name: Optional[str] = None

    # Specification of a synced database table.
    spec: Optional[SyncedTableSpec] = None

    # The provisioning state of the synced table entity in Unity Catalog. Distinct from the
    # state of the data synchronization pipeline (the table may be "ACTIVE" while the pipeline,
    # which runs asynchronously, is still "PROVISIONING").
    unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None

    def as_dict(self) -> dict:
        """Serializes the SyncedDatabaseTable into a dictionary suitable for use as a JSON request body."""
        body: dict = {}
        status = self.data_synchronization_status
        if status:
            body["data_synchronization_status"] = status.as_dict()
        for key in ("database_instance_name", "logical_database_name", "name"):
            val = getattr(self, key)
            if val is not None:
                body[key] = val
        if self.spec:
            body["spec"] = self.spec.as_dict()
        state = self.unity_catalog_provisioning_state
        if state is not None:
            body["unity_catalog_provisioning_state"] = state.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedDatabaseTable into a shallow dictionary of its immediate attributes."""
        body: dict = {}
        if self.data_synchronization_status:
            body["data_synchronization_status"] = self.data_synchronization_status
        for key in ("database_instance_name", "logical_database_name", "name"):
            val = getattr(self, key)
            if val is not None:
                body[key] = val
        if self.spec:
            body["spec"] = self.spec
        if self.unity_catalog_provisioning_state is not None:
            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable:
        """Deserializes the SyncedDatabaseTable from a dictionary."""
        return cls(
            data_synchronization_status=_from_dict(d, "data_synchronization_status", SyncedTableStatus),
            database_instance_name=d.get("database_instance_name"),
            logical_database_name=d.get("logical_database_name"),
            name=d.get("name"),
            spec=_from_dict(d, "spec", SyncedTableSpec),
            unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
        )
549
+
550
+
551
@dataclass
class SyncedTableContinuousUpdateStatus:
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE
    or the SYNCED_UPDATING_PIPELINE_RESOURCES state."""

    # Progress of the initial data synchronization.
    initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None

    # The last source table Delta version that was synced to the synced table. Note that this
    # Delta version may not be completely synced to the synced table yet.
    last_processed_commit_version: Optional[int] = None

    # The timestamp of the last time any data was synchronized from the source table to the
    # synced table.
    timestamp: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
        # Same keys as the shallow form, with the nested progress object serialized recursively.
        body = self.as_shallow_dict()
        progress = body.get("initial_pipeline_sync_progress")
        if progress is not None:
            body["initial_pipeline_sync_progress"] = progress.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
        body: dict = {}
        if self.initial_pipeline_sync_progress:
            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress
        if self.last_processed_commit_version is not None:
            body["last_processed_commit_version"] = self.last_processed_commit_version
        if self.timestamp is not None:
            body["timestamp"] = self.timestamp
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableContinuousUpdateStatus:
        """Deserializes the SyncedTableContinuousUpdateStatus from a dictionary."""
        return cls(
            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress),
            last_processed_commit_version=d.get("last_processed_commit_version"),
            timestamp=d.get("timestamp"),
        )
597
+
598
+
599
@dataclass
class SyncedTableFailedStatus:
    """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the
    SYNCED_PIPELINE_FAILED state."""

    # The last source table Delta version that was synced to the synced table. The version may
    # be only partially synced. Only populated if the table is still synced and available for
    # serving.
    last_processed_commit_version: Optional[int] = None

    # The timestamp of the last time any data was synchronized from the source table to the
    # synced table. Only populated if the table is still synced and available for serving.
    timestamp: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the SyncedTableFailedStatus into a dictionary suitable for use as a JSON request body."""
        pairs = (
            ("last_processed_commit_version", self.last_processed_commit_version),
            ("timestamp", self.timestamp),
        )
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableFailedStatus into a shallow dictionary of its immediate attributes."""
        # Both fields are scalars, so shallow and deep serialization coincide.
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableFailedStatus:
        """Deserializes the SyncedTableFailedStatus from a dictionary."""
        return cls(
            last_processed_commit_version=d.get("last_processed_commit_version"),
            timestamp=d.get("timestamp"),
        )
638
+
639
+
640
@dataclass
class SyncedTablePipelineProgress:
    """Progress information of the Synced Table data synchronization pipeline."""

    # The estimated time remaining to complete this update in seconds.
    estimated_completion_time_seconds: Optional[float] = None

    # The source table Delta version that was last processed by the pipeline. The pipeline may
    # not have completely processed this version yet.
    latest_version_currently_processing: Optional[int] = None

    # The completion ratio of this update, a number between 0 and 1.
    sync_progress_completion: Optional[float] = None

    # The number of rows that have been synced in this update.
    synced_row_count: Optional[int] = None

    # The total number of rows that need to be synced in this update; may be an estimate.
    total_row_count: Optional[int] = None

    def as_dict(self) -> dict:
        """Serializes the SyncedTablePipelineProgress into a dictionary suitable for use as a JSON request body."""
        pairs = (
            ("estimated_completion_time_seconds", self.estimated_completion_time_seconds),
            ("latest_version_currently_processing", self.latest_version_currently_processing),
            ("sync_progress_completion", self.sync_progress_completion),
            ("synced_row_count", self.synced_row_count),
            ("total_row_count", self.total_row_count),
        )
        return {key: val for key, val in pairs if val is not None}

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTablePipelineProgress into a shallow dictionary of its immediate attributes."""
        # Every field is a plain number, so shallow and deep serialization coincide.
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePipelineProgress:
        """Deserializes the SyncedTablePipelineProgress from a dictionary."""
        return cls(
            estimated_completion_time_seconds=d.get("estimated_completion_time_seconds"),
            latest_version_currently_processing=d.get("latest_version_currently_processing"),
            sync_progress_completion=d.get("sync_progress_completion"),
            synced_row_count=d.get("synced_row_count"),
            total_row_count=d.get("total_row_count"),
        )
700
+
701
+
702
@dataclass
class SyncedTableProvisioningStatus:
    """Detailed status of a synced table. Shown if the synced table is in the
    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""

    # Details about initial data synchronization. Only populated when in the
    # PROVISIONING_INITIAL_SNAPSHOT state.
    initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None

    def as_dict(self) -> dict:
        """Serializes the SyncedTableProvisioningStatus into a dictionary suitable for use as a JSON request body."""
        progress = self.initial_pipeline_sync_progress
        return {"initial_pipeline_sync_progress": progress.as_dict()} if progress else {}

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableProvisioningStatus into a shallow dictionary of its immediate attributes."""
        progress = self.initial_pipeline_sync_progress
        return {"initial_pipeline_sync_progress": progress} if progress else {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableProvisioningStatus:
        """Deserializes the SyncedTableProvisioningStatus from a dictionary."""
        return cls(
            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress)
        )
731
+
732
+
733
class SyncedTableSchedulingPolicy(Enum):
    """Scheduling policy of the pipeline that keeps a synced table up to date."""

    CONTINUOUS = "CONTINUOUS"
    SNAPSHOT = "SNAPSHOT"
    TRIGGERED = "TRIGGERED"
738
+
739
+
740
@dataclass
class SyncedTableSpec:
    """Specification of a synced database table."""

    create_database_objects_if_missing: Optional[bool] = None
    """If true, the synced table's logical database and schema resources in PG will be created if they
    do not already exist."""

    existing_pipeline_id: Optional[str] = None
    """User-specified ID of a pre-existing pipeline to bin pack. This field is optional, and should be
    empty if new_pipeline_spec is set. This field will only be set by the server in response
    messages if it is specified in the request. The SyncedTableStatus message will always contain
    the effective pipeline ID (either client provided or server generated), however."""

    new_pipeline_spec: Optional[NewPipelineSpec] = None
    """Spec of new pipeline. Should be empty if pipeline_id / existing_pipeline_id is set"""

    primary_key_columns: Optional[List[str]] = None
    """Primary Key columns to be used for data insert/update in the destination."""

    scheduling_policy: Optional[SyncedTableSchedulingPolicy] = None
    """Scheduling policy of the underlying pipeline."""

    source_table_full_name: Optional[str] = None
    """Three-part (catalog, schema, table) name of the source Delta table."""

    timeseries_key: Optional[str] = None
    """Time series key to deduplicate (tie-break) rows with the same primary key."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body."""
        payload: dict = {}
        if self.create_database_objects_if_missing is not None:
            payload["create_database_objects_if_missing"] = self.create_database_objects_if_missing
        if self.existing_pipeline_id is not None:
            payload["existing_pipeline_id"] = self.existing_pipeline_id
        if self.new_pipeline_spec:
            payload["new_pipeline_spec"] = self.new_pipeline_spec.as_dict()
        if self.primary_key_columns:
            payload["primary_key_columns"] = list(self.primary_key_columns)
        if self.scheduling_policy is not None:
            payload["scheduling_policy"] = self.scheduling_policy.value
        if self.source_table_full_name is not None:
            payload["source_table_full_name"] = self.source_table_full_name
        if self.timeseries_key is not None:
            payload["timeseries_key"] = self.timeseries_key
        return payload

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes."""
        payload: dict = {}
        if self.create_database_objects_if_missing is not None:
            payload["create_database_objects_if_missing"] = self.create_database_objects_if_missing
        if self.existing_pipeline_id is not None:
            payload["existing_pipeline_id"] = self.existing_pipeline_id
        if self.new_pipeline_spec:
            payload["new_pipeline_spec"] = self.new_pipeline_spec
        if self.primary_key_columns:
            payload["primary_key_columns"] = self.primary_key_columns
        if self.scheduling_policy is not None:
            payload["scheduling_policy"] = self.scheduling_policy
        if self.source_table_full_name is not None:
            payload["source_table_full_name"] = self.source_table_full_name
        if self.timeseries_key is not None:
            payload["timeseries_key"] = self.timeseries_key
        return payload

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec:
        """Deserializes the SyncedTableSpec from a dictionary."""
        return cls(
            create_database_objects_if_missing=d.get("create_database_objects_if_missing", None),
            existing_pipeline_id=d.get("existing_pipeline_id", None),
            new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec),
            primary_key_columns=d.get("primary_key_columns", None),
            scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy),
            source_table_full_name=d.get("source_table_full_name", None),
            timeseries_key=d.get("timeseries_key", None),
        )
819
+
820
+
821
class SyncedTableState(Enum):
    """The state of a synced table."""

    # NOTE(review): SYNCED_TABLED_OFFLINE (sic) matches the wire value; do not "fix" the typo.
    SYNCED_TABLED_OFFLINE = "SYNCED_TABLED_OFFLINE"
    SYNCED_TABLE_OFFLINE_FAILED = "SYNCED_TABLE_OFFLINE_FAILED"
    SYNCED_TABLE_ONLINE = "SYNCED_TABLE_ONLINE"
    SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE = "SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE"
    SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE = "SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE"
    SYNCED_TABLE_ONLINE_PIPELINE_FAILED = "SYNCED_TABLE_ONLINE_PIPELINE_FAILED"
    SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE = "SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE"
    SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES = "SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES"
    SYNCED_TABLE_PROVISIONING = "SYNCED_TABLE_PROVISIONING"
    SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT = "SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT"
    SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES = "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES"
835
+
836
+
837
@dataclass
class SyncedTableStatus:
    """Status of a synced table."""

    continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE
    or the SYNCED_UPDATING_PIPELINE_RESOURCES state."""

    detailed_state: Optional[SyncedTableState] = None
    """The state of the synced table."""

    failed_status: Optional[SyncedTableFailedStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the
    SYNCED_PIPELINE_FAILED state."""

    message: Optional[str] = None
    """A text description of the current state of the synced table."""

    pipeline_id: Optional[str] = None
    """ID of the associated pipeline. The pipeline ID may have been provided by the client (in the case
    of bin packing), or generated by the server (when creating a new pipeline)."""

    provisioning_status: Optional[SyncedTableProvisioningStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the
    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""

    triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE
    or the SYNCED_NO_PENDING_UPDATE state."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body."""
        payload: dict = {}
        # Nested status messages are recursively serialized; enums collapse to their wire value.
        if self.continuous_update_status:
            payload["continuous_update_status"] = self.continuous_update_status.as_dict()
        if self.detailed_state is not None:
            payload["detailed_state"] = self.detailed_state.value
        if self.failed_status:
            payload["failed_status"] = self.failed_status.as_dict()
        if self.message is not None:
            payload["message"] = self.message
        if self.pipeline_id is not None:
            payload["pipeline_id"] = self.pipeline_id
        if self.provisioning_status:
            payload["provisioning_status"] = self.provisioning_status.as_dict()
        if self.triggered_update_status:
            payload["triggered_update_status"] = self.triggered_update_status.as_dict()
        return payload

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableStatus into a shallow dictionary of its immediate attributes."""
        payload: dict = {}
        if self.continuous_update_status:
            payload["continuous_update_status"] = self.continuous_update_status
        if self.detailed_state is not None:
            payload["detailed_state"] = self.detailed_state
        if self.failed_status:
            payload["failed_status"] = self.failed_status
        if self.message is not None:
            payload["message"] = self.message
        if self.pipeline_id is not None:
            payload["pipeline_id"] = self.pipeline_id
        if self.provisioning_status:
            payload["provisioning_status"] = self.provisioning_status
        if self.triggered_update_status:
            payload["triggered_update_status"] = self.triggered_update_status
        return payload

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus:
        """Deserializes the SyncedTableStatus from a dictionary."""
        return cls(
            continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus),
            detailed_state=_enum(d, "detailed_state", SyncedTableState),
            failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus),
            message=d.get("message", None),
            pipeline_id=d.get("pipeline_id", None),
            provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus),
            triggered_update_status=_from_dict(d, "triggered_update_status", SyncedTableTriggeredUpdateStatus),
        )
917
+
918
+
919
@dataclass
class SyncedTableTriggeredUpdateStatus:
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE
    or the SYNCED_NO_PENDING_UPDATE state."""

    last_processed_commit_version: Optional[int] = None
    """The last source table Delta version that was synced to the synced table. Note that this Delta
    version may not be completely synced to the synced table yet."""

    timestamp: Optional[str] = None
    """The timestamp of the last time any data was synchronized from the source table to the synced
    table."""

    triggered_update_progress: Optional[SyncedTablePipelineProgress] = None
    """Progress of the active data synchronization pipeline."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableTriggeredUpdateStatus into a dictionary suitable for use as a JSON request body."""
        payload: dict = {}
        if self.last_processed_commit_version is not None:
            payload["last_processed_commit_version"] = self.last_processed_commit_version
        if self.timestamp is not None:
            payload["timestamp"] = self.timestamp
        if self.triggered_update_progress:
            payload["triggered_update_progress"] = self.triggered_update_progress.as_dict()
        return payload

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableTriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
        payload: dict = {}
        if self.last_processed_commit_version is not None:
            payload["last_processed_commit_version"] = self.last_processed_commit_version
        if self.timestamp is not None:
            payload["timestamp"] = self.timestamp
        if self.triggered_update_progress:
            payload["triggered_update_progress"] = self.triggered_update_progress
        return payload

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableTriggeredUpdateStatus:
        """Deserializes the SyncedTableTriggeredUpdateStatus from a dictionary."""
        return cls(
            last_processed_commit_version=d.get("last_processed_commit_version", None),
            timestamp=d.get("timestamp", None),
            triggered_update_progress=_from_dict(d, "triggered_update_progress", SyncedTablePipelineProgress),
        )
965
+
966
+
967
+ class DatabaseAPI:
968
+ """Database Instances provide access to a database via REST API or direct SQL."""
969
+
970
    def __init__(self, api_client):
        # The underlying HTTP client; every endpoint method below delegates to
        # its ``do(method, path, ...)`` call.
        self._api = api_client
972
+
973
+ def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog:
974
+ """Create a Database Catalog.
975
+
976
+ :param catalog: :class:`DatabaseCatalog`
977
+
978
+ :returns: :class:`DatabaseCatalog`
979
+ """
980
+ body = catalog.as_dict()
981
+ headers = {
982
+ "Accept": "application/json",
983
+ "Content-Type": "application/json",
984
+ }
985
+
986
+ res = self._api.do("POST", "/api/2.0/database/catalogs", body=body, headers=headers)
987
+ return DatabaseCatalog.from_dict(res)
988
+
989
+ def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance:
990
+ """Create a Database Instance.
991
+
992
+ :param database_instance: :class:`DatabaseInstance`
993
+ A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
994
+
995
+ :returns: :class:`DatabaseInstance`
996
+ """
997
+ body = database_instance.as_dict()
998
+ headers = {
999
+ "Accept": "application/json",
1000
+ "Content-Type": "application/json",
1001
+ }
1002
+
1003
+ res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
1004
+ return DatabaseInstance.from_dict(res)
1005
+
1006
+ def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
1007
+ """Create a Database Table.
1008
+
1009
+ :param table: :class:`DatabaseTable`
1010
+ Next field marker: 13
1011
+
1012
+ :returns: :class:`DatabaseTable`
1013
+ """
1014
+ body = table.as_dict()
1015
+ headers = {
1016
+ "Accept": "application/json",
1017
+ "Content-Type": "application/json",
1018
+ }
1019
+
1020
+ res = self._api.do("POST", "/api/2.0/database/tables", body=body, headers=headers)
1021
+ return DatabaseTable.from_dict(res)
1022
+
1023
+ def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable:
1024
+ """Create a Synced Database Table.
1025
+
1026
+ :param synced_table: :class:`SyncedDatabaseTable`
1027
+ Next field marker: 12
1028
+
1029
+ :returns: :class:`SyncedDatabaseTable`
1030
+ """
1031
+ body = synced_table.as_dict()
1032
+ headers = {
1033
+ "Accept": "application/json",
1034
+ "Content-Type": "application/json",
1035
+ }
1036
+
1037
+ res = self._api.do("POST", "/api/2.0/database/synced_tables", body=body, headers=headers)
1038
+ return SyncedDatabaseTable.from_dict(res)
1039
+
1040
+ def delete_database_catalog(self, name: str):
1041
+ """Delete a Database Catalog.
1042
+
1043
+ :param name: str
1044
+
1045
+
1046
+ """
1047
+
1048
+ headers = {
1049
+ "Accept": "application/json",
1050
+ }
1051
+
1052
+ self._api.do("DELETE", f"/api/2.0/database/catalogs/{name}", headers=headers)
1053
+
1054
+ def delete_database_instance(self, name: str, *, force: Optional[bool] = None, purge: Optional[bool] = None):
1055
+ """Delete a Database Instance.
1056
+
1057
+ :param name: str
1058
+ Name of the instance to delete.
1059
+ :param force: bool (optional)
1060
+ By default, a instance cannot be deleted if it has descendant instances created via PITR. If this
1061
+ flag is specified as true, all descendent instances will be deleted as well.
1062
+ :param purge: bool (optional)
1063
+ Note purge=false is in development. If false, the database instance is soft deleted (implementation
1064
+ pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD
1065
+ operations nor connected to. However they can be undeleted by calling the undelete API for a limited
1066
+ time (implementation pending). If true, the database instance is hard deleted and cannot be
1067
+ undeleted. For the time being, setting this value to true is required to delete an instance (soft
1068
+ delete is not yet supported).
1069
+
1070
+
1071
+ """
1072
+
1073
+ query = {}
1074
+ if force is not None:
1075
+ query["force"] = force
1076
+ if purge is not None:
1077
+ query["purge"] = purge
1078
+ headers = {
1079
+ "Accept": "application/json",
1080
+ }
1081
+
1082
+ self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers)
1083
+
1084
+ def delete_database_table(self, name: str):
1085
+ """Delete a Database Table.
1086
+
1087
+ :param name: str
1088
+
1089
+
1090
+ """
1091
+
1092
+ headers = {
1093
+ "Accept": "application/json",
1094
+ }
1095
+
1096
+ self._api.do("DELETE", f"/api/2.0/database/tables/{name}", headers=headers)
1097
+
1098
+ def delete_synced_database_table(self, name: str):
1099
+ """Delete a Synced Database Table.
1100
+
1101
+ :param name: str
1102
+
1103
+
1104
+ """
1105
+
1106
+ headers = {
1107
+ "Accept": "application/json",
1108
+ }
1109
+
1110
+ self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers)
1111
+
1112
+ def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance:
1113
+ """Find a Database Instance by uid.
1114
+
1115
+ :param uid: str (optional)
1116
+ UID of the cluster to get.
1117
+
1118
+ :returns: :class:`DatabaseInstance`
1119
+ """
1120
+
1121
+ query = {}
1122
+ if uid is not None:
1123
+ query["uid"] = uid
1124
+ headers = {
1125
+ "Accept": "application/json",
1126
+ }
1127
+
1128
+ res = self._api.do("GET", "/api/2.0/database/instances:findByUid", query=query, headers=headers)
1129
+ return DatabaseInstance.from_dict(res)
1130
+
1131
+ def generate_database_credential(
1132
+ self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None
1133
+ ) -> DatabaseCredential:
1134
+ """Generates a credential that can be used to access database instances.
1135
+
1136
+ :param instance_names: List[str] (optional)
1137
+ Instances to which the token will be scoped.
1138
+ :param request_id: str (optional)
1139
+
1140
+ :returns: :class:`DatabaseCredential`
1141
+ """
1142
+ body = {}
1143
+ if instance_names is not None:
1144
+ body["instance_names"] = [v for v in instance_names]
1145
+ if request_id is not None:
1146
+ body["request_id"] = request_id
1147
+ headers = {
1148
+ "Accept": "application/json",
1149
+ "Content-Type": "application/json",
1150
+ }
1151
+
1152
+ res = self._api.do("POST", "/api/2.0/database/credentials", body=body, headers=headers)
1153
+ return DatabaseCredential.from_dict(res)
1154
+
1155
+ def get_database_catalog(self, name: str) -> DatabaseCatalog:
1156
+ """Get a Database Catalog.
1157
+
1158
+ :param name: str
1159
+
1160
+ :returns: :class:`DatabaseCatalog`
1161
+ """
1162
+
1163
+ headers = {
1164
+ "Accept": "application/json",
1165
+ }
1166
+
1167
+ res = self._api.do("GET", f"/api/2.0/database/catalogs/{name}", headers=headers)
1168
+ return DatabaseCatalog.from_dict(res)
1169
+
1170
+ def get_database_instance(self, name: str) -> DatabaseInstance:
1171
+ """Get a Database Instance.
1172
+
1173
+ :param name: str
1174
+ Name of the cluster to get.
1175
+
1176
+ :returns: :class:`DatabaseInstance`
1177
+ """
1178
+
1179
+ headers = {
1180
+ "Accept": "application/json",
1181
+ }
1182
+
1183
+ res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers)
1184
+ return DatabaseInstance.from_dict(res)
1185
+
1186
+ def get_database_table(self, name: str) -> DatabaseTable:
1187
+ """Get a Database Table.
1188
+
1189
+ :param name: str
1190
+
1191
+ :returns: :class:`DatabaseTable`
1192
+ """
1193
+
1194
+ headers = {
1195
+ "Accept": "application/json",
1196
+ }
1197
+
1198
+ res = self._api.do("GET", f"/api/2.0/database/tables/{name}", headers=headers)
1199
+ return DatabaseTable.from_dict(res)
1200
+
1201
+ def get_synced_database_table(self, name: str) -> SyncedDatabaseTable:
1202
+ """Get a Synced Database Table.
1203
+
1204
+ :param name: str
1205
+
1206
+ :returns: :class:`SyncedDatabaseTable`
1207
+ """
1208
+
1209
+ headers = {
1210
+ "Accept": "application/json",
1211
+ }
1212
+
1213
+ res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers)
1214
+ return SyncedDatabaseTable.from_dict(res)
1215
+
1216
+ def list_database_instances(
1217
+ self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
1218
+ ) -> Iterator[DatabaseInstance]:
1219
+ """List Database Instances.
1220
+
1221
+ :param page_size: int (optional)
1222
+ Upper bound for items returned.
1223
+ :param page_token: str (optional)
1224
+ Pagination token to go to the next page of Database Instances. Requests first page if absent.
1225
+
1226
+ :returns: Iterator over :class:`DatabaseInstance`
1227
+ """
1228
+
1229
+ query = {}
1230
+ if page_size is not None:
1231
+ query["page_size"] = page_size
1232
+ if page_token is not None:
1233
+ query["page_token"] = page_token
1234
+ headers = {
1235
+ "Accept": "application/json",
1236
+ }
1237
+
1238
+ while True:
1239
+ json = self._api.do("GET", "/api/2.0/database/instances", query=query, headers=headers)
1240
+ if "database_instances" in json:
1241
+ for v in json["database_instances"]:
1242
+ yield DatabaseInstance.from_dict(v)
1243
+ if "next_page_token" not in json or not json["next_page_token"]:
1244
+ return
1245
+ query["page_token"] = json["next_page_token"]
1246
+
1247
+ def update_database_instance(
1248
+ self, name: str, database_instance: DatabaseInstance, update_mask: str
1249
+ ) -> DatabaseInstance:
1250
+ """Update a Database Instance.
1251
+
1252
+ :param name: str
1253
+ The name of the instance. This is the unique identifier for the instance.
1254
+ :param database_instance: :class:`DatabaseInstance`
1255
+ A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
1256
+ :param update_mask: str
1257
+ The list of fields to update.
1258
+
1259
+ :returns: :class:`DatabaseInstance`
1260
+ """
1261
+ body = database_instance.as_dict()
1262
+ query = {}
1263
+ if update_mask is not None:
1264
+ query["update_mask"] = update_mask
1265
+ headers = {
1266
+ "Accept": "application/json",
1267
+ "Content-Type": "application/json",
1268
+ }
1269
+
1270
+ res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers)
1271
+ return DatabaseInstance.from_dict(res)