databricks-sdk 0.55.0__py3-none-any.whl → 0.56.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic; see the release's advisory page for more details.

@@ -0,0 +1,1256 @@
1
+ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
2
+
3
+ from __future__ import annotations
4
+
5
+ import logging
6
+ from dataclasses import dataclass
7
+ from enum import Enum
8
+ from typing import Any, Dict, Iterator, List, Optional
9
+
10
+ from ._internal import _enum, _from_dict, _repeated_dict
11
+
12
+ _LOG = logging.getLogger("databricks.sdk")
13
+
14
+
15
+ # all definitions in this file are in alphabetical order
16
+
17
+
18
@dataclass
class DatabaseCatalog:
    """A catalog in UC that is backed by a database in a DatabaseInstance."""

    name: str
    """The name of the catalog in UC."""

    database_instance_name: str
    """The name of the DatabaseInstance housing the database."""

    database_name: str
    """The name of the database (in an instance) associated with the catalog."""

    create_database_if_not_exists: Optional[bool] = None

    uid: Optional[str] = None

    # Serialized JSON keys, in the stable (alphabetical) order used throughout this module.
    def as_dict(self) -> dict:
        """Serializes the DatabaseCatalog into a dictionary suitable for use as a JSON request body."""
        body = {}
        for key in ("create_database_if_not_exists", "database_instance_name", "database_name", "name", "uid"):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseCatalog into a shallow dictionary of its immediate attributes."""
        # Every field is a plain scalar, so the shallow form is identical to as_dict().
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog:
        """Deserializes the DatabaseCatalog from a dictionary."""
        keys = ("create_database_if_not_exists", "database_instance_name", "database_name", "name", "uid")
        return cls(**{key: d.get(key) for key in keys})
73
+
74
+
75
@dataclass
class DatabaseCredential:
    token: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body."""
        return {"token": self.token} if self.token is not None else {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes."""
        # The single field is a plain scalar, so shallow and deep forms coincide.
        return {"token": self.token} if self.token is not None else {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential:
        """Deserializes the DatabaseCredential from a dictionary."""
        return cls(token=d.get("token"))
97
+
98
+
99
@dataclass
class DatabaseInstance:
    """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
    storage."""

    name: str
    """The name of the instance. This is the unique identifier for the instance."""

    capacity: Optional[str] = None
    """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"."""

    creation_time: Optional[str] = None
    """The timestamp when the instance was created."""

    creator: Optional[str] = None
    """The email of the creator of the instance."""

    pg_version: Optional[str] = None
    """The version of Postgres running on the instance."""

    read_write_dns: Optional[str] = None
    """The DNS endpoint to connect to the instance for read+write access."""

    state: Optional[DatabaseInstanceState] = None
    """The current state of the instance."""

    stopped: Optional[bool] = None
    """Whether the instance is stopped."""

    uid: Optional[str] = None
    """An immutable UUID identifier for the instance."""

    def as_dict(self) -> dict:
        """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Alphabetical key order matches the original generated serializer.
        for key in ("capacity", "creation_time", "creator", "name", "pg_version", "read_write_dns", "state", "stopped", "uid"):
            value = getattr(self, key)
            if value is None:
                continue
            # The deep form serializes the enum by value.
            body[key] = value.value if key == "state" else value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes."""
        body = {}
        for key in ("capacity", "creation_time", "creator", "name", "pg_version", "read_write_dns", "state", "stopped", "uid"):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
        """Deserializes the DatabaseInstance from a dictionary."""
        plain = {
            key: d.get(key)
            for key in ("capacity", "creation_time", "creator", "name", "pg_version", "read_write_dns", "stopped", "uid")
        }
        return cls(state=_enum(d, "state", DatabaseInstanceState), **plain)
191
+
192
+
193
class DatabaseInstanceState(Enum):
    """Possible lifecycle states of a database instance (see DatabaseInstance.state)."""

    AVAILABLE = "AVAILABLE"
    DELETING = "DELETING"
    FAILING_OVER = "FAILING_OVER"
    STARTING = "STARTING"
    STOPPED = "STOPPED"
    UPDATING = "UPDATING"
201
+
202
+
203
@dataclass
class DatabaseTable:
    """Next field marker: 13"""

    name: str
    """Full three-part (catalog, schema, table) name of the table."""

    database_instance_name: Optional[str] = None
    """Name of the target database instance. This is required when creating database tables in standard
    catalogs. This is optional when creating database tables in registered catalogs. If this field
    is specified when creating database tables in registered catalogs, the database instance name
    MUST match that of the registered catalog (or the request will be rejected)."""

    logical_database_name: Optional[str] = None
    """Target Postgres database object (logical database) name for this table. This field is optional
    in all scenarios.

    When creating a table in a registered Postgres catalog, the target Postgres database name is
    inferred to be that of the registered catalog. If this field is specified in this scenario, the
    Postgres database name MUST match that of the registered catalog (or the request will be
    rejected).

    When creating a table in a standard catalog, the target database name is inferred to be that of
    the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary
    postgres database. Note that this has implications for the `create_database_objects_is_missing`
    field in `spec`."""

    table_serving_url: Optional[str] = None
    """Data serving REST API URL for this table"""

    # All fields are plain scalars, so both serialized forms share one implementation.
    _SERIALIZED_KEYS = ("database_instance_name", "logical_database_name", "name", "table_serving_url")

    def as_dict(self) -> dict:
        """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
        body = {}
        for key in self._SERIALIZED_KEYS:
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseTable into a shallow dictionary of its immediate attributes."""
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable:
        """Deserializes the DatabaseTable from a dictionary."""
        return cls(**{key: d.get(key) for key in cls._SERIALIZED_KEYS})
268
+
269
+
270
@dataclass
class DeleteDatabaseCatalogResponse:
    """Empty response body for a database-catalog delete call."""

    def as_dict(self) -> dict:
        """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body."""
        # No fields to serialize: always an empty body.
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
        """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
        return cls()
286
+
287
+
288
@dataclass
class DeleteDatabaseInstanceResponse:
    """Empty response body for a database-instance delete call."""

    def as_dict(self) -> dict:
        """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body."""
        # No fields to serialize: always an empty body.
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
        """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
        return cls()
304
+
305
+
306
@dataclass
class DeleteDatabaseTableResponse:
    """Empty response body for a database-table delete call."""

    def as_dict(self) -> dict:
        """Serializes the DeleteDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
        # No fields to serialize: always an empty body.
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse:
        """Deserializes the DeleteDatabaseTableResponse from a dictionary."""
        return cls()
322
+
323
+
324
@dataclass
class DeleteSyncedDatabaseTableResponse:
    """Empty response body for a synced-database-table delete call."""

    def as_dict(self) -> dict:
        """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
        # No fields to serialize: always an empty body.
        return {}

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse:
        """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary."""
        return cls()
340
+
341
+
342
@dataclass
class GenerateDatabaseCredentialRequest:
    """Generates a credential that can be used to access database instances"""

    instance_names: Optional[List[str]] = None
    """Instances to which the token will be scoped."""

    request_id: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Truthiness check mirrors the generated code: an empty list is omitted.
        if self.instance_names:
            # The deep form copies the list so later mutation of the request
            # does not alter an already-built body.
            body["instance_names"] = list(self.instance_names)
        if self.request_id is not None:
            body["request_id"] = self.request_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.instance_names:
            body["instance_names"] = self.instance_names
        if self.request_id is not None:
            body["request_id"] = self.request_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest:
        """Deserializes the GenerateDatabaseCredentialRequest from a dictionary."""
        return cls(instance_names=d.get("instance_names"), request_id=d.get("request_id"))
373
+
374
+
375
@dataclass
class ListDatabaseInstancesResponse:
    database_instances: Optional[List[DatabaseInstance]] = None
    """List of instances."""

    next_page_token: Optional[str] = None
    """Pagination token to request the next page of instances."""

    def as_dict(self) -> dict:
        """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Truthiness check mirrors the generated code: an empty list is omitted.
        if self.database_instances:
            body["database_instances"] = [instance.as_dict() for instance in self.database_instances]
        if self.next_page_token is not None:
            body["next_page_token"] = self.next_page_token
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.database_instances:
            # Shallow form keeps the DatabaseInstance objects themselves.
            body["database_instances"] = self.database_instances
        if self.next_page_token is not None:
            body["next_page_token"] = self.next_page_token
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse:
        """Deserializes the ListDatabaseInstancesResponse from a dictionary."""
        return cls(
            database_instances=_repeated_dict(d, "database_instances", DatabaseInstance),
            next_page_token=d.get("next_page_token"),
        )
408
+
409
+
410
@dataclass
class NewPipelineSpec:
    """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other
    fields of pipeline are still inferred by table def internally"""

    storage_catalog: Optional[str] = None
    """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This
    needs to be a standard catalog where the user has permissions to create Delta tables."""

    storage_schema: Optional[str] = None
    """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs
    to be in the standard catalog where the user has permissions to create Delta tables."""

    def as_dict(self) -> dict:
        """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
        for key in ("storage_catalog", "storage_schema"):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes."""
        # Both fields are plain scalars, so the shallow form matches as_dict().
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec:
        """Deserializes the NewPipelineSpec from a dictionary."""
        return cls(storage_catalog=d.get("storage_catalog"), storage_schema=d.get("storage_schema"))
445
+
446
+
447
class ProvisioningInfoState(Enum):
    """Provisioning states; used e.g. for a synced table's Unity Catalog provisioning state
    (see SyncedDatabaseTable.unity_catalog_provisioning_state)."""

    ACTIVE = "ACTIVE"
    DEGRADED = "DEGRADED"
    DELETING = "DELETING"
    FAILED = "FAILED"
    PROVISIONING = "PROVISIONING"
    UPDATING = "UPDATING"
455
+
456
+
457
@dataclass
class SyncedDatabaseTable:
    """Next field marker: 12"""

    name: str
    """Full three-part (catalog, schema, table) name of the table."""

    data_synchronization_status: Optional[SyncedTableStatus] = None
    """Synced Table data synchronization status"""

    database_instance_name: Optional[str] = None
    """Name of the target database instance. This is required when creating synced database tables in
    standard catalogs. This is optional when creating synced database tables in registered catalogs.
    If this field is specified when creating synced database tables in registered catalogs, the
    database instance name MUST match that of the registered catalog (or the request will be
    rejected)."""

    logical_database_name: Optional[str] = None
    """Target Postgres database object (logical database) name for this table. This field is optional
    in all scenarios.

    When creating a synced table in a registered Postgres catalog, the target Postgres database name
    is inferred to be that of the registered catalog. If this field is specified in this scenario,
    the Postgres database name MUST match that of the registered catalog (or the request will be
    rejected).

    When creating a synced table in a standard catalog, the target database name is inferred to be
    that of the standard catalog. In this scenario, specifying this field will allow targeting an
    arbitrary postgres database."""

    spec: Optional[SyncedTableSpec] = None
    """Specification of a synced database table."""

    table_serving_url: Optional[str] = None
    """Data serving REST API URL for this table"""

    unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
    """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
    state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
    may be in "PROVISIONING" as it runs asynchronously)."""

    def as_dict(self) -> dict:
        """Serializes the SyncedDatabaseTable into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Nested messages use a truthiness check and are expanded via as_dict();
        # scalars and the enum use an explicit is-not-None check, matching the
        # generated serializer exactly.
        if self.data_synchronization_status:
            body["data_synchronization_status"] = self.data_synchronization_status.as_dict()
        for key in ("database_instance_name", "logical_database_name", "name"):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        if self.spec:
            body["spec"] = self.spec.as_dict()
        if self.table_serving_url is not None:
            body["table_serving_url"] = self.table_serving_url
        if self.unity_catalog_provisioning_state is not None:
            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedDatabaseTable into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.data_synchronization_status:
            body["data_synchronization_status"] = self.data_synchronization_status
        for key in ("database_instance_name", "logical_database_name", "name"):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        if self.spec:
            body["spec"] = self.spec
        if self.table_serving_url is not None:
            body["table_serving_url"] = self.table_serving_url
        if self.unity_catalog_provisioning_state is not None:
            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable:
        """Deserializes the SyncedDatabaseTable from a dictionary."""
        return cls(
            data_synchronization_status=_from_dict(d, "data_synchronization_status", SyncedTableStatus),
            database_instance_name=d.get("database_instance_name"),
            logical_database_name=d.get("logical_database_name"),
            name=d.get("name"),
            spec=_from_dict(d, "spec", SyncedTableSpec),
            table_serving_url=d.get("table_serving_url"),
            unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
        )
548
+
549
+
550
@dataclass
class SyncedTableContinuousUpdateStatus:
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE
    or the SYNCED_UPDATING_PIPELINE_RESOURCES state."""

    initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None
    """Progress of the initial data synchronization."""

    last_processed_commit_version: Optional[int] = None
    """The last source table Delta version that was synced to the synced table. Note that this Delta
    version may not be completely synced to the synced table yet."""

    timestamp: Optional[str] = None
    """The timestamp of the last time any data was synchronized from the source table to the synced
    table."""

    def _serialize(self, deep: bool) -> dict:
        """Build the serialized body; ``deep`` controls whether the nested progress message is expanded."""
        body = {}
        progress = self.initial_pipeline_sync_progress
        # Truthiness check on the nested message mirrors the generated code.
        if progress:
            body["initial_pipeline_sync_progress"] = progress.as_dict() if deep else progress
        if self.last_processed_commit_version is not None:
            body["last_processed_commit_version"] = self.last_processed_commit_version
        if self.timestamp is not None:
            body["timestamp"] = self.timestamp
        return body

    def as_dict(self) -> dict:
        """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
        return self._serialize(deep=True)

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
        return self._serialize(deep=False)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableContinuousUpdateStatus:
        """Deserializes the SyncedTableContinuousUpdateStatus from a dictionary."""
        return cls(
            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress),
            last_processed_commit_version=d.get("last_processed_commit_version"),
            timestamp=d.get("timestamp"),
        )
596
+
597
+
598
@dataclass
class SyncedTableFailedStatus:
    """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the
    SYNCED_PIPELINE_FAILED state."""

    last_processed_commit_version: Optional[int] = None
    """The last source table Delta version that was synced to the synced table. Note that this Delta
    version may only be partially synced to the synced table. Only populated if the table is still
    synced and available for serving."""

    timestamp: Optional[str] = None
    """The timestamp of the last time any data was synchronized from the source table to the synced
    table. Only populated if the table is still synced and available for serving."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableFailedStatus into a dictionary suitable for use as a JSON request body."""
        body = {}
        for key in ("last_processed_commit_version", "timestamp"):
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableFailedStatus into a shallow dictionary of its immediate attributes."""
        # Both fields are plain scalars, so the shallow form matches as_dict().
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableFailedStatus:
        """Deserializes the SyncedTableFailedStatus from a dictionary."""
        return cls(
            last_processed_commit_version=d.get("last_processed_commit_version"),
            timestamp=d.get("timestamp"),
        )
637
+
638
+
639
@dataclass
class SyncedTablePipelineProgress:
    """Progress information of the Synced Table data synchronization pipeline."""

    estimated_completion_time_seconds: Optional[float] = None
    """The estimated time remaining to complete this update in seconds."""

    latest_version_currently_processing: Optional[int] = None
    """The source table Delta version that was last processed by the pipeline. The pipeline may not
    have completely processed this version yet."""

    sync_progress_completion: Optional[float] = None
    """The completion ratio of this update. This is a number between 0 and 1."""

    synced_row_count: Optional[int] = None
    """The number of rows that have been synced in this update."""

    total_row_count: Optional[int] = None
    """The total number of rows that need to be synced in this update. This number may be an estimate."""

    # Serialized JSON keys, in the alphabetical order used by the original generator.
    _SERIALIZED_KEYS = (
        "estimated_completion_time_seconds",
        "latest_version_currently_processing",
        "sync_progress_completion",
        "synced_row_count",
        "total_row_count",
    )

    def as_dict(self) -> dict:
        """Serializes the SyncedTablePipelineProgress into a dictionary suitable for use as a JSON request body."""
        body = {}
        for key in self._SERIALIZED_KEYS:
            value = getattr(self, key)
            if value is not None:
                body[key] = value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTablePipelineProgress into a shallow dictionary of its immediate attributes."""
        # All fields are plain scalars, so the shallow form matches as_dict().
        return self.as_dict()

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePipelineProgress:
        """Deserializes the SyncedTablePipelineProgress from a dictionary."""
        return cls(**{key: d.get(key) for key in cls._SERIALIZED_KEYS})
699
+
700
+
701
@dataclass
class SyncedTableProvisioningStatus:
    """Detailed status of a synced table. Shown if the synced table is in the
    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""

    initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None
    """Details about initial data synchronization. Only populated when in the
    PROVISIONING_INITIAL_SNAPSHOT state."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableProvisioningStatus into a dictionary suitable for use as a JSON request body."""
        progress = self.initial_pipeline_sync_progress
        # Truthiness check on the nested message mirrors the generated code.
        return {"initial_pipeline_sync_progress": progress.as_dict()} if progress else {}

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableProvisioningStatus into a shallow dictionary of its immediate attributes."""
        progress = self.initial_pipeline_sync_progress
        return {"initial_pipeline_sync_progress": progress} if progress else {}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableProvisioningStatus:
        """Deserializes the SyncedTableProvisioningStatus from a dictionary."""
        return cls(
            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress)
        )
730
+
731
+
732
class SyncedTableSchedulingPolicy(Enum):
    """Scheduling policy of the underlying pipeline of a synced table
    (see SyncedTableSpec.scheduling_policy)."""

    CONTINUOUS = "CONTINUOUS"
    SNAPSHOT = "SNAPSHOT"
    TRIGGERED = "TRIGGERED"
737
+
738
+
739
+ @dataclass
740
+ class SyncedTableSpec:
741
+ """Specification of a synced database table."""
742
+
743
+ create_database_objects_if_missing: Optional[bool] = None
744
+ """If true, the synced table's logical database and schema resources in PG will be created if they
745
+ do not already exist."""
746
+
747
+ new_pipeline_spec: Optional[NewPipelineSpec] = None
748
+ """Spec of new pipeline. Should be empty if pipeline_id is set"""
749
+
750
+ pipeline_id: Optional[str] = None
751
+ """ID of the associated pipeline. Should be empty if new_pipeline_spec is set"""
752
+
753
+ primary_key_columns: Optional[List[str]] = None
754
+ """Primary Key columns to be used for data insert/update in the destination."""
755
+
756
+ scheduling_policy: Optional[SyncedTableSchedulingPolicy] = None
757
+ """Scheduling policy of the underlying pipeline."""
758
+
759
+ source_table_full_name: Optional[str] = None
760
+ """Three-part (catalog, schema, table) name of the source Delta table."""
761
+
762
+ timeseries_key: Optional[str] = None
763
+ """Time series key to deduplicate (tie-break) rows with the same primary key."""
764
+
765
+ def as_dict(self) -> dict:
766
+ """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body."""
767
+ body = {}
768
+ if self.create_database_objects_if_missing is not None:
769
+ body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
770
+ if self.new_pipeline_spec:
771
+ body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict()
772
+ if self.pipeline_id is not None:
773
+ body["pipeline_id"] = self.pipeline_id
774
+ if self.primary_key_columns:
775
+ body["primary_key_columns"] = [v for v in self.primary_key_columns]
776
+ if self.scheduling_policy is not None:
777
+ body["scheduling_policy"] = self.scheduling_policy.value
778
+ if self.source_table_full_name is not None:
779
+ body["source_table_full_name"] = self.source_table_full_name
780
+ if self.timeseries_key is not None:
781
+ body["timeseries_key"] = self.timeseries_key
782
+ return body
783
+
784
+ def as_shallow_dict(self) -> dict:
785
+ """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes."""
786
+ body = {}
787
+ if self.create_database_objects_if_missing is not None:
788
+ body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
789
+ if self.new_pipeline_spec:
790
+ body["new_pipeline_spec"] = self.new_pipeline_spec
791
+ if self.pipeline_id is not None:
792
+ body["pipeline_id"] = self.pipeline_id
793
+ if self.primary_key_columns:
794
+ body["primary_key_columns"] = self.primary_key_columns
795
+ if self.scheduling_policy is not None:
796
+ body["scheduling_policy"] = self.scheduling_policy
797
+ if self.source_table_full_name is not None:
798
+ body["source_table_full_name"] = self.source_table_full_name
799
+ if self.timeseries_key is not None:
800
+ body["timeseries_key"] = self.timeseries_key
801
+ return body
802
+
803
+ @classmethod
804
+ def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec:
805
+ """Deserializes the SyncedTableSpec from a dictionary."""
806
+ return cls(
807
+ create_database_objects_if_missing=d.get("create_database_objects_if_missing", None),
808
+ new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec),
809
+ pipeline_id=d.get("pipeline_id", None),
810
+ primary_key_columns=d.get("primary_key_columns", None),
811
+ scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy),
812
+ source_table_full_name=d.get("source_table_full_name", None),
813
+ timeseries_key=d.get("timeseries_key", None),
814
+ )
815
+
816
+
817
class SyncedTableState(Enum):
    """The state of a synced table."""

    # The string values are the wire values exchanged with the REST API and must
    # match the server's OpenAPI spec exactly.
    # NOTE(review): "SYNCED_TABLED_OFFLINE" looks like a typo for
    # "SYNCED_TABLE_OFFLINE", but it must be kept as-is to match the server-side
    # value — confirm against the OpenAPI spec before ever changing it.
    SYNCED_TABLED_OFFLINE = "SYNCED_TABLED_OFFLINE"
    SYNCED_TABLE_OFFLINE_FAILED = "SYNCED_TABLE_OFFLINE_FAILED"
    SYNCED_TABLE_ONLINE = "SYNCED_TABLE_ONLINE"
    SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE = "SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE"
    SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE = "SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE"
    SYNCED_TABLE_ONLINE_PIPELINE_FAILED = "SYNCED_TABLE_ONLINE_PIPELINE_FAILED"
    SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE = "SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE"
    SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES = "SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES"
    SYNCED_TABLE_PROVISIONING = "SYNCED_TABLE_PROVISIONING"
    SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT = "SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT"
    SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES = "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES"
831
+
832
+
833
@dataclass
class SyncedTableStatus:
    """Status of a synced table.

    ``detailed_state`` selects which of the per-state sub-status fields
    (continuous/triggered/provisioning/failed) the server populates.
    """

    continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE
    or the SYNCED_UPDATING_PIPELINE_RESOURCES state."""

    detailed_state: Optional[SyncedTableState] = None
    """The state of the synced table."""

    failed_status: Optional[SyncedTableFailedStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the
    SYNCED_PIPELINE_FAILED state."""

    message: Optional[str] = None
    """A text description of the current state of the synced table."""

    provisioning_status: Optional[SyncedTableProvisioningStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the
    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""

    triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE
    or the SYNCED_NO_PENDING_UPDATE state."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body."""
        # None/empty fields are omitted; nested statuses are recursively
        # serialized and the enum is flattened to its wire value.
        serialized = {}
        if self.continuous_update_status:
            serialized["continuous_update_status"] = self.continuous_update_status.as_dict()
        if self.detailed_state is not None:
            serialized["detailed_state"] = self.detailed_state.value
        if self.failed_status:
            serialized["failed_status"] = self.failed_status.as_dict()
        if self.message is not None:
            serialized["message"] = self.message
        if self.provisioning_status:
            serialized["provisioning_status"] = self.provisioning_status.as_dict()
        if self.triggered_update_status:
            serialized["triggered_update_status"] = self.triggered_update_status.as_dict()
        return serialized

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableStatus into a shallow dictionary of its immediate attributes."""
        # Nested statuses and the state enum are kept as live objects here.
        shallow = {}
        if self.continuous_update_status:
            shallow["continuous_update_status"] = self.continuous_update_status
        if self.detailed_state is not None:
            shallow["detailed_state"] = self.detailed_state
        if self.failed_status:
            shallow["failed_status"] = self.failed_status
        if self.message is not None:
            shallow["message"] = self.message
        if self.provisioning_status:
            shallow["provisioning_status"] = self.provisioning_status
        if self.triggered_update_status:
            shallow["triggered_update_status"] = self.triggered_update_status
        return shallow

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus:
        """Deserializes the SyncedTableStatus from a dictionary."""
        return cls(
            continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus),
            detailed_state=_enum(d, "detailed_state", SyncedTableState),
            failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus),
            message=d.get("message", None),
            provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus),
            triggered_update_status=_from_dict(d, "triggered_update_status", SyncedTableTriggeredUpdateStatus),
        )
904
+
905
+
906
@dataclass
class SyncedTableTriggeredUpdateStatus:
    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE
    or the SYNCED_NO_PENDING_UPDATE state."""

    last_processed_commit_version: Optional[int] = None
    """The last source table Delta version that was synced to the synced table. Note that this Delta
    version may not be completely synced to the synced table yet."""

    timestamp: Optional[str] = None
    """The timestamp of the last time any data was synchronized from the source table to the synced
    table."""

    triggered_update_progress: Optional[SyncedTablePipelineProgress] = None
    """Progress of the active data synchronization pipeline."""

    def as_dict(self) -> dict:
        """Serializes the SyncedTableTriggeredUpdateStatus into a dictionary suitable for use as a JSON request body."""
        # Unset fields are omitted; the nested progress object is recursively
        # serialized when present.
        serialized = {}
        if self.last_processed_commit_version is not None:
            serialized["last_processed_commit_version"] = self.last_processed_commit_version
        if self.timestamp is not None:
            serialized["timestamp"] = self.timestamp
        if self.triggered_update_progress:
            serialized["triggered_update_progress"] = self.triggered_update_progress.as_dict()
        return serialized

    def as_shallow_dict(self) -> dict:
        """Serializes the SyncedTableTriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
        shallow = {}
        if self.last_processed_commit_version is not None:
            shallow["last_processed_commit_version"] = self.last_processed_commit_version
        if self.timestamp is not None:
            shallow["timestamp"] = self.timestamp
        if self.triggered_update_progress:
            shallow["triggered_update_progress"] = self.triggered_update_progress
        return shallow

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableTriggeredUpdateStatus:
        """Deserializes the SyncedTableTriggeredUpdateStatus from a dictionary."""
        return cls(
            last_processed_commit_version=d.get("last_processed_commit_version", None),
            timestamp=d.get("timestamp", None),
            triggered_update_progress=_from_dict(d, "triggered_update_progress", SyncedTablePipelineProgress),
        )
952
+
953
+
954
class DatabaseAPI:
    """Database Instances provide access to a database via REST API or direct SQL."""

    def __init__(self, api_client):
        # Thin wrapper: every method delegates to the shared API client's do().
        self._api = api_client

    def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog:
        """Create a Database Catalog.

        :param catalog: :class:`DatabaseCatalog`

        :returns: :class:`DatabaseCatalog`
        """
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = self._api.do("POST", "/api/2.0/database/catalogs", body=catalog.as_dict(), headers=headers)
        return DatabaseCatalog.from_dict(response)

    def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance:
        """Create a Database Instance.

        :param database_instance: :class:`DatabaseInstance`
          A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.

        :returns: :class:`DatabaseInstance`
        """
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = self._api.do(
            "POST", "/api/2.0/database/instances", body=database_instance.as_dict(), headers=headers
        )
        return DatabaseInstance.from_dict(response)

    def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
        """Create a Database Table.

        :param table: :class:`DatabaseTable`
          Next field marker: 13

        :returns: :class:`DatabaseTable`
        """
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = self._api.do("POST", "/api/2.0/database/tables", body=table.as_dict(), headers=headers)
        return DatabaseTable.from_dict(response)

    def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable:
        """Create a Synced Database Table.

        :param synced_table: :class:`SyncedDatabaseTable`
          Next field marker: 12

        :returns: :class:`SyncedDatabaseTable`
        """
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = self._api.do("POST", "/api/2.0/database/synced_tables", body=synced_table.as_dict(), headers=headers)
        return SyncedDatabaseTable.from_dict(response)

    def delete_database_catalog(self, name: str):
        """Delete a Database Catalog.

        :param name: str


        """
        headers = {"Accept": "application/json"}
        self._api.do("DELETE", f"/api/2.0/database/catalogs/{name}", headers=headers)

    def delete_database_instance(self, name: str, *, force: Optional[bool] = None, purge: Optional[bool] = None):
        """Delete a Database Instance.

        :param name: str
          Name of the instance to delete.
        :param force: bool (optional)
          By default, a instance cannot be deleted if it has descendant instances created via PITR. If this
          flag is specified as true, all descendent instances will be deleted as well.
        :param purge: bool (optional)
          If false, the database instance is soft deleted. Soft deleted instances behave as if they are
          deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by
          calling the undelete API for a limited time. If true, the database instance is hard deleted and
          cannot be undeleted.


        """
        # Only explicitly supplied flags become query parameters.
        query = {}
        if force is not None:
            query["force"] = force
        if purge is not None:
            query["purge"] = purge
        headers = {"Accept": "application/json"}
        self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers)

    def delete_database_table(self, name: str):
        """Delete a Database Table.

        :param name: str


        """
        headers = {"Accept": "application/json"}
        self._api.do("DELETE", f"/api/2.0/database/tables/{name}", headers=headers)

    def delete_synced_database_table(self, name: str):
        """Delete a Synced Database Table.

        :param name: str


        """
        headers = {"Accept": "application/json"}
        self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers)

    def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance:
        """Find a Database Instance by uid.

        :param uid: str (optional)
          UID of the cluster to get.

        :returns: :class:`DatabaseInstance`
        """
        query = {}
        if uid is not None:
            query["uid"] = uid
        headers = {"Accept": "application/json"}
        response = self._api.do("GET", "/api/2.0/database/instances:findByUid", query=query, headers=headers)
        return DatabaseInstance.from_dict(response)

    def generate_database_credential(
        self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None
    ) -> DatabaseCredential:
        """Generates a credential that can be used to access database instances.

        :param instance_names: List[str] (optional)
          Instances to which the token will be scoped.
        :param request_id: str (optional)

        :returns: :class:`DatabaseCredential`
        """
        payload = {}
        if instance_names is not None:
            payload["instance_names"] = list(instance_names)
        if request_id is not None:
            payload["request_id"] = request_id
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = self._api.do("POST", "/api/2.0/database/credentials", body=payload, headers=headers)
        return DatabaseCredential.from_dict(response)

    def get_database_catalog(self, name: str) -> DatabaseCatalog:
        """Get a Database Catalog.

        :param name: str

        :returns: :class:`DatabaseCatalog`
        """
        headers = {"Accept": "application/json"}
        response = self._api.do("GET", f"/api/2.0/database/catalogs/{name}", headers=headers)
        return DatabaseCatalog.from_dict(response)

    def get_database_instance(self, name: str) -> DatabaseInstance:
        """Get a Database Instance.

        :param name: str
          Name of the cluster to get.

        :returns: :class:`DatabaseInstance`
        """
        headers = {"Accept": "application/json"}
        response = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers)
        return DatabaseInstance.from_dict(response)

    def get_database_table(self, name: str) -> DatabaseTable:
        """Get a Database Table.

        :param name: str

        :returns: :class:`DatabaseTable`
        """
        headers = {"Accept": "application/json"}
        response = self._api.do("GET", f"/api/2.0/database/tables/{name}", headers=headers)
        return DatabaseTable.from_dict(response)

    def get_synced_database_table(self, name: str) -> SyncedDatabaseTable:
        """Get a Synced Database Table.

        :param name: str

        :returns: :class:`SyncedDatabaseTable`
        """
        headers = {"Accept": "application/json"}
        response = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers)
        return SyncedDatabaseTable.from_dict(response)

    def list_database_instances(
        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
    ) -> Iterator[DatabaseInstance]:
        """List Database Instances.

        :param page_size: int (optional)
          Upper bound for items returned.
        :param page_token: str (optional)
          Pagination token to go to the next page of Database Instances. Requests first page if absent.

        :returns: Iterator over :class:`DatabaseInstance`
        """
        query = {}
        if page_size is not None:
            query["page_size"] = page_size
        if page_token is not None:
            query["page_token"] = page_token
        headers = {"Accept": "application/json"}

        # Lazily walk pages until the server stops returning a next_page_token.
        while True:
            page = self._api.do("GET", "/api/2.0/database/instances", query=query, headers=headers)
            for raw in page.get("database_instances", []):
                yield DatabaseInstance.from_dict(raw)
            next_token = page.get("next_page_token")
            if not next_token:
                return
            query["page_token"] = next_token

    def update_database_instance(
        self, name: str, database_instance: DatabaseInstance, update_mask: str
    ) -> DatabaseInstance:
        """Update a Database Instance.

        :param name: str
          The name of the instance. This is the unique identifier for the instance.
        :param database_instance: :class:`DatabaseInstance`
          A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
        :param update_mask: str
          The list of fields to update.

        :returns: :class:`DatabaseInstance`
        """
        query = {}
        if update_mask is not None:
            query["update_mask"] = update_mask
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = self._api.do(
            "PATCH",
            f"/api/2.0/database/instances/{name}",
            query=query,
            body=database_instance.as_dict(),
            headers=headers,
        )
        return DatabaseInstance.from_dict(response)