databricks-sdk 0.56.0__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (31)
  1. databricks/sdk/__init__.py +38 -11
  2. databricks/sdk/service/aibuilder.py +122 -17
  3. databricks/sdk/service/apps.py +15 -45
  4. databricks/sdk/service/billing.py +70 -74
  5. databricks/sdk/service/catalog.py +1898 -557
  6. databricks/sdk/service/cleanrooms.py +14 -55
  7. databricks/sdk/service/compute.py +305 -508
  8. databricks/sdk/service/dashboards.py +148 -223
  9. databricks/sdk/service/database.py +657 -127
  10. databricks/sdk/service/files.py +18 -54
  11. databricks/sdk/service/iam.py +55 -165
  12. databricks/sdk/service/jobs.py +238 -214
  13. databricks/sdk/service/marketplace.py +47 -146
  14. databricks/sdk/service/ml.py +1137 -447
  15. databricks/sdk/service/oauth2.py +17 -46
  16. databricks/sdk/service/pipelines.py +93 -69
  17. databricks/sdk/service/provisioning.py +34 -212
  18. databricks/sdk/service/qualitymonitorv2.py +5 -33
  19. databricks/sdk/service/serving.py +69 -55
  20. databricks/sdk/service/settings.py +106 -434
  21. databricks/sdk/service/sharing.py +33 -95
  22. databricks/sdk/service/sql.py +164 -254
  23. databricks/sdk/service/vectorsearch.py +13 -62
  24. databricks/sdk/service/workspace.py +36 -110
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/RECORD +31 -31
  28. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/top_level.txt +0 -0
databricks/sdk/service/database.py

@@ -74,11 +74,15 @@ class DatabaseCatalog:
 
 @dataclass
 class DatabaseCredential:
+    expiration_time: Optional[str] = None
+
     token: Optional[str] = None
 
     def as_dict(self) -> dict:
         """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
         if self.token is not None:
             body["token"] = self.token
         return body
@@ -86,6 +90,8 @@ class DatabaseCredential:
     def as_shallow_dict(self) -> dict:
         """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
         if self.token is not None:
             body["token"] = self.token
         return body
@@ -93,7 +99,7 @@ class DatabaseCredential:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential:
         """Deserializes the DatabaseCredential from a dictionary."""
-        return cls(token=d.get("token", None))
+        return cls(expiration_time=d.get("expiration_time", None), token=d.get("token", None))
 
 
 @dataclass
@@ -107,18 +113,67 @@ class DatabaseInstance:
     capacity: Optional[str] = None
     """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"."""
 
+    child_instance_refs: Optional[List[DatabaseInstanceRef]] = None
+    """The refs of the child instances. This is only available if the instance is parent instance."""
+
     creation_time: Optional[str] = None
     """The timestamp when the instance was created."""
 
     creator: Optional[str] = None
     """The email of the creator of the instance."""
 
+    effective_enable_readable_secondaries: Optional[bool] = None
+    """xref AIP-129. `enable_readable_secondaries` is owned by the client, while
+    `effective_enable_readable_secondaries` is owned by the server. `enable_readable_secondaries`
+    will only be set in Create/Update response messages if and only if the user provides the field
+    via the request. `effective_enable_readable_secondaries` on the other hand will always bet set
+    in all response messages (Create/Update/Get/List)."""
+
+    effective_node_count: Optional[int] = None
+    """xref AIP-129. `node_count` is owned by the client, while `effective_node_count` is owned by the
+    server. `node_count` will only be set in Create/Update response messages if and only if the user
+    provides the field via the request. `effective_node_count` on the other hand will always bet set
+    in all response messages (Create/Update/Get/List)."""
+
+    effective_retention_window_in_days: Optional[int] = None
+    """xref AIP-129. `retention_window_in_days` is owned by the client, while
+    `effective_retention_window_in_days` is owned by the server. `retention_window_in_days` will
+    only be set in Create/Update response messages if and only if the user provides the field via
+    the request. `effective_retention_window_in_days` on the other hand will always bet set in all
+    response messages (Create/Update/Get/List)."""
+
+    effective_stopped: Optional[bool] = None
+    """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the
+    server. `stopped` will only be set in Create/Update response messages if and only if the user
+    provides the field via the request. `effective_stopped` on the other hand will always bet set in
+    all response messages (Create/Update/Get/List)."""
+
+    enable_readable_secondaries: Optional[bool] = None
+    """Whether to enable secondaries to serve read-only traffic. Defaults to false."""
+
+    node_count: Optional[int] = None
+    """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults
+    to 1 primary and 0 secondaries."""
+
+    parent_instance_ref: Optional[DatabaseInstanceRef] = None
+    """The ref of the parent instance. This is only available if the instance is child instance. Input:
+    For specifying the parent instance to create a child instance. Optional. Output: Only populated
+    if provided as input to create a child instance."""
+
     pg_version: Optional[str] = None
     """The version of Postgres running on the instance."""
 
+    read_only_dns: Optional[str] = None
+    """The DNS endpoint to connect to the instance for read only access. This is only available if
+    enable_readable_secondaries is true."""
+
     read_write_dns: Optional[str] = None
     """The DNS endpoint to connect to the instance for read+write access."""
 
+    retention_window_in_days: Optional[int] = None
+    """The retention window for the instance. This is the time window in days for which the historical
+    data is retained. The default value is 7 days. Valid values are 2 to 35 days."""
+
     state: Optional[DatabaseInstanceState] = None
     """The current state of the instance."""
 
@@ -133,16 +188,36 @@ class DatabaseInstance:
         body = {}
         if self.capacity is not None:
             body["capacity"] = self.capacity
+        if self.child_instance_refs:
+            body["child_instance_refs"] = [v.as_dict() for v in self.child_instance_refs]
         if self.creation_time is not None:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.effective_enable_readable_secondaries is not None:
+            body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries
+        if self.effective_node_count is not None:
+            body["effective_node_count"] = self.effective_node_count
+        if self.effective_retention_window_in_days is not None:
+            body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
+        if self.effective_stopped is not None:
+            body["effective_stopped"] = self.effective_stopped
+        if self.enable_readable_secondaries is not None:
+            body["enable_readable_secondaries"] = self.enable_readable_secondaries
         if self.name is not None:
             body["name"] = self.name
+        if self.node_count is not None:
+            body["node_count"] = self.node_count
+        if self.parent_instance_ref:
+            body["parent_instance_ref"] = self.parent_instance_ref.as_dict()
         if self.pg_version is not None:
             body["pg_version"] = self.pg_version
+        if self.read_only_dns is not None:
+            body["read_only_dns"] = self.read_only_dns
         if self.read_write_dns is not None:
             body["read_write_dns"] = self.read_write_dns
+        if self.retention_window_in_days is not None:
+            body["retention_window_in_days"] = self.retention_window_in_days
         if self.state is not None:
             body["state"] = self.state.value
         if self.stopped is not None:
@@ -156,16 +231,36 @@ class DatabaseInstance:
         body = {}
         if self.capacity is not None:
             body["capacity"] = self.capacity
+        if self.child_instance_refs:
+            body["child_instance_refs"] = self.child_instance_refs
         if self.creation_time is not None:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.effective_enable_readable_secondaries is not None:
+            body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries
+        if self.effective_node_count is not None:
+            body["effective_node_count"] = self.effective_node_count
+        if self.effective_retention_window_in_days is not None:
+            body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
+        if self.effective_stopped is not None:
+            body["effective_stopped"] = self.effective_stopped
+        if self.enable_readable_secondaries is not None:
+            body["enable_readable_secondaries"] = self.enable_readable_secondaries
         if self.name is not None:
             body["name"] = self.name
+        if self.node_count is not None:
+            body["node_count"] = self.node_count
+        if self.parent_instance_ref:
+            body["parent_instance_ref"] = self.parent_instance_ref
         if self.pg_version is not None:
             body["pg_version"] = self.pg_version
+        if self.read_only_dns is not None:
+            body["read_only_dns"] = self.read_only_dns
         if self.read_write_dns is not None:
             body["read_write_dns"] = self.read_write_dns
+        if self.retention_window_in_days is not None:
+            body["retention_window_in_days"] = self.retention_window_in_days
         if self.state is not None:
             body["state"] = self.state
         if self.stopped is not None:
@@ -179,17 +274,216 @@ class DatabaseInstance:
         """Deserializes the DatabaseInstance from a dictionary."""
         return cls(
             capacity=d.get("capacity", None),
+            child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef),
             creation_time=d.get("creation_time", None),
             creator=d.get("creator", None),
+            effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None),
+            effective_node_count=d.get("effective_node_count", None),
+            effective_retention_window_in_days=d.get("effective_retention_window_in_days", None),
+            effective_stopped=d.get("effective_stopped", None),
+            enable_readable_secondaries=d.get("enable_readable_secondaries", None),
             name=d.get("name", None),
+            node_count=d.get("node_count", None),
+            parent_instance_ref=_from_dict(d, "parent_instance_ref", DatabaseInstanceRef),
             pg_version=d.get("pg_version", None),
+            read_only_dns=d.get("read_only_dns", None),
             read_write_dns=d.get("read_write_dns", None),
+            retention_window_in_days=d.get("retention_window_in_days", None),
             state=_enum(d, "state", DatabaseInstanceState),
             stopped=d.get("stopped", None),
             uid=d.get("uid", None),
         )
 
 
+@dataclass
+class DatabaseInstanceRef:
+    """DatabaseInstanceRef is a reference to a database instance. It is used in the DatabaseInstance
+    object to refer to the parent instance of an instance and to refer the child instances of an
+    instance. To specify as a parent instance during creation of an instance, the lsn and
+    branch_time fields are optional. If not specified, the child instance will be created from the
+    latest lsn of the parent. If both lsn and branch_time are specified, the lsn will be used to
+    create the child instance."""
+
+    branch_time: Optional[str] = None
+    """Branch time of the ref database instance. For a parent ref instance, this is the point in time
+    on the parent instance from which the instance was created. For a child ref instance, this is
+    the point in time on the instance from which the child instance was created. Input: For
+    specifying the point in time to create a child instance. Optional. Output: Only populated if
+    provided as input to create a child instance."""
+
+    effective_lsn: Optional[str] = None
+    """xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server. `lsn`
+    will only be set in Create/Update response messages if and only if the user provides the field
+    via the request. `effective_lsn` on the other hand will always bet set in all response messages
+    (Create/Update/Get/List). For a parent ref instance, this is the LSN on the parent instance from
+    which the instance was created. For a child ref instance, this is the LSN on the instance from
+    which the child instance was created."""
+
+    lsn: Optional[str] = None
+    """User-specified WAL LSN of the ref database instance.
+
+    Input: For specifying the WAL LSN to create a child instance. Optional. Output: Only populated
+    if provided as input to create a child instance."""
+
+    name: Optional[str] = None
+    """Name of the ref database instance."""
+
+    uid: Optional[str] = None
+    """Id of the ref database instance."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseInstanceRef into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.branch_time is not None:
+            body["branch_time"] = self.branch_time
+        if self.effective_lsn is not None:
+            body["effective_lsn"] = self.effective_lsn
+        if self.lsn is not None:
+            body["lsn"] = self.lsn
+        if self.name is not None:
+            body["name"] = self.name
+        if self.uid is not None:
+            body["uid"] = self.uid
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseInstanceRef into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch_time is not None:
+            body["branch_time"] = self.branch_time
+        if self.effective_lsn is not None:
+            body["effective_lsn"] = self.effective_lsn
+        if self.lsn is not None:
+            body["lsn"] = self.lsn
+        if self.name is not None:
+            body["name"] = self.name
+        if self.uid is not None:
+            body["uid"] = self.uid
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef:
+        """Deserializes the DatabaseInstanceRef from a dictionary."""
+        return cls(
+            branch_time=d.get("branch_time", None),
+            effective_lsn=d.get("effective_lsn", None),
+            lsn=d.get("lsn", None),
+            name=d.get("name", None),
+            uid=d.get("uid", None),
+        )
+
+
+@dataclass
+class DatabaseInstanceRole:
+    """A DatabaseInstanceRole represents a Postgres role in a database instance."""
+
+    attributes: Optional[DatabaseInstanceRoleAttributes] = None
+    """API-exposed Postgres role attributes"""
+
+    identity_type: Optional[DatabaseInstanceRoleIdentityType] = None
+    """The type of the role."""
+
+    membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None
+    """An enum value for a standard role that this role is a member of."""
+
+    name: Optional[str] = None
+    """The name of the role. This is the unique identifier for the role in an instance."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.attributes:
+            body["attributes"] = self.attributes.as_dict()
+        if self.identity_type is not None:
+            body["identity_type"] = self.identity_type.value
+        if self.membership_role is not None:
+            body["membership_role"] = self.membership_role.value
+        if self.name is not None:
+            body["name"] = self.name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseInstanceRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attributes:
+            body["attributes"] = self.attributes
+        if self.identity_type is not None:
+            body["identity_type"] = self.identity_type
+        if self.membership_role is not None:
+            body["membership_role"] = self.membership_role
+        if self.name is not None:
+            body["name"] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRole:
+        """Deserializes the DatabaseInstanceRole from a dictionary."""
+        return cls(
+            attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes),
+            identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType),
+            membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole),
+            name=d.get("name", None),
+        )
+
+
+@dataclass
+class DatabaseInstanceRoleAttributes:
+    """Attributes that can be granted to a Postgres role. We are only implementing a subset for now,
+    see xref: https://www.postgresql.org/docs/16/sql-createrole.html The values follow Postgres
+    keyword naming e.g. CREATEDB, BYPASSRLS, etc. which is why they don't include typical
+    underscores between words. We were requested to make this a nested object/struct representation
+    since these are knobs from an external spec."""
+
+    bypassrls: Optional[bool] = None
+
+    createdb: Optional[bool] = None
+
+    createrole: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseInstanceRoleAttributes into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.bypassrls is not None:
+            body["bypassrls"] = self.bypassrls
+        if self.createdb is not None:
+            body["createdb"] = self.createdb
+        if self.createrole is not None:
+            body["createrole"] = self.createrole
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseInstanceRoleAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bypassrls is not None:
+            body["bypassrls"] = self.bypassrls
+        if self.createdb is not None:
+            body["createdb"] = self.createdb
+        if self.createrole is not None:
+            body["createrole"] = self.createrole
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRoleAttributes:
+        """Deserializes the DatabaseInstanceRoleAttributes from a dictionary."""
+        return cls(
+            bypassrls=d.get("bypassrls", None), createdb=d.get("createdb", None), createrole=d.get("createrole", None)
+        )
+
+
+class DatabaseInstanceRoleIdentityType(Enum):
+
+    GROUP = "GROUP"
+    PG_ONLY = "PG_ONLY"
+    SERVICE_PRINCIPAL = "SERVICE_PRINCIPAL"
+    USER = "USER"
+
+
+class DatabaseInstanceRoleMembershipRole(Enum):
+    """Roles that the DatabaseInstanceRole can be a member of."""
+
+    DATABRICKS_SUPERUSER = "DATABRICKS_SUPERUSER"
+
+
 class DatabaseInstanceState(Enum):
 
     AVAILABLE = "AVAILABLE"
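Taken together, parent_instance_ref and child_instance_refs add parent/child (branching) support to DatabaseInstance. A minimal sketch of creating a child instance from a parent, assuming the workspace client exposes this generated service as w.database (the accessor name is not part of this diff); instance names and the branch time are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.database import DatabaseInstance, DatabaseInstanceRef

    w = WorkspaceClient()

    # branch_time and lsn are optional; when both are omitted the child is created
    # from the parent's latest LSN, and lsn wins if both are supplied (see the
    # DatabaseInstanceRef docstring above).
    child = w.database.create_database_instance(
        DatabaseInstance(
            name="my-child-instance",
            capacity="CU_1",
            parent_instance_ref=DatabaseInstanceRef(
                name="my-parent-instance",
                branch_time="2025-01-01T00:00:00Z",
            ),
        )
    )
    print(child.read_write_dns, child.effective_node_count)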
@@ -214,21 +508,15 @@ class DatabaseTable:
     MUST match that of the registered catalog (or the request will be rejected)."""
 
     logical_database_name: Optional[str] = None
-    """Target Postgres database object (logical database) name for this table. This field is optional
-    in all scenarios.
+    """Target Postgres database object (logical database) name for this table.
 
     When creating a table in a registered Postgres catalog, the target Postgres database name is
     inferred to be that of the registered catalog. If this field is specified in this scenario, the
     Postgres database name MUST match that of the registered catalog (or the request will be
     rejected).
 
-    When creating a table in a standard catalog, the target database name is inferred to be that of
-    the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary
-    postgres database. Note that this has implications for the `create_database_objects_is_missing`
-    field in `spec`."""
-
-    table_serving_url: Optional[str] = None
-    """Data serving REST API URL for this table"""
+    When creating a table in a standard catalog, this field is required. In this scenario,
+    specifying this field will allow targeting an arbitrary postgres database."""
 
     def as_dict(self) -> dict:
         """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
@@ -239,8 +527,6 @@ class DatabaseTable:
             body["logical_database_name"] = self.logical_database_name
         if self.name is not None:
             body["name"] = self.name
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -252,8 +538,6 @@ class DatabaseTable:
             body["logical_database_name"] = self.logical_database_name
         if self.name is not None:
             body["name"] = self.name
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         return body
 
     @classmethod
@@ -263,86 +547,53 @@ class DatabaseTable:
             database_instance_name=d.get("database_instance_name", None),
             logical_database_name=d.get("logical_database_name", None),
             name=d.get("name", None),
-            table_serving_url=d.get("table_serving_url", None),
         )
 
 
 @dataclass
-class DeleteDatabaseCatalogResponse:
-    def as_dict(self) -> dict:
-        """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
-        """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
-        return cls()
-
-
-@dataclass
-class DeleteDatabaseInstanceResponse:
-    def as_dict(self) -> dict:
-        """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
-        """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
-        return cls()
-
-
-@dataclass
-class DeleteDatabaseTableResponse:
-    def as_dict(self) -> dict:
-        """Serializes the DeleteDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeleteDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse:
-        """Deserializes the DeleteDatabaseTableResponse from a dictionary."""
-        return cls()
+class DeltaTableSyncInfo:
+    delta_commit_timestamp: Optional[str] = None
+    """The timestamp when the above Delta version was committed in the source Delta table. Note: This
+    is the Delta commit time, not the time the data was written to the synced table."""
 
+    delta_commit_version: Optional[int] = None
+    """The Delta Lake commit version that was last successfully synced."""
 
-@dataclass
-class DeleteSyncedDatabaseTableResponse:
     def as_dict(self) -> dict:
-        """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the DeltaTableSyncInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.delta_commit_timestamp is not None:
+            body["delta_commit_timestamp"] = self.delta_commit_timestamp
+        if self.delta_commit_version is not None:
+            body["delta_commit_version"] = self.delta_commit_version
         return body
 
     def as_shallow_dict(self) -> dict:
-        """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
+        """Serializes the DeltaTableSyncInfo into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.delta_commit_timestamp is not None:
+            body["delta_commit_timestamp"] = self.delta_commit_timestamp
+        if self.delta_commit_version is not None:
+            body["delta_commit_version"] = self.delta_commit_version
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse:
-        """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary."""
-        return cls()
+    def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo:
+        """Deserializes the DeltaTableSyncInfo from a dictionary."""
+        return cls(
+            delta_commit_timestamp=d.get("delta_commit_timestamp", None),
+            delta_commit_version=d.get("delta_commit_version", None),
+        )
 
 
 @dataclass
 class GenerateDatabaseCredentialRequest:
     """Generates a credential that can be used to access database instances"""
 
+    claims: Optional[List[RequestedClaims]] = None
+    """The returned token will be scoped to the union of instance_names and instances containing the
+    specified UC tables, so instance_names is allowed to be empty."""
+
     instance_names: Optional[List[str]] = None
     """Instances to which the token will be scoped."""
 
@@ -351,6 +602,8 @@ class GenerateDatabaseCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.claims:
+            body["claims"] = [v.as_dict() for v in self.claims]
         if self.instance_names:
             body["instance_names"] = [v for v in self.instance_names]
         if self.request_id is not None:
@@ -360,6 +613,8 @@ class GenerateDatabaseCredentialRequest:
     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.claims:
+            body["claims"] = self.claims
         if self.instance_names:
            body["instance_names"] = self.instance_names
         if self.request_id is not None:
@@ -369,7 +624,46 @@ class GenerateDatabaseCredentialRequest:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest:
         """Deserializes the GenerateDatabaseCredentialRequest from a dictionary."""
-        return cls(instance_names=d.get("instance_names", None), request_id=d.get("request_id", None))
+        return cls(
+            claims=_repeated_dict(d, "claims", RequestedClaims),
+            instance_names=d.get("instance_names", None),
+            request_id=d.get("request_id", None),
+        )
+
+
+@dataclass
+class ListDatabaseInstanceRolesResponse:
+    database_instance_roles: Optional[List[DatabaseInstanceRole]] = None
+    """List of database instance roles."""
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of instances."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDatabaseInstanceRolesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_instance_roles:
+            body["database_instance_roles"] = [v.as_dict() for v in self.database_instance_roles]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDatabaseInstanceRolesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_instance_roles:
+            body["database_instance_roles"] = self.database_instance_roles
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstanceRolesResponse:
+        """Deserializes the ListDatabaseInstanceRolesResponse from a dictionary."""
+        return cls(
+            database_instance_roles=_repeated_dict(d, "database_instance_roles", DatabaseInstanceRole),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -454,6 +748,77 @@ class ProvisioningInfoState(Enum):
     UPDATING = "UPDATING"
 
 
+@dataclass
+class RequestedClaims:
+    permission_set: Optional[RequestedClaimsPermissionSet] = None
+
+    resources: Optional[List[RequestedResource]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the RequestedClaims into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.permission_set is not None:
+            body["permission_set"] = self.permission_set.value
+        if self.resources:
+            body["resources"] = [v.as_dict() for v in self.resources]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RequestedClaims into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_set is not None:
+            body["permission_set"] = self.permission_set
+        if self.resources:
+            body["resources"] = self.resources
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> RequestedClaims:
+        """Deserializes the RequestedClaims from a dictionary."""
+        return cls(
+            permission_set=_enum(d, "permission_set", RequestedClaimsPermissionSet),
+            resources=_repeated_dict(d, "resources", RequestedResource),
+        )
+
+
+class RequestedClaimsPermissionSet(Enum):
+    """Might add WRITE in the future"""
+
+    READ_ONLY = "READ_ONLY"
+
+
+@dataclass
+class RequestedResource:
+    table_name: Optional[str] = None
+
+    unspecified_resource_name: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the RequestedResource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.unspecified_resource_name is not None:
+            body["unspecified_resource_name"] = self.unspecified_resource_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RequestedResource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.unspecified_resource_name is not None:
+            body["unspecified_resource_name"] = self.unspecified_resource_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> RequestedResource:
+        """Deserializes the RequestedResource from a dictionary."""
+        return cls(
+            table_name=d.get("table_name", None), unspecified_resource_name=d.get("unspecified_resource_name", None)
+        )
+
+
 @dataclass
 class SyncedDatabaseTable:
     """Next field marker: 12"""
@@ -472,23 +837,18 @@ class SyncedDatabaseTable:
     rejected)."""
 
     logical_database_name: Optional[str] = None
-    """Target Postgres database object (logical database) name for this table. This field is optional
-    in all scenarios.
+    """Target Postgres database object (logical database) name for this table.
 
     When creating a synced table in a registered Postgres catalog, the target Postgres database name
     is inferred to be that of the registered catalog. If this field is specified in this scenario,
     the Postgres database name MUST match that of the registered catalog (or the request will be
     rejected).
 
-    When creating a synced table in a standard catalog, the target database name is inferred to be
-    that of the standard catalog. In this scenario, specifying this field will allow targeting an
-    arbitrary postgres database."""
+    When creating a synced table in a standard catalog, this field is required. In this scenario,
+    specifying this field will allow targeting an arbitrary postgres database. Note that this has
+    implications for the `create_database_objects_is_missing` field in `spec`."""
 
     spec: Optional[SyncedTableSpec] = None
-    """Specification of a synced database table."""
-
-    table_serving_url: Optional[str] = None
-    """Data serving REST API URL for this table"""
 
     unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
     """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
@@ -508,8 +868,6 @@ class SyncedDatabaseTable:
             body["name"] = self.name
         if self.spec:
             body["spec"] = self.spec.as_dict()
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
             body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
         return body
@@ -527,8 +885,6 @@ class SyncedDatabaseTable:
             body["name"] = self.name
         if self.spec:
             body["spec"] = self.spec
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
             body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
         return body
@@ -542,7 +898,6 @@ class SyncedDatabaseTable:
             logical_database_name=d.get("logical_database_name", None),
             name=d.get("name", None),
             spec=_from_dict(d, "spec", SyncedTableSpec),
-            table_serving_url=d.get("table_serving_url", None),
             unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
         )
 
@@ -556,12 +911,11 @@ class SyncedTableContinuousUpdateStatus:
     """Progress of the initial data synchronization."""
 
     last_processed_commit_version: Optional[int] = None
-    """The last source table Delta version that was synced to the synced table. Note that this Delta
-    version may not be completely synced to the synced table yet."""
+    """The last source table Delta version that was successfully synced to the synced table."""
 
     timestamp: Optional[str] = None
-    """The timestamp of the last time any data was synchronized from the source table to the synced
-    table."""
+    """The end timestamp of the last time any data was synchronized from the source table to the synced
+    table. This is when the data is available in the synced table."""
 
     def as_dict(self) -> dict:
         """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
@@ -601,12 +955,12 @@ class SyncedTableFailedStatus:
     SYNCED_PIPELINE_FAILED state."""
 
     last_processed_commit_version: Optional[int] = None
-    """The last source table Delta version that was synced to the synced table. Note that this Delta
-    version may only be partially synced to the synced table. Only populated if the table is still
-    synced and available for serving."""
+    """The last source table Delta version that was successfully synced to the synced table. The last
+    source table Delta version that was synced to the synced table. Only populated if the table is
+    still synced and available for serving."""
 
     timestamp: Optional[str] = None
-    """The timestamp of the last time any data was synchronized from the source table to the synced
+    """The end timestamp of the last time any data was synchronized from the source table to the synced
     table. Only populated if the table is still synced and available for serving."""
 
     def as_dict(self) -> dict:
@@ -698,6 +1052,51 @@ class SyncedTablePipelineProgress:
         )
 
 
+@dataclass
+class SyncedTablePosition:
+    delta_table_sync_info: Optional[DeltaTableSyncInfo] = None
+
+    sync_end_timestamp: Optional[str] = None
+    """The end timestamp of the most recent successful synchronization. This is the time when the data
+    is available in the synced table."""
+
+    sync_start_timestamp: Optional[str] = None
+    """The starting timestamp of the most recent successful synchronization from the source table to
+    the destination (synced) table. Note this is the starting timestamp of the sync operation, not
+    the end time. E.g., for a batch, this is the time when the sync operation started."""
+
+    def as_dict(self) -> dict:
+        """Serializes the SyncedTablePosition into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.delta_table_sync_info:
+            body["delta_table_sync_info"] = self.delta_table_sync_info.as_dict()
+        if self.sync_end_timestamp is not None:
+            body["sync_end_timestamp"] = self.sync_end_timestamp
+        if self.sync_start_timestamp is not None:
+            body["sync_start_timestamp"] = self.sync_start_timestamp
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SyncedTablePosition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_table_sync_info:
+            body["delta_table_sync_info"] = self.delta_table_sync_info
+        if self.sync_end_timestamp is not None:
+            body["sync_end_timestamp"] = self.sync_end_timestamp
+        if self.sync_start_timestamp is not None:
+            body["sync_start_timestamp"] = self.sync_start_timestamp
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePosition:
+        """Deserializes the SyncedTablePosition from a dictionary."""
+        return cls(
+            delta_table_sync_info=_from_dict(d, "delta_table_sync_info", DeltaTableSyncInfo),
+            sync_end_timestamp=d.get("sync_end_timestamp", None),
+            sync_start_timestamp=d.get("sync_start_timestamp", None),
+        )
+
+
 @dataclass
 class SyncedTableProvisioningStatus:
     """Detailed status of a synced table. Shown if the synced table is in the
@@ -744,11 +1143,14 @@ class SyncedTableSpec:
     """If true, the synced table's logical database and schema resources in PG will be created if they
     do not already exist."""
 
-    new_pipeline_spec: Optional[NewPipelineSpec] = None
-    """Spec of new pipeline. Should be empty if pipeline_id is set"""
+    existing_pipeline_id: Optional[str] = None
+    """User-specified ID of a pre-existing pipeline to bin pack. This field is optional, and should be
+    empty if new_pipeline_spec is set. This field will only be set by the server in response
+    messages if it is specified in the request. The SyncedTableStatus message will always contain
+    the effective pipeline ID (either client provided or server generated), however."""
 
-    pipeline_id: Optional[str] = None
-    """ID of the associated pipeline. Should be empty if new_pipeline_spec is set"""
+    new_pipeline_spec: Optional[NewPipelineSpec] = None
+    """Spec of new pipeline. Should be empty if pipeline_id / existing_pipeline_id is set"""
 
     primary_key_columns: Optional[List[str]] = None
     """Primary Key columns to be used for data insert/update in the destination."""
@@ -767,10 +1169,10 @@ class SyncedTableSpec:
         body = {}
         if self.create_database_objects_if_missing is not None:
             body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
+        if self.existing_pipeline_id is not None:
+            body["existing_pipeline_id"] = self.existing_pipeline_id
         if self.new_pipeline_spec:
             body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict()
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
         if self.primary_key_columns:
             body["primary_key_columns"] = [v for v in self.primary_key_columns]
         if self.scheduling_policy is not None:
@@ -786,10 +1188,10 @@ class SyncedTableSpec:
         body = {}
         if self.create_database_objects_if_missing is not None:
             body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
+        if self.existing_pipeline_id is not None:
+            body["existing_pipeline_id"] = self.existing_pipeline_id
         if self.new_pipeline_spec:
             body["new_pipeline_spec"] = self.new_pipeline_spec
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
         if self.primary_key_columns:
             body["primary_key_columns"] = self.primary_key_columns
         if self.scheduling_policy is not None:
@@ -805,8 +1207,8 @@ class SyncedTableSpec:
         """Deserializes the SyncedTableSpec from a dictionary."""
         return cls(
             create_database_objects_if_missing=d.get("create_database_objects_if_missing", None),
+            existing_pipeline_id=d.get("existing_pipeline_id", None),
             new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec),
-            pipeline_id=d.get("pipeline_id", None),
             primary_key_columns=d.get("primary_key_columns", None),
             scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy),
             source_table_full_name=d.get("source_table_full_name", None),
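In short, SyncedTableSpec.pipeline_id is replaced by existing_pipeline_id, which bin packs the synced table into a pre-existing pipeline; new_pipeline_spec remains the alternative. A hedged sketch of the two mutually exclusive spec variants (catalog, table, and pipeline identifiers are illustrative):

    from databricks.sdk.service.database import NewPipelineSpec, SyncedTableSpec

    # Variant 1: have the service create a dedicated pipeline for the synced table.
    spec_with_new_pipeline = SyncedTableSpec(
        source_table_full_name="main.default.source_table",
        primary_key_columns=["id"],
        create_database_objects_if_missing=True,
        new_pipeline_spec=NewPipelineSpec(),
    )

    # Variant 2: bin pack into an existing pipeline instead (leave new_pipeline_spec unset).
    spec_with_existing_pipeline = SyncedTableSpec(
        source_table_full_name="main.default.source_table",
        primary_key_columns=["id"],
        existing_pipeline_id="0123456789abcdef",
    )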
@@ -835,26 +1237,35 @@ class SyncedTableStatus:
     """Status of a synced table."""
 
     continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None
-    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE
-    or the SYNCED_UPDATING_PIPELINE_RESOURCES state."""
 
     detailed_state: Optional[SyncedTableState] = None
     """The state of the synced table."""
 
     failed_status: Optional[SyncedTableFailedStatus] = None
-    """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the
-    SYNCED_PIPELINE_FAILED state."""
+
+    last_sync: Optional[SyncedTablePosition] = None
+    """Summary of the last successful synchronization from source to destination.
+
+    Will always be present if there has been a successful sync. Even if the most recent syncs have
+    failed.
+
+    Limitation: The only exception is if the synced table is doing a FULL REFRESH, then the last
+    sync information will not be available until the full refresh is complete. This limitation will
+    be addressed in a future version.
+
+    This top-level field is a convenience for consumers who want easy access to last sync
+    information without having to traverse detailed_status."""
 
     message: Optional[str] = None
     """A text description of the current state of the synced table."""
 
+    pipeline_id: Optional[str] = None
+    """ID of the associated pipeline. The pipeline ID may have been provided by the client (in the case
+    of bin packing), or generated by the server (when creating a new pipeline)."""
+
     provisioning_status: Optional[SyncedTableProvisioningStatus] = None
-    """Detailed status of a synced table. Shown if the synced table is in the
-    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
 
     triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None
-    """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE
-    or the SYNCED_NO_PENDING_UPDATE state."""
 
     def as_dict(self) -> dict:
         """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body."""
@@ -865,8 +1276,12 @@ class SyncedTableStatus:
             body["detailed_state"] = self.detailed_state.value
         if self.failed_status:
             body["failed_status"] = self.failed_status.as_dict()
+        if self.last_sync:
+            body["last_sync"] = self.last_sync.as_dict()
         if self.message is not None:
             body["message"] = self.message
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         if self.provisioning_status:
             body["provisioning_status"] = self.provisioning_status.as_dict()
         if self.triggered_update_status:
@@ -882,8 +1297,12 @@ class SyncedTableStatus:
             body["detailed_state"] = self.detailed_state
         if self.failed_status:
             body["failed_status"] = self.failed_status
+        if self.last_sync:
+            body["last_sync"] = self.last_sync
         if self.message is not None:
             body["message"] = self.message
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         if self.provisioning_status:
             body["provisioning_status"] = self.provisioning_status
         if self.triggered_update_status:
@@ -897,7 +1316,9 @@ class SyncedTableStatus:
             continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus),
             detailed_state=_enum(d, "detailed_state", SyncedTableState),
             failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus),
+            last_sync=_from_dict(d, "last_sync", SyncedTablePosition),
             message=d.get("message", None),
+            pipeline_id=d.get("pipeline_id", None),
             provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus),
             triggered_update_status=_from_dict(d, "triggered_update_status", SyncedTableTriggeredUpdateStatus),
         )
@@ -909,12 +1330,11 @@ class SyncedTableTriggeredUpdateStatus:
     or the SYNCED_NO_PENDING_UPDATE state."""
 
     last_processed_commit_version: Optional[int] = None
-    """The last source table Delta version that was synced to the synced table. Note that this Delta
-    version may not be completely synced to the synced table yet."""
+    """The last source table Delta version that was successfully synced to the synced table."""
 
     timestamp: Optional[str] = None
-    """The timestamp of the last time any data was synchronized from the source table to the synced
-    table."""
+    """The end timestamp of the last time any data was synchronized from the source table to the synced
+    table. This is when the data is available in the synced table."""
 
     triggered_update_progress: Optional[SyncedTablePipelineProgress] = None
     """Progress of the active data synchronization pipeline."""
@@ -977,7 +1397,7 @@ class DatabaseAPI:
         """Create a Database Instance.
 
         :param database_instance: :class:`DatabaseInstance`
-          A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
+          Instance to create.
 
         :returns: :class:`DatabaseInstance`
         """
@@ -990,11 +1410,30 @@ class DatabaseAPI:
         res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
         return DatabaseInstance.from_dict(res)
 
+    def create_database_instance_role(
+        self, instance_name: str, database_instance_role: DatabaseInstanceRole
+    ) -> DatabaseInstanceRole:
+        """Create a role for a Database Instance.
+
+        :param instance_name: str
+        :param database_instance_role: :class:`DatabaseInstanceRole`
+
+        :returns: :class:`DatabaseInstanceRole`
+        """
+        body = database_instance_role.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", f"/api/2.0/database/instances/{instance_name}/roles", body=body, headers=headers)
+        return DatabaseInstanceRole.from_dict(res)
+
     def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
-        """Create a Database Table.
+        """Create a Database Table. Useful for registering pre-existing PG tables in UC. See
+        CreateSyncedDatabaseTable for creating synced tables in PG from a source table in UC.
 
         :param table: :class:`DatabaseTable`
-          Next field marker: 13
 
        :returns: :class:`DatabaseTable`
         """
@@ -1011,7 +1450,6 @@ class DatabaseAPI:
         """Create a Synced Database Table.
 
         :param synced_table: :class:`SyncedDatabaseTable`
-          Next field marker: 12
 
         :returns: :class:`SyncedDatabaseTable`
         """
@@ -1047,10 +1485,12 @@ class DatabaseAPI:
          By default, a instance cannot be deleted if it has descendant instances created via PITR. If this
          flag is specified as true, all descendent instances will be deleted as well.
        :param purge: bool (optional)
-          If false, the database instance is soft deleted. Soft deleted instances behave as if they are
-          deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by
-          calling the undelete API for a limited time. If true, the database instance is hard deleted and
-          cannot be undeleted.
+          Note purge=false is in development. If false, the database instance is soft deleted (implementation
+          pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD
+          operations nor connected to. However they can be undeleted by calling the undelete API for a limited
+          time (implementation pending). If true, the database instance is hard deleted and cannot be
+          undeleted. For the time being, setting this value to true is required to delete an instance (soft
+          delete is not yet supported).
 
 
         """
@@ -1066,6 +1506,38 @@ class DatabaseAPI:
 
         self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers)
 
+    def delete_database_instance_role(
+        self,
+        instance_name: str,
+        name: str,
+        *,
+        allow_missing: Optional[bool] = None,
+        reassign_owned_to: Optional[str] = None,
+    ):
+        """Deletes a role for a Database Instance.
+
+        :param instance_name: str
+        :param name: str
+        :param allow_missing: bool (optional)
+          This is the AIP standard name for the equivalent of Postgres' `IF EXISTS` option
+        :param reassign_owned_to: str (optional)
+
+
+        """
+
+        query = {}
+        if allow_missing is not None:
+            query["allow_missing"] = allow_missing
+        if reassign_owned_to is not None:
+            query["reassign_owned_to"] = reassign_owned_to
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE", f"/api/2.0/database/instances/{instance_name}/roles/{name}", query=query, headers=headers
+        )
+
     def delete_database_table(self, name: str):
         """Delete a Database Table.
 
@@ -1114,10 +1586,17 @@ class DatabaseAPI:
         return DatabaseInstance.from_dict(res)
 
     def generate_database_credential(
-        self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None
+        self,
+        *,
+        claims: Optional[List[RequestedClaims]] = None,
+        instance_names: Optional[List[str]] = None,
+        request_id: Optional[str] = None,
     ) -> DatabaseCredential:
         """Generates a credential that can be used to access database instances.
 
+        :param claims: List[:class:`RequestedClaims`] (optional)
+          The returned token will be scoped to the union of instance_names and instances containing the
+          specified UC tables, so instance_names is allowed to be empty.
        :param instance_names: List[str] (optional)
          Instances to which the token will be scoped.
        :param request_id: str (optional)
@@ -1125,6 +1604,8 @@ class DatabaseAPI:
         :returns: :class:`DatabaseCredential`
         """
         body = {}
+        if claims is not None:
+            body["claims"] = [v.as_dict() for v in claims]
         if instance_names is not None:
             body["instance_names"] = [v for v in instance_names]
         if request_id is not None:
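A hedged sketch of the claims-based form: the credential is scoped to the instances backing the named UC tables, so instance_names can be left empty, and the returned DatabaseCredential now also carries expiration_time alongside token. The table name is illustrative and w.database is an assumed accessor:

    import uuid

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.database import (
        RequestedClaims,
        RequestedClaimsPermissionSet,
        RequestedResource,
    )

    w = WorkspaceClient()

    cred = w.database.generate_database_credential(
        request_id=str(uuid.uuid4()),
        claims=[
            RequestedClaims(
                permission_set=RequestedClaimsPermissionSet.READ_ONLY,
                resources=[RequestedResource(table_name="main.default.my_synced_table")],
            )
        ],
    )
    print(cred.expiration_time)  # new field; cred.token holds the credential itself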
@@ -1168,6 +1649,22 @@ class DatabaseAPI:
         res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers)
         return DatabaseInstance.from_dict(res)
 
+    def get_database_instance_role(self, instance_name: str, name: str) -> DatabaseInstanceRole:
+        """Gets a role for a Database Instance.
+
+        :param instance_name: str
+        :param name: str
+
+        :returns: :class:`DatabaseInstanceRole`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/database/instances/{instance_name}/roles/{name}", headers=headers)
+        return DatabaseInstanceRole.from_dict(res)
+
     def get_database_table(self, name: str) -> DatabaseTable:
         """Get a Database Table.
 
@@ -1198,6 +1695,40 @@ class DatabaseAPI:
         res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers)
         return SyncedDatabaseTable.from_dict(res)
 
+    def list_database_instance_roles(
+        self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[DatabaseInstanceRole]:
+        """START OF PG ROLE APIs Section
+
+        :param instance_name: str
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of Database Instances. Requests first page if absent.
+
+        :returns: Iterator over :class:`DatabaseInstanceRole`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET", f"/api/2.0/database/instances/{instance_name}/roles", query=query, headers=headers
+            )
+            if "database_instance_roles" in json:
+                for v in json["database_instance_roles"]:
+                    yield DatabaseInstanceRole.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
     def list_database_instances(
         self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[DatabaseInstance]:
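Because the generated method follows next_page_token itself and yields DatabaseInstanceRole objects, callers only need a simple loop; a hedged sketch (instance name illustrative, w.database assumed):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for role in w.database.list_database_instance_roles(instance_name="my-instance", page_size=50):
        print(role.name, role.identity_type)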
@@ -1237,7 +1768,6 @@ class DatabaseAPI:
        :param name: str
          The name of the instance. This is the unique identifier for the instance.
        :param database_instance: :class:`DatabaseInstance`
-          A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
        :param update_mask: str
          The list of fields to update.