databricks-sdk 0.57.0__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of databricks-sdk might be problematic.

Files changed (30)
  1. databricks/sdk/__init__.py +25 -4
  2. databricks/sdk/service/aibuilder.py +0 -36
  3. databricks/sdk/service/apps.py +1 -3
  4. databricks/sdk/service/billing.py +53 -23
  5. databricks/sdk/service/catalog.py +1692 -150
  6. databricks/sdk/service/cleanrooms.py +3 -22
  7. databricks/sdk/service/compute.py +245 -322
  8. databricks/sdk/service/dashboards.py +129 -162
  9. databricks/sdk/service/database.py +612 -97
  10. databricks/sdk/service/iam.py +3 -3
  11. databricks/sdk/service/jobs.py +6 -129
  12. databricks/sdk/service/marketplace.py +3 -2
  13. databricks/sdk/service/ml.py +713 -262
  14. databricks/sdk/service/oauth2.py +0 -1
  15. databricks/sdk/service/pipelines.py +12 -29
  16. databricks/sdk/service/provisioning.py +7 -125
  17. databricks/sdk/service/qualitymonitorv2.py +0 -18
  18. databricks/sdk/service/serving.py +39 -13
  19. databricks/sdk/service/settings.py +11 -128
  20. databricks/sdk/service/sharing.py +3 -9
  21. databricks/sdk/service/sql.py +94 -74
  22. databricks/sdk/service/vectorsearch.py +0 -19
  23. databricks/sdk/service/workspace.py +2 -6
  24. databricks/sdk/version.py +1 -1
  25. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/METADATA +1 -1
  26. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/RECORD +30 -30
  27. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/WHEEL +0 -0
  28. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/LICENSE +0 -0
  29. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/NOTICE +0 -0
  30. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.58.0.dist-info}/top_level.txt +0 -0
@@ -113,24 +113,67 @@ class DatabaseInstance:
  capacity: Optional[str] = None
  """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"."""

+ child_instance_refs: Optional[List[DatabaseInstanceRef]] = None
+ """The refs of the child instances. This is only available if the instance is a parent instance."""
+
  creation_time: Optional[str] = None
  """The timestamp when the instance was created."""

  creator: Optional[str] = None
  """The email of the creator of the instance."""

+ effective_enable_readable_secondaries: Optional[bool] = None
+ """xref AIP-129. `enable_readable_secondaries` is owned by the client, while
+ `effective_enable_readable_secondaries` is owned by the server. `enable_readable_secondaries`
+ will only be set in Create/Update response messages if and only if the user provides the field
+ via the request. `effective_enable_readable_secondaries` on the other hand will always be set
+ in all response messages (Create/Update/Get/List)."""
+
+ effective_node_count: Optional[int] = None
+ """xref AIP-129. `node_count` is owned by the client, while `effective_node_count` is owned by the
+ server. `node_count` will only be set in Create/Update response messages if and only if the user
+ provides the field via the request. `effective_node_count` on the other hand will always be set
+ in all response messages (Create/Update/Get/List)."""
+
+ effective_retention_window_in_days: Optional[int] = None
+ """xref AIP-129. `retention_window_in_days` is owned by the client, while
+ `effective_retention_window_in_days` is owned by the server. `retention_window_in_days` will
+ only be set in Create/Update response messages if and only if the user provides the field via
+ the request. `effective_retention_window_in_days` on the other hand will always be set in all
+ response messages (Create/Update/Get/List)."""
+
  effective_stopped: Optional[bool] = None
  """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the
  server. `stopped` will only be set in Create/Update response messages if and only if the user
  provides the field via the request. `effective_stopped` on the other hand will always be set in
  all response messages (Create/Update/Get/List)."""

+ enable_readable_secondaries: Optional[bool] = None
+ """Whether to enable secondaries to serve read-only traffic. Defaults to false."""
+
+ node_count: Optional[int] = None
+ """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults
+ to 1 primary and 0 secondaries."""
+
+ parent_instance_ref: Optional[DatabaseInstanceRef] = None
+ """The ref of the parent instance. This is only available if the instance is a child instance. Input:
+ For specifying the parent instance to create a child instance. Optional. Output: Only populated
+ if provided as input to create a child instance."""
+
  pg_version: Optional[str] = None
  """The version of Postgres running on the instance."""

+ read_only_dns: Optional[str] = None
+ """The DNS endpoint to connect to the instance for read only access. This is only available if
+ enable_readable_secondaries is true."""
+
  read_write_dns: Optional[str] = None
  """The DNS endpoint to connect to the instance for read+write access."""

+ retention_window_in_days: Optional[int] = None
+ """The retention window for the instance. This is the time window in days for which the historical
+ data is retained. The default value is 7 days. Valid values are 2 to 35 days."""
+
  state: Optional[DatabaseInstanceState] = None
  """The current state of the instance."""

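Note: the new DatabaseInstance fields above (node_count, enable_readable_secondaries, retention_window_in_days, read_only_dns) surface readable secondaries and retention control on an instance. A minimal sketch of how they might be used, assuming the DatabaseAPI is exposed as `w.database` on WorkspaceClient; the instance name and values are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.database import DatabaseInstance

    w = WorkspaceClient()
    # Request a 3-node instance (1 primary + 2 readable secondaries) with a 14-day retention window.
    instance = w.database.create_database_instance(
        DatabaseInstance(
            name="my-instance",            # hypothetical instance name
            capacity="CU_2",
            node_count=3,
            enable_readable_secondaries=True,
            retention_window_in_days=14,   # valid values are 2 to 35 days
        )
    )
    # read_only_dns is populated only when readable secondaries are enabled.
    print(instance.read_only_dns)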
@@ -145,18 +188,36 @@ class DatabaseInstance:
  body = {}
  if self.capacity is not None:
  body["capacity"] = self.capacity
+ if self.child_instance_refs:
+ body["child_instance_refs"] = [v.as_dict() for v in self.child_instance_refs]
  if self.creation_time is not None:
  body["creation_time"] = self.creation_time
  if self.creator is not None:
  body["creator"] = self.creator
+ if self.effective_enable_readable_secondaries is not None:
+ body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries
+ if self.effective_node_count is not None:
+ body["effective_node_count"] = self.effective_node_count
+ if self.effective_retention_window_in_days is not None:
+ body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
  if self.effective_stopped is not None:
  body["effective_stopped"] = self.effective_stopped
+ if self.enable_readable_secondaries is not None:
+ body["enable_readable_secondaries"] = self.enable_readable_secondaries
  if self.name is not None:
  body["name"] = self.name
+ if self.node_count is not None:
+ body["node_count"] = self.node_count
+ if self.parent_instance_ref:
+ body["parent_instance_ref"] = self.parent_instance_ref.as_dict()
  if self.pg_version is not None:
  body["pg_version"] = self.pg_version
+ if self.read_only_dns is not None:
+ body["read_only_dns"] = self.read_only_dns
  if self.read_write_dns is not None:
  body["read_write_dns"] = self.read_write_dns
+ if self.retention_window_in_days is not None:
+ body["retention_window_in_days"] = self.retention_window_in_days
  if self.state is not None:
  body["state"] = self.state.value
  if self.stopped is not None:
@@ -170,18 +231,36 @@ class DatabaseInstance:
  body = {}
  if self.capacity is not None:
  body["capacity"] = self.capacity
+ if self.child_instance_refs:
+ body["child_instance_refs"] = self.child_instance_refs
  if self.creation_time is not None:
  body["creation_time"] = self.creation_time
  if self.creator is not None:
  body["creator"] = self.creator
+ if self.effective_enable_readable_secondaries is not None:
+ body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries
+ if self.effective_node_count is not None:
+ body["effective_node_count"] = self.effective_node_count
+ if self.effective_retention_window_in_days is not None:
+ body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
  if self.effective_stopped is not None:
  body["effective_stopped"] = self.effective_stopped
+ if self.enable_readable_secondaries is not None:
+ body["enable_readable_secondaries"] = self.enable_readable_secondaries
  if self.name is not None:
  body["name"] = self.name
+ if self.node_count is not None:
+ body["node_count"] = self.node_count
+ if self.parent_instance_ref:
+ body["parent_instance_ref"] = self.parent_instance_ref
  if self.pg_version is not None:
  body["pg_version"] = self.pg_version
+ if self.read_only_dns is not None:
+ body["read_only_dns"] = self.read_only_dns
  if self.read_write_dns is not None:
  body["read_write_dns"] = self.read_write_dns
+ if self.retention_window_in_days is not None:
+ body["retention_window_in_days"] = self.retention_window_in_days
  if self.state is not None:
  body["state"] = self.state
  if self.stopped is not None:
@@ -195,18 +274,216 @@ class DatabaseInstance:
  """Deserializes the DatabaseInstance from a dictionary."""
  return cls(
  capacity=d.get("capacity", None),
+ child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef),
  creation_time=d.get("creation_time", None),
  creator=d.get("creator", None),
+ effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None),
+ effective_node_count=d.get("effective_node_count", None),
+ effective_retention_window_in_days=d.get("effective_retention_window_in_days", None),
  effective_stopped=d.get("effective_stopped", None),
+ enable_readable_secondaries=d.get("enable_readable_secondaries", None),
  name=d.get("name", None),
+ node_count=d.get("node_count", None),
+ parent_instance_ref=_from_dict(d, "parent_instance_ref", DatabaseInstanceRef),
  pg_version=d.get("pg_version", None),
+ read_only_dns=d.get("read_only_dns", None),
  read_write_dns=d.get("read_write_dns", None),
+ retention_window_in_days=d.get("retention_window_in_days", None),
  state=_enum(d, "state", DatabaseInstanceState),
  stopped=d.get("stopped", None),
  uid=d.get("uid", None),
  )


+ @dataclass
+ class DatabaseInstanceRef:
+ """DatabaseInstanceRef is a reference to a database instance. It is used in the DatabaseInstance
+ object to refer to the parent instance of an instance and to refer to the child instances of an
+ instance. To specify as a parent instance during creation of an instance, the lsn and
+ branch_time fields are optional. If not specified, the child instance will be created from the
+ latest lsn of the parent. If both lsn and branch_time are specified, the lsn will be used to
+ create the child instance."""
+
+ branch_time: Optional[str] = None
+ """Branch time of the ref database instance. For a parent ref instance, this is the point in time
+ on the parent instance from which the instance was created. For a child ref instance, this is
+ the point in time on the instance from which the child instance was created. Input: For
+ specifying the point in time to create a child instance. Optional. Output: Only populated if
+ provided as input to create a child instance."""
+
+ effective_lsn: Optional[str] = None
+ """xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server. `lsn`
+ will only be set in Create/Update response messages if and only if the user provides the field
+ via the request. `effective_lsn` on the other hand will always be set in all response messages
+ (Create/Update/Get/List). For a parent ref instance, this is the LSN on the parent instance from
+ which the instance was created. For a child ref instance, this is the LSN on the instance from
+ which the child instance was created."""
+
+ lsn: Optional[str] = None
+ """User-specified WAL LSN of the ref database instance.
+
+ Input: For specifying the WAL LSN to create a child instance. Optional. Output: Only populated
+ if provided as input to create a child instance."""
+
+ name: Optional[str] = None
+ """Name of the ref database instance."""
+
+ uid: Optional[str] = None
+ """Id of the ref database instance."""
+
+ def as_dict(self) -> dict:
+ """Serializes the DatabaseInstanceRef into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.branch_time is not None:
+ body["branch_time"] = self.branch_time
+ if self.effective_lsn is not None:
+ body["effective_lsn"] = self.effective_lsn
+ if self.lsn is not None:
+ body["lsn"] = self.lsn
+ if self.name is not None:
+ body["name"] = self.name
+ if self.uid is not None:
+ body["uid"] = self.uid
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DatabaseInstanceRef into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.branch_time is not None:
+ body["branch_time"] = self.branch_time
+ if self.effective_lsn is not None:
+ body["effective_lsn"] = self.effective_lsn
+ if self.lsn is not None:
+ body["lsn"] = self.lsn
+ if self.name is not None:
+ body["name"] = self.name
+ if self.uid is not None:
+ body["uid"] = self.uid
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef:
+ """Deserializes the DatabaseInstanceRef from a dictionary."""
+ return cls(
+ branch_time=d.get("branch_time", None),
+ effective_lsn=d.get("effective_lsn", None),
+ lsn=d.get("lsn", None),
+ name=d.get("name", None),
+ uid=d.get("uid", None),
+ )
+
+
+ @dataclass
+ class DatabaseInstanceRole:
+ """A DatabaseInstanceRole represents a Postgres role in a database instance."""
+
+ attributes: Optional[DatabaseInstanceRoleAttributes] = None
+ """API-exposed Postgres role attributes"""
+
+ identity_type: Optional[DatabaseInstanceRoleIdentityType] = None
+ """The type of the role."""
+
+ membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None
+ """An enum value for a standard role that this role is a member of."""
+
+ name: Optional[str] = None
+ """The name of the role. This is the unique identifier for the role in an instance."""
+
+ def as_dict(self) -> dict:
+ """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.attributes:
+ body["attributes"] = self.attributes.as_dict()
+ if self.identity_type is not None:
+ body["identity_type"] = self.identity_type.value
+ if self.membership_role is not None:
+ body["membership_role"] = self.membership_role.value
+ if self.name is not None:
+ body["name"] = self.name
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DatabaseInstanceRole into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.attributes:
+ body["attributes"] = self.attributes
+ if self.identity_type is not None:
+ body["identity_type"] = self.identity_type
+ if self.membership_role is not None:
+ body["membership_role"] = self.membership_role
+ if self.name is not None:
+ body["name"] = self.name
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRole:
+ """Deserializes the DatabaseInstanceRole from a dictionary."""
+ return cls(
+ attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes),
+ identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType),
+ membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole),
+ name=d.get("name", None),
+ )
+
+
+ @dataclass
+ class DatabaseInstanceRoleAttributes:
+ """Attributes that can be granted to a Postgres role. We are only implementing a subset for now,
+ see xref: https://www.postgresql.org/docs/16/sql-createrole.html The values follow Postgres
+ keyword naming e.g. CREATEDB, BYPASSRLS, etc. which is why they don't include typical
+ underscores between words. We were requested to make this a nested object/struct representation
+ since these are knobs from an external spec."""
+
+ bypassrls: Optional[bool] = None
+
+ createdb: Optional[bool] = None
+
+ createrole: Optional[bool] = None
+
+ def as_dict(self) -> dict:
+ """Serializes the DatabaseInstanceRoleAttributes into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.bypassrls is not None:
+ body["bypassrls"] = self.bypassrls
+ if self.createdb is not None:
+ body["createdb"] = self.createdb
+ if self.createrole is not None:
+ body["createrole"] = self.createrole
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the DatabaseInstanceRoleAttributes into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.bypassrls is not None:
+ body["bypassrls"] = self.bypassrls
+ if self.createdb is not None:
+ body["createdb"] = self.createdb
+ if self.createrole is not None:
+ body["createrole"] = self.createrole
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRoleAttributes:
+ """Deserializes the DatabaseInstanceRoleAttributes from a dictionary."""
+ return cls(
+ bypassrls=d.get("bypassrls", None), createdb=d.get("createdb", None), createrole=d.get("createrole", None)
+ )
+
+
+ class DatabaseInstanceRoleIdentityType(Enum):
+
+ GROUP = "GROUP"
+ PG_ONLY = "PG_ONLY"
+ SERVICE_PRINCIPAL = "SERVICE_PRINCIPAL"
+ USER = "USER"
+
+
+ class DatabaseInstanceRoleMembershipRole(Enum):
+ """Roles that the DatabaseInstanceRole can be a member of."""
+
+ DATABRICKS_SUPERUSER = "DATABRICKS_SUPERUSER"
+
+
  class DatabaseInstanceState(Enum):

  AVAILABLE = "AVAILABLE"
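Note: DatabaseInstanceRef is what makes child ("branched") instances expressible: a parent ref on create, child refs on read. A hedged sketch of branching from a parent instance, assuming the `w.database` accessor and the existing get_database_instance getter; instance names are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.database import DatabaseInstance, DatabaseInstanceRef

    w = WorkspaceClient()
    # Create a child instance from the parent's latest LSN (lsn/branch_time omitted).
    child = w.database.create_database_instance(
        DatabaseInstance(
            name="my-instance-dev",        # hypothetical child instance name
            capacity="CU_1",
            parent_instance_ref=DatabaseInstanceRef(name="my-instance"),
        )
    )
    # On read, parent/child relationships are reported through the same ref type.
    parent = w.database.get_database_instance("my-instance")
    for ref in parent.child_instance_refs or []:
        print(ref.name, ref.effective_lsn)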
@@ -231,18 +508,15 @@ class DatabaseTable:
  MUST match that of the registered catalog (or the request will be rejected)."""

  logical_database_name: Optional[str] = None
- """Target Postgres database object (logical database) name for this table. This field is optional
- in all scenarios.
+ """Target Postgres database object (logical database) name for this table.

  When creating a table in a registered Postgres catalog, the target Postgres database name is
  inferred to be that of the registered catalog. If this field is specified in this scenario, the
  Postgres database name MUST match that of the registered catalog (or the request will be
  rejected).

- When creating a table in a standard catalog, the target database name is inferred to be that of
- the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary
- postgres database. Note that this has implications for the `create_database_objects_is_missing`
- field in `spec`."""
+ When creating a table in a standard catalog, this field is required. In this scenario,
+ specifying this field will allow targeting an arbitrary postgres database."""

  def as_dict(self) -> dict:
  """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
@@ -277,81 +551,49 @@ class DatabaseTable:


  @dataclass
- class DeleteDatabaseCatalogResponse:
- def as_dict(self) -> dict:
- """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body."""
- body = {}
- return body
-
- def as_shallow_dict(self) -> dict:
- """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes."""
- body = {}
- return body
-
- @classmethod
- def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
- """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
- return cls()
-
-
- @dataclass
- class DeleteDatabaseInstanceResponse:
- def as_dict(self) -> dict:
- """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body."""
- body = {}
- return body
-
- def as_shallow_dict(self) -> dict:
- """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes."""
- body = {}
- return body
-
- @classmethod
- def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
- """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
- return cls()
-
-
- @dataclass
- class DeleteDatabaseTableResponse:
- def as_dict(self) -> dict:
- """Serializes the DeleteDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
- body = {}
- return body
-
- def as_shallow_dict(self) -> dict:
- """Serializes the DeleteDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
- body = {}
- return body
+ class DeltaTableSyncInfo:
+ delta_commit_timestamp: Optional[str] = None
+ """The timestamp when the above Delta version was committed in the source Delta table. Note: This
+ is the Delta commit time, not the time the data was written to the synced table."""

- @classmethod
- def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse:
- """Deserializes the DeleteDatabaseTableResponse from a dictionary."""
- return cls()
+ delta_commit_version: Optional[int] = None
+ """The Delta Lake commit version that was last successfully synced."""

-
- @dataclass
- class DeleteSyncedDatabaseTableResponse:
  def as_dict(self) -> dict:
- """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
+ """Serializes the DeltaTableSyncInfo into a dictionary suitable for use as a JSON request body."""
  body = {}
+ if self.delta_commit_timestamp is not None:
+ body["delta_commit_timestamp"] = self.delta_commit_timestamp
+ if self.delta_commit_version is not None:
+ body["delta_commit_version"] = self.delta_commit_version
  return body

  def as_shallow_dict(self) -> dict:
- """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
+ """Serializes the DeltaTableSyncInfo into a shallow dictionary of its immediate attributes."""
  body = {}
+ if self.delta_commit_timestamp is not None:
+ body["delta_commit_timestamp"] = self.delta_commit_timestamp
+ if self.delta_commit_version is not None:
+ body["delta_commit_version"] = self.delta_commit_version
  return body

  @classmethod
- def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse:
- """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary."""
- return cls()
+ def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo:
+ """Deserializes the DeltaTableSyncInfo from a dictionary."""
+ return cls(
+ delta_commit_timestamp=d.get("delta_commit_timestamp", None),
+ delta_commit_version=d.get("delta_commit_version", None),
+ )


  @dataclass
  class GenerateDatabaseCredentialRequest:
  """Generates a credential that can be used to access database instances"""

+ claims: Optional[List[RequestedClaims]] = None
+ """The returned token will be scoped to the union of instance_names and instances containing the
+ specified UC tables, so instance_names is allowed to be empty."""
+
  instance_names: Optional[List[str]] = None
  """Instances to which the token will be scoped."""

@@ -360,6 +602,8 @@ class GenerateDatabaseCredentialRequest:
  def as_dict(self) -> dict:
  """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body."""
  body = {}
+ if self.claims:
+ body["claims"] = [v.as_dict() for v in self.claims]
  if self.instance_names:
  body["instance_names"] = [v for v in self.instance_names]
  if self.request_id is not None:
@@ -369,6 +613,8 @@ class GenerateDatabaseCredentialRequest:
  def as_shallow_dict(self) -> dict:
  """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes."""
  body = {}
+ if self.claims:
+ body["claims"] = self.claims
  if self.instance_names:
  body["instance_names"] = self.instance_names
  if self.request_id is not None:
@@ -378,7 +624,46 @@ class GenerateDatabaseCredentialRequest:
  @classmethod
  def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest:
  """Deserializes the GenerateDatabaseCredentialRequest from a dictionary."""
- return cls(instance_names=d.get("instance_names", None), request_id=d.get("request_id", None))
+ return cls(
+ claims=_repeated_dict(d, "claims", RequestedClaims),
+ instance_names=d.get("instance_names", None),
+ request_id=d.get("request_id", None),
+ )
+
+
+ @dataclass
+ class ListDatabaseInstanceRolesResponse:
+ database_instance_roles: Optional[List[DatabaseInstanceRole]] = None
+ """List of database instance roles."""
+
+ next_page_token: Optional[str] = None
+ """Pagination token to request the next page of instances."""
+
+ def as_dict(self) -> dict:
+ """Serializes the ListDatabaseInstanceRolesResponse into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.database_instance_roles:
+ body["database_instance_roles"] = [v.as_dict() for v in self.database_instance_roles]
+ if self.next_page_token is not None:
+ body["next_page_token"] = self.next_page_token
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the ListDatabaseInstanceRolesResponse into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.database_instance_roles:
+ body["database_instance_roles"] = self.database_instance_roles
+ if self.next_page_token is not None:
+ body["next_page_token"] = self.next_page_token
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstanceRolesResponse:
+ """Deserializes the ListDatabaseInstanceRolesResponse from a dictionary."""
+ return cls(
+ database_instance_roles=_repeated_dict(d, "database_instance_roles", DatabaseInstanceRole),
+ next_page_token=d.get("next_page_token", None),
+ )


  @dataclass
@@ -463,6 +748,77 @@ class ProvisioningInfoState(Enum):
  UPDATING = "UPDATING"


+ @dataclass
+ class RequestedClaims:
+ permission_set: Optional[RequestedClaimsPermissionSet] = None
+
+ resources: Optional[List[RequestedResource]] = None
+
+ def as_dict(self) -> dict:
+ """Serializes the RequestedClaims into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.permission_set is not None:
+ body["permission_set"] = self.permission_set.value
+ if self.resources:
+ body["resources"] = [v.as_dict() for v in self.resources]
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the RequestedClaims into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.permission_set is not None:
+ body["permission_set"] = self.permission_set
+ if self.resources:
+ body["resources"] = self.resources
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> RequestedClaims:
+ """Deserializes the RequestedClaims from a dictionary."""
+ return cls(
+ permission_set=_enum(d, "permission_set", RequestedClaimsPermissionSet),
+ resources=_repeated_dict(d, "resources", RequestedResource),
+ )
+
+
+ class RequestedClaimsPermissionSet(Enum):
+ """Might add WRITE in the future"""
+
+ READ_ONLY = "READ_ONLY"
+
+
+ @dataclass
+ class RequestedResource:
+ table_name: Optional[str] = None
+
+ unspecified_resource_name: Optional[str] = None
+
+ def as_dict(self) -> dict:
+ """Serializes the RequestedResource into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.table_name is not None:
+ body["table_name"] = self.table_name
+ if self.unspecified_resource_name is not None:
+ body["unspecified_resource_name"] = self.unspecified_resource_name
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the RequestedResource into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.table_name is not None:
+ body["table_name"] = self.table_name
+ if self.unspecified_resource_name is not None:
+ body["unspecified_resource_name"] = self.unspecified_resource_name
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> RequestedResource:
+ """Deserializes the RequestedResource from a dictionary."""
+ return cls(
+ table_name=d.get("table_name", None), unspecified_resource_name=d.get("unspecified_resource_name", None)
+ )
+
+
  @dataclass
  class SyncedDatabaseTable:
  """Next field marker: 12"""
@@ -481,20 +837,18 @@ class SyncedDatabaseTable:
  rejected)."""

  logical_database_name: Optional[str] = None
- """Target Postgres database object (logical database) name for this table. This field is optional
- in all scenarios.
+ """Target Postgres database object (logical database) name for this table.

  When creating a synced table in a registered Postgres catalog, the target Postgres database name
  is inferred to be that of the registered catalog. If this field is specified in this scenario,
  the Postgres database name MUST match that of the registered catalog (or the request will be
  rejected).

- When creating a synced table in a standard catalog, the target database name is inferred to be
- that of the standard catalog. In this scenario, specifying this field will allow targeting an
- arbitrary postgres database."""
+ When creating a synced table in a standard catalog, this field is required. In this scenario,
+ specifying this field will allow targeting an arbitrary postgres database. Note that this has
+ implications for the `create_database_objects_is_missing` field in `spec`."""

  spec: Optional[SyncedTableSpec] = None
- """Specification of a synced database table."""

  unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
  """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
@@ -557,12 +911,11 @@ class SyncedTableContinuousUpdateStatus:
  """Progress of the initial data synchronization."""

  last_processed_commit_version: Optional[int] = None
- """The last source table Delta version that was synced to the synced table. Note that this Delta
- version may not be completely synced to the synced table yet."""
+ """The last source table Delta version that was successfully synced to the synced table."""

  timestamp: Optional[str] = None
- """The timestamp of the last time any data was synchronized from the source table to the synced
- table."""
+ """The end timestamp of the last time any data was synchronized from the source table to the synced
+ table. This is when the data is available in the synced table."""

  def as_dict(self) -> dict:
  """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
@@ -602,12 +955,12 @@ class SyncedTableFailedStatus:
  SYNCED_PIPELINE_FAILED state."""

  last_processed_commit_version: Optional[int] = None
- """The last source table Delta version that was synced to the synced table. Note that this Delta
- version may only be partially synced to the synced table. Only populated if the table is still
- synced and available for serving."""
+ """The last source table Delta version that was successfully synced to the synced table. The last
+ source table Delta version that was synced to the synced table. Only populated if the table is
+ still synced and available for serving."""

  timestamp: Optional[str] = None
- """The timestamp of the last time any data was synchronized from the source table to the synced
+ """The end timestamp of the last time any data was synchronized from the source table to the synced
  table. Only populated if the table is still synced and available for serving."""

  def as_dict(self) -> dict:
@@ -699,6 +1052,51 @@ class SyncedTablePipelineProgress:
  )


+ @dataclass
+ class SyncedTablePosition:
+ delta_table_sync_info: Optional[DeltaTableSyncInfo] = None
+
+ sync_end_timestamp: Optional[str] = None
+ """The end timestamp of the most recent successful synchronization. This is the time when the data
+ is available in the synced table."""
+
+ sync_start_timestamp: Optional[str] = None
+ """The starting timestamp of the most recent successful synchronization from the source table to
+ the destination (synced) table. Note this is the starting timestamp of the sync operation, not
+ the end time. E.g., for a batch, this is the time when the sync operation started."""
+
+ def as_dict(self) -> dict:
+ """Serializes the SyncedTablePosition into a dictionary suitable for use as a JSON request body."""
+ body = {}
+ if self.delta_table_sync_info:
+ body["delta_table_sync_info"] = self.delta_table_sync_info.as_dict()
+ if self.sync_end_timestamp is not None:
+ body["sync_end_timestamp"] = self.sync_end_timestamp
+ if self.sync_start_timestamp is not None:
+ body["sync_start_timestamp"] = self.sync_start_timestamp
+ return body
+
+ def as_shallow_dict(self) -> dict:
+ """Serializes the SyncedTablePosition into a shallow dictionary of its immediate attributes."""
+ body = {}
+ if self.delta_table_sync_info:
+ body["delta_table_sync_info"] = self.delta_table_sync_info
+ if self.sync_end_timestamp is not None:
+ body["sync_end_timestamp"] = self.sync_end_timestamp
+ if self.sync_start_timestamp is not None:
+ body["sync_start_timestamp"] = self.sync_start_timestamp
+ return body
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePosition:
+ """Deserializes the SyncedTablePosition from a dictionary."""
+ return cls(
+ delta_table_sync_info=_from_dict(d, "delta_table_sync_info", DeltaTableSyncInfo),
+ sync_end_timestamp=d.get("sync_end_timestamp", None),
+ sync_start_timestamp=d.get("sync_start_timestamp", None),
+ )
+
+
  @dataclass
  class SyncedTableProvisioningStatus:
  """Detailed status of a synced table. Shown if the synced table is in the
@@ -839,15 +1237,24 @@ class SyncedTableStatus:
  """Status of a synced table."""

  continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None
- """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE
- or the SYNCED_UPDATING_PIPELINE_RESOURCES state."""

  detailed_state: Optional[SyncedTableState] = None
  """The state of the synced table."""

  failed_status: Optional[SyncedTableFailedStatus] = None
- """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the
- SYNCED_PIPELINE_FAILED state."""
+
+ last_sync: Optional[SyncedTablePosition] = None
+ """Summary of the last successful synchronization from source to destination.
+
+ Will always be present if there has been a successful sync. Even if the most recent syncs have
+ failed.
+
+ Limitation: The only exception is if the synced table is doing a FULL REFRESH, then the last
+ sync information will not be available until the full refresh is complete. This limitation will
+ be addressed in a future version.
+
+ This top-level field is a convenience for consumers who want easy access to last sync
+ information without having to traverse detailed_status."""

  message: Optional[str] = None
  """A text description of the current state of the synced table."""
@@ -857,12 +1264,8 @@ class SyncedTableStatus:
  of bin packing), or generated by the server (when creating a new pipeline)."""

  provisioning_status: Optional[SyncedTableProvisioningStatus] = None
- """Detailed status of a synced table. Shown if the synced table is in the
- PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""

  triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None
- """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE
- or the SYNCED_NO_PENDING_UPDATE state."""

  def as_dict(self) -> dict:
  """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body."""
@@ -873,6 +1276,8 @@ class SyncedTableStatus:
  body["detailed_state"] = self.detailed_state.value
  if self.failed_status:
  body["failed_status"] = self.failed_status.as_dict()
+ if self.last_sync:
+ body["last_sync"] = self.last_sync.as_dict()
  if self.message is not None:
  body["message"] = self.message
  if self.pipeline_id is not None:
@@ -892,6 +1297,8 @@ class SyncedTableStatus:
  body["detailed_state"] = self.detailed_state
  if self.failed_status:
  body["failed_status"] = self.failed_status
+ if self.last_sync:
+ body["last_sync"] = self.last_sync
  if self.message is not None:
  body["message"] = self.message
  if self.pipeline_id is not None:
@@ -909,6 +1316,7 @@ class SyncedTableStatus:
  continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus),
  detailed_state=_enum(d, "detailed_state", SyncedTableState),
  failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus),
+ last_sync=_from_dict(d, "last_sync", SyncedTablePosition),
  message=d.get("message", None),
  pipeline_id=d.get("pipeline_id", None),
  provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus),
@@ -922,12 +1330,11 @@ class SyncedTableTriggeredUpdateStatus:
  or the SYNCED_NO_PENDING_UPDATE state."""

  last_processed_commit_version: Optional[int] = None
- """The last source table Delta version that was synced to the synced table. Note that this Delta
- version may not be completely synced to the synced table yet."""
+ """The last source table Delta version that was successfully synced to the synced table."""

  timestamp: Optional[str] = None
- """The timestamp of the last time any data was synchronized from the source table to the synced
- table."""
+ """The end timestamp of the last time any data was synchronized from the source table to the synced
+ table. This is when the data is available in the synced table."""

  triggered_update_progress: Optional[SyncedTablePipelineProgress] = None
  """Progress of the active data synchronization pipeline."""
@@ -990,7 +1397,7 @@ class DatabaseAPI:
  """Create a Database Instance.

  :param database_instance: :class:`DatabaseInstance`
- A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
+ Instance to create.

  :returns: :class:`DatabaseInstance`
  """
@@ -1003,11 +1410,30 @@ class DatabaseAPI:
  res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
  return DatabaseInstance.from_dict(res)

+ def create_database_instance_role(
+ self, instance_name: str, database_instance_role: DatabaseInstanceRole
+ ) -> DatabaseInstanceRole:
+ """Create a role for a Database Instance.
+
+ :param instance_name: str
+ :param database_instance_role: :class:`DatabaseInstanceRole`
+
+ :returns: :class:`DatabaseInstanceRole`
+ """
+ body = database_instance_role.as_dict()
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ }
+
+ res = self._api.do("POST", f"/api/2.0/database/instances/{instance_name}/roles", body=body, headers=headers)
+ return DatabaseInstanceRole.from_dict(res)
+
  def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
- """Create a Database Table.
+ """Create a Database Table. Useful for registering pre-existing PG tables in UC. See
+ CreateSyncedDatabaseTable for creating synced tables in PG from a source table in UC.

  :param table: :class:`DatabaseTable`
- Next field marker: 13

  :returns: :class:`DatabaseTable`
  """
@@ -1024,7 +1450,6 @@ class DatabaseAPI:
  """Create a Synced Database Table.

  :param synced_table: :class:`SyncedDatabaseTable`
- Next field marker: 12

  :returns: :class:`SyncedDatabaseTable`
  """
@@ -1081,6 +1506,38 @@ class DatabaseAPI:

  self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers)

+ def delete_database_instance_role(
+ self,
+ instance_name: str,
+ name: str,
+ *,
+ allow_missing: Optional[bool] = None,
+ reassign_owned_to: Optional[str] = None,
+ ):
+ """Deletes a role for a Database Instance.
+
+ :param instance_name: str
+ :param name: str
+ :param allow_missing: bool (optional)
+ This is the AIP standard name for the equivalent of Postgres' `IF EXISTS` option
+ :param reassign_owned_to: str (optional)
+
+
+ """
+
+ query = {}
+ if allow_missing is not None:
+ query["allow_missing"] = allow_missing
+ if reassign_owned_to is not None:
+ query["reassign_owned_to"] = reassign_owned_to
+ headers = {
+ "Accept": "application/json",
+ }
+
+ self._api.do(
+ "DELETE", f"/api/2.0/database/instances/{instance_name}/roles/{name}", query=query, headers=headers
+ )
+
  def delete_database_table(self, name: str):
  """Delete a Database Table.

@@ -1129,10 +1586,17 @@ class DatabaseAPI:
  return DatabaseInstance.from_dict(res)

  def generate_database_credential(
- self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None
+ self,
+ *,
+ claims: Optional[List[RequestedClaims]] = None,
+ instance_names: Optional[List[str]] = None,
+ request_id: Optional[str] = None,
  ) -> DatabaseCredential:
  """Generates a credential that can be used to access database instances.

+ :param claims: List[:class:`RequestedClaims`] (optional)
+ The returned token will be scoped to the union of instance_names and instances containing the
+ specified UC tables, so instance_names is allowed to be empty.
  :param instance_names: List[str] (optional)
  Instances to which the token will be scoped.
  :param request_id: str (optional)
@@ -1140,6 +1604,8 @@ class DatabaseAPI:
  :returns: :class:`DatabaseCredential`
  """
  body = {}
+ if claims is not None:
+ body["claims"] = [v.as_dict() for v in claims]
  if instance_names is not None:
  body["instance_names"] = [v for v in instance_names]
  if request_id is not None:
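Note: with the claims parameter, a credential can be scoped to the instances containing specific UC tables instead of (or in addition to) explicit instance names. A sketch of the call, assuming `w.database`; the request_id and table name are illustrative:

    import uuid

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.database import (
        RequestedClaims,
        RequestedClaimsPermissionSet,
        RequestedResource,
    )

    w = WorkspaceClient()
    cred = w.database.generate_database_credential(
        request_id=str(uuid.uuid4()),
        claims=[
            RequestedClaims(
                permission_set=RequestedClaimsPermissionSet.READ_ONLY,
                resources=[RequestedResource(table_name="main.my_schema.my_table")],
            )
        ],
    )
    # The returned DatabaseCredential carries the value used to connect to Postgres
    # (attribute name assumed to be `token`).
    print(cred.token)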
@@ -1183,6 +1649,22 @@ class DatabaseAPI:
  res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers)
  return DatabaseInstance.from_dict(res)

+ def get_database_instance_role(self, instance_name: str, name: str) -> DatabaseInstanceRole:
+ """Gets a role for a Database Instance.
+
+ :param instance_name: str
+ :param name: str
+
+ :returns: :class:`DatabaseInstanceRole`
+ """
+
+ headers = {
+ "Accept": "application/json",
+ }
+
+ res = self._api.do("GET", f"/api/2.0/database/instances/{instance_name}/roles/{name}", headers=headers)
+ return DatabaseInstanceRole.from_dict(res)
+
  def get_database_table(self, name: str) -> DatabaseTable:
  """Get a Database Table.

@@ -1213,6 +1695,40 @@ class DatabaseAPI:
  res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers)
  return SyncedDatabaseTable.from_dict(res)

+ def list_database_instance_roles(
+ self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+ ) -> Iterator[DatabaseInstanceRole]:
+ """START OF PG ROLE APIs Section
+
+ :param instance_name: str
+ :param page_size: int (optional)
+ Upper bound for items returned.
+ :param page_token: str (optional)
+ Pagination token to go to the next page of Database Instances. Requests first page if absent.
+
+ :returns: Iterator over :class:`DatabaseInstanceRole`
+ """
+
+ query = {}
+ if page_size is not None:
+ query["page_size"] = page_size
+ if page_token is not None:
+ query["page_token"] = page_token
+ headers = {
+ "Accept": "application/json",
+ }
+
+ while True:
+ json = self._api.do(
+ "GET", f"/api/2.0/database/instances/{instance_name}/roles", query=query, headers=headers
+ )
+ if "database_instance_roles" in json:
+ for v in json["database_instance_roles"]:
+ yield DatabaseInstanceRole.from_dict(v)
+ if "next_page_token" not in json or not json["next_page_token"]:
+ return
+ query["page_token"] = json["next_page_token"]
+
  def list_database_instances(
  self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
  ) -> Iterator[DatabaseInstance]:
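Note: list_database_instance_roles is a standard paginated iterator, so callers never see the paging loop above. A sketch, assuming the `w.database` accessor and an illustrative instance name:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    for role in w.database.list_database_instance_roles("my-instance", page_size=50):
        print(role.name, role.identity_type, role.membership_role)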
@@ -1252,7 +1768,6 @@ class DatabaseAPI:
  :param name: str
  The name of the instance. This is the unique identifier for the instance.
  :param database_instance: :class:`DatabaseInstance`
- A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
  :param update_mask: str
  The list of fields to update.