databricks-sdk 0.48.0__py3-none-any.whl → 0.50.0__py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of databricks-sdk has been flagged as possibly problematic.

@@ -776,6 +776,13 @@ class OidcFederationPolicy:
     endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for
     discovering public keys."""
 
+    jwks_uri: Optional[str] = None
+    """URL of the public keys used to validate the signature of federated tokens, in JWKS format. Most
+    use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified
+    (recommended), Databricks automatically fetches the public keys from your issuer’s well known
+    endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for
+    discovering public keys."""
+
     subject: Optional[str] = None
     """The required token subject, as specified in the subject claim of federated tokens. Must be
     specified for service principal federation policies. Must not be specified for account
@@ -793,6 +800,8 @@ class OidcFederationPolicy:
             body["issuer"] = self.issuer
         if self.jwks_json is not None:
             body["jwks_json"] = self.jwks_json
+        if self.jwks_uri is not None:
+            body["jwks_uri"] = self.jwks_uri
         if self.subject is not None:
             body["subject"] = self.subject
         if self.subject_claim is not None:
@@ -808,6 +817,8 @@ class OidcFederationPolicy:
             body["issuer"] = self.issuer
         if self.jwks_json is not None:
             body["jwks_json"] = self.jwks_json
+        if self.jwks_uri is not None:
+            body["jwks_uri"] = self.jwks_uri
         if self.subject is not None:
             body["subject"] = self.subject
         if self.subject_claim is not None:
@@ -821,6 +832,7 @@ class OidcFederationPolicy:
             audiences=d.get("audiences", None),
             issuer=d.get("issuer", None),
             jwks_json=d.get("jwks_json", None),
+            jwks_uri=d.get("jwks_uri", None),
             subject=d.get("subject", None),
             subject_claim=d.get("subject_claim", None),
         )
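
Taken together, the OidcFederationPolicy hunks add a jwks_uri alternative to jwks_json and thread it through both serialization and deserialization. A minimal sketch of how the new field surfaces in the request body; the import path is the SDK's oauth2 service module, and the issuer, subject, and URL values are purely illustrative:

from databricks.sdk.service.oauth2 import OidcFederationPolicy

policy = OidcFederationPolicy(
    issuer="https://token.actions.githubusercontent.com",
    audiences=["databricks"],
    subject="repo:my-org/my-repo:ref:refs/heads/main",
    # Only needed when the issuer's well-known endpoint cannot be used for
    # key discovery; leaving jwks_uri and jwks_json unset is the recommended default.
    jwks_uri="https://token.actions.githubusercontent.com/.well-known/jwks",
)

# as_dict() now emits the field, so it reaches the REST request body.
assert "jwks_uri" in policy.as_dict()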
@@ -58,6 +58,9 @@ class CreatePipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    event_log: Optional[EventLogSpec] = None
+    """Event log configuration for this pipeline"""
+
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
@@ -69,7 +72,7 @@ class CreatePipeline:
 
     ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-    'libraries', 'target' or 'catalog' settings."""
+    'libraries', 'schema', 'target', or 'catalog' settings."""
 
     libraries: Optional[List[PipelineLibrary]] = None
     """Libraries or code needed by this deployment."""
@@ -95,8 +98,7 @@ class CreatePipeline:
     is thrown."""
 
     schema: Optional[str] = None
-    """The default schema (database) where tables are read from or published to. The presence of this
-    field implies that the pipeline is in direct publishing mode."""
+    """The default schema (database) where tables are read from or published to."""
 
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
@@ -105,9 +107,9 @@ class CreatePipeline:
     """DBFS root directory for storing checkpoints and tables."""
 
     target: Optional[str] = None
-    """Target schema (database) to add tables in this pipeline to. If not specified, no data is
-    published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify
-    `catalog`."""
+    """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
+    must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
+    deprecated for pipeline creation in favor of the `schema` field."""
 
     trigger: Optional[PipelineTrigger] = None
     """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
@@ -137,6 +139,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
         if self.filters:
             body["filters"] = self.filters.as_dict()
         if self.gateway_definition:
@@ -194,6 +198,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
         if self.filters:
             body["filters"] = self.filters
         if self.gateway_definition:
@@ -241,6 +247,7 @@ class CreatePipeline:
             development=d.get("development", None),
             dry_run=d.get("dry_run", None),
             edition=d.get("edition", None),
+            event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
             id=d.get("id", None),
@@ -428,6 +435,9 @@ class EditPipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    event_log: Optional[EventLogSpec] = None
+    """Event log configuration for this pipeline"""
+
     expected_last_modified: Optional[int] = None
     """If present, the last-modified time of the pipeline settings before the edit. If the settings
     were modified after that time, then the request will fail with a conflict."""
@@ -443,7 +453,7 @@ class EditPipeline:
 
     ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-    'libraries', 'target' or 'catalog' settings."""
+    'libraries', 'schema', 'target', or 'catalog' settings."""
 
     libraries: Optional[List[PipelineLibrary]] = None
     """Libraries or code needed by this deployment."""
@@ -472,8 +482,7 @@ class EditPipeline:
     is thrown."""
 
     schema: Optional[str] = None
-    """The default schema (database) where tables are read from or published to. The presence of this
-    field implies that the pipeline is in direct publishing mode."""
+    """The default schema (database) where tables are read from or published to."""
 
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
@@ -482,9 +491,9 @@ class EditPipeline:
     """DBFS root directory for storing checkpoints and tables."""
 
     target: Optional[str] = None
-    """Target schema (database) to add tables in this pipeline to. If not specified, no data is
-    published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify
-    `catalog`."""
+    """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
+    must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
+    deprecated for pipeline creation in favor of the `schema` field."""
 
     trigger: Optional[PipelineTrigger] = None
     """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
@@ -512,6 +521,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
         if self.expected_last_modified is not None:
             body["expected_last_modified"] = self.expected_last_modified
         if self.filters:
@@ -571,6 +582,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
         if self.expected_last_modified is not None:
             body["expected_last_modified"] = self.expected_last_modified
         if self.filters:
@@ -621,6 +634,7 @@ class EditPipeline:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            event_log=_from_dict(d, "event_log", EventLogSpec),
             expected_last_modified=d.get("expected_last_modified", None),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -700,6 +714,47 @@ class EventLevel(Enum):
     WARN = "WARN"
 
 
+@dataclass
+class EventLogSpec:
+    """Configurable event log parameters."""
+
+    catalog: Optional[str] = None
+    """The UC catalog the event log is published under."""
+
+    name: Optional[str] = None
+    """The name the event log is published to in UC."""
+
+    schema: Optional[str] = None
+    """The UC schema the event log is published under."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EventLogSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema is not None:
+            body["schema"] = self.schema
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EventLogSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema is not None:
+            body["schema"] = self.schema
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec:
+        """Deserializes the EventLogSpec from a dictionary."""
+        return cls(catalog=d.get("catalog", None), name=d.get("name", None), schema=d.get("schema", None))
+
+
 @dataclass
 class FileLibrary:
     path: Optional[str] = None
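
EventLogSpec is the new top-level dataclass that the event_log fields refer to, and it follows the generated model's usual as_dict/as_shallow_dict/from_dict pattern. A quick round-trip sketch with illustrative catalog, schema, and name values:

from databricks.sdk.service.pipelines import EventLogSpec

spec = EventLogSpec(catalog="main", schema="ops", name="sales_pipeline_events")

body = spec.as_dict()  # {'catalog': 'main', 'name': 'sales_pipeline_events', 'schema': 'ops'}
assert EventLogSpec.from_dict(body) == spec  # dataclass equality; unset fields stay None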
@@ -2207,6 +2262,9 @@ class PipelineSpec:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    event_log: Optional[EventLogSpec] = None
+    """Event log configuration for this pipeline"""
+
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
 
@@ -2218,7 +2276,7 @@ class PipelineSpec:
 
     ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-    'libraries', 'target' or 'catalog' settings."""
+    'libraries', 'schema', 'target', or 'catalog' settings."""
 
     libraries: Optional[List[PipelineLibrary]] = None
     """Libraries or code needed by this deployment."""
@@ -2236,8 +2294,7 @@ class PipelineSpec:
     """Restart window of this pipeline."""
 
     schema: Optional[str] = None
-    """The default schema (database) where tables are read from or published to. The presence of this
-    field implies that the pipeline is in direct publishing mode."""
+    """The default schema (database) where tables are read from or published to."""
 
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
@@ -2246,9 +2303,9 @@ class PipelineSpec:
     """DBFS root directory for storing checkpoints and tables."""
 
     target: Optional[str] = None
-    """Target schema (database) to add tables in this pipeline to. If not specified, no data is
-    published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify
-    `catalog`."""
+    """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
+    must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
+    deprecated for pipeline creation in favor of the `schema` field."""
 
     trigger: Optional[PipelineTrigger] = None
     """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
@@ -2274,6 +2331,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
         if self.filters:
             body["filters"] = self.filters.as_dict()
         if self.gateway_definition:
@@ -2325,6 +2384,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
         if self.filters:
             body["filters"] = self.filters
         if self.gateway_definition:
@@ -2368,6 +2429,7 @@ class PipelineSpec:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
             id=d.get("id", None),
@@ -3328,38 +3390,6 @@ class PipelinesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_pipeline_running(
-        self,
-        pipeline_id: str,
-        timeout=timedelta(minutes=20),
-        callback: Optional[Callable[[GetPipelineResponse], None]] = None,
-    ) -> GetPipelineResponse:
-        deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.RUNNING,)
-        failure_states = (PipelineState.FAILED,)
-        status_message = "polling..."
-        attempt = 1
-        while time.time() < deadline:
-            poll = self.get(pipeline_id=pipeline_id)
-            status = poll.state
-            status_message = poll.cause
-            if status in target_states:
-                return poll
-            if callback:
-                callback(poll)
-            if status in failure_states:
-                msg = f"failed to reach RUNNING, got {status}: {status_message}"
-                raise OperationFailed(msg)
-            prefix = f"pipeline_id={pipeline_id}"
-            sleep = attempt
-            if sleep > 10:
-                # sleep 10s max per attempt
-                sleep = 10
-            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
-            time.sleep(sleep + random.random())
-            attempt += 1
-        raise TimeoutError(f"timed out after {timeout}: {status_message}")
-
     def wait_get_pipeline_idle(
         self,
         pipeline_id: str,
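
The hunk above is the one outright removal in this range: wait_get_pipeline_running disappears from PipelinesAPI, while wait_get_pipeline_idle stays. Callers that relied on it can inline an equivalent loop. A simplified sketch built only from the states and fields the deleted code used, with the 1-10s jittered backoff collapsed to a flat sleep; wait_pipeline_running is a hypothetical helper name:

import time
from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import GetPipelineResponse, PipelineState


def wait_pipeline_running(
    w: WorkspaceClient, pipeline_id: str, timeout: timedelta = timedelta(minutes=20)
) -> GetPipelineResponse:
    """Poll until the pipeline reaches RUNNING, mirroring the removed helper."""
    deadline = time.time() + timeout.total_seconds()
    while time.time() < deadline:
        poll = w.pipelines.get(pipeline_id=pipeline_id)
        if poll.state == PipelineState.RUNNING:
            return poll
        if poll.state == PipelineState.FAILED:
            raise RuntimeError(f"failed to reach RUNNING, got {poll.state}: {poll.cause}")
        time.sleep(10)
    raise TimeoutError(f"timed out after {timeout}")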
@@ -3406,6 +3436,7 @@ class PipelinesAPI:
         development: Optional[bool] = None,
         dry_run: Optional[bool] = None,
         edition: Optional[str] = None,
+        event_log: Optional[EventLogSpec] = None,
         filters: Optional[Filters] = None,
         gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
         id: Optional[str] = None,
@@ -3450,6 +3481,8 @@ class PipelinesAPI:
         :param dry_run: bool (optional)
         :param edition: str (optional)
           Pipeline product edition.
+        :param event_log: :class:`EventLogSpec` (optional)
+          Event log configuration for this pipeline
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
@@ -3458,7 +3491,7 @@ class PipelinesAPI:
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
           The configuration for a managed ingestion pipeline. These settings cannot be used with the
-          'libraries', 'target' or 'catalog' settings.
+          'libraries', 'schema', 'target', or 'catalog' settings.
         :param libraries: List[:class:`PipelineLibrary`] (optional)
           Libraries or code needed by this deployment.
         :param name: str (optional)
@@ -3476,15 +3509,15 @@ class PipelinesAPI:
           Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
          thrown.
         :param schema: str (optional)
-          The default schema (database) where tables are read from or published to. The presence of this field
-          implies that the pipeline is in direct publishing mode.
+          The default schema (database) where tables are read from or published to.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
           DBFS root directory for storing checkpoints and tables.
         :param target: str (optional)
-          Target schema (database) to add tables in this pipeline to. If not specified, no data is published
-          to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
+          Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
+          be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
+          for pipeline creation in favor of the `schema` field.
         :param trigger: :class:`PipelineTrigger` (optional)
           Which pipeline trigger to use. Deprecated: Use `continuous` instead.
 
@@ -3513,6 +3546,8 @@ class PipelinesAPI:
             body["dry_run"] = dry_run
         if edition is not None:
             body["edition"] = edition
+        if event_log is not None:
+            body["event_log"] = event_log.as_dict()
         if filters is not None:
             body["filters"] = filters.as_dict()
         if gateway_definition is not None:
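
With the plumbing above, create accepts the new event_log parameter and serializes it into the request body with as_dict(). A hedged end-to-end sketch, assuming default-chain workspace auth; all pipeline and catalog names are illustrative, and schema is used rather than the now-deprecated target:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import EventLogSpec

w = WorkspaceClient()

created = w.pipelines.create(
    name="sales-ingest",
    catalog="main",
    schema="sales",  # preferred over the legacy `target` field
    serverless=True,
    event_log=EventLogSpec(catalog="main", schema="ops", name="sales_ingest_events"),
)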
@@ -3906,6 +3941,7 @@ class PipelinesAPI:
         deployment: Optional[PipelineDeployment] = None,
         development: Optional[bool] = None,
         edition: Optional[str] = None,
+        event_log: Optional[EventLogSpec] = None,
         expected_last_modified: Optional[int] = None,
         filters: Optional[Filters] = None,
         gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
@@ -3951,6 +3987,8 @@ class PipelinesAPI:
           Whether the pipeline is in Development mode. Defaults to false.
         :param edition: str (optional)
           Pipeline product edition.
+        :param event_log: :class:`EventLogSpec` (optional)
+          Event log configuration for this pipeline
         :param expected_last_modified: int (optional)
           If present, the last-modified time of the pipeline settings before the edit. If the settings were
           modified after that time, then the request will fail with a conflict.
@@ -3962,7 +4000,7 @@ class PipelinesAPI:
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
           The configuration for a managed ingestion pipeline. These settings cannot be used with the
-          'libraries', 'target' or 'catalog' settings.
+          'libraries', 'schema', 'target', or 'catalog' settings.
         :param libraries: List[:class:`PipelineLibrary`] (optional)
           Libraries or code needed by this deployment.
         :param name: str (optional)
@@ -3980,15 +4018,15 @@ class PipelinesAPI:
           Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
           thrown.
         :param schema: str (optional)
-          The default schema (database) where tables are read from or published to. The presence of this field
-          implies that the pipeline is in direct publishing mode.
+          The default schema (database) where tables are read from or published to.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
           DBFS root directory for storing checkpoints and tables.
         :param target: str (optional)
-          Target schema (database) to add tables in this pipeline to. If not specified, no data is published
-          to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
+          Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
+          be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
+          for pipeline creation in favor of the `schema` field.
         :param trigger: :class:`PipelineTrigger` (optional)
           Which pipeline trigger to use. Deprecated: Use `continuous` instead.
 
@@ -4015,6 +4053,8 @@ class PipelinesAPI:
             body["development"] = development
         if edition is not None:
             body["edition"] = edition
+        if event_log is not None:
+            body["event_log"] = event_log.as_dict()
         if expected_last_modified is not None:
             body["expected_last_modified"] = expected_last_modified
         if filters is not None:
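
The same parameter lands on update via the final hunk. Continuing the create sketch above (illustrative values; update replaces the pipeline's full settings, so unchanged fields are restated):

w.pipelines.update(
    pipeline_id=created.pipeline_id,
    name="sales-ingest",
    catalog="main",
    schema="sales",
    event_log=EventLogSpec(catalog="main", schema="ops", name="sales_ingest_events"),
)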