databricks-sdk 0.48.0__py3-none-any.whl → 0.49.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic; consult the package registry's advisory page for more details.

@@ -58,6 +58,9 @@ class CreatePipeline:
58
58
  edition: Optional[str] = None
59
59
  """Pipeline product edition."""
60
60
 
61
+ event_log: Optional[EventLogSpec] = None
62
+ """Event log configuration for this pipeline"""
63
+
61
64
  filters: Optional[Filters] = None
62
65
  """Filters on which Pipeline packages to include in the deployed graph."""
63
66
 
@@ -69,7 +72,7 @@ class CreatePipeline:
69
72
 
70
73
  ingestion_definition: Optional[IngestionPipelineDefinition] = None
71
74
  """The configuration for a managed ingestion pipeline. These settings cannot be used with the
72
- 'libraries', 'target' or 'catalog' settings."""
75
+ 'libraries', 'schema', 'target', or 'catalog' settings."""
73
76
 
74
77
  libraries: Optional[List[PipelineLibrary]] = None
75
78
  """Libraries or code needed by this deployment."""
@@ -95,8 +98,7 @@ class CreatePipeline:
95
98
  is thrown."""
96
99
 
97
100
  schema: Optional[str] = None
98
- """The default schema (database) where tables are read from or published to. The presence of this
99
- field implies that the pipeline is in direct publishing mode."""
101
+ """The default schema (database) where tables are read from or published to."""
100
102
 
101
103
  serverless: Optional[bool] = None
102
104
  """Whether serverless compute is enabled for this pipeline."""
@@ -105,9 +107,9 @@ class CreatePipeline:
105
107
  """DBFS root directory for storing checkpoints and tables."""
106
108
 
107
109
  target: Optional[str] = None
108
- """Target schema (database) to add tables in this pipeline to. If not specified, no data is
109
- published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify
110
- `catalog`."""
110
+ """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
111
+ must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
112
+ deprecated for pipeline creation in favor of the `schema` field."""
111
113
 
112
114
  trigger: Optional[PipelineTrigger] = None
113
115
  """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
@@ -137,6 +139,8 @@ class CreatePipeline:
137
139
  body["dry_run"] = self.dry_run
138
140
  if self.edition is not None:
139
141
  body["edition"] = self.edition
142
+ if self.event_log:
143
+ body["event_log"] = self.event_log.as_dict()
140
144
  if self.filters:
141
145
  body["filters"] = self.filters.as_dict()
142
146
  if self.gateway_definition:
@@ -194,6 +198,8 @@ class CreatePipeline:
194
198
  body["dry_run"] = self.dry_run
195
199
  if self.edition is not None:
196
200
  body["edition"] = self.edition
201
+ if self.event_log:
202
+ body["event_log"] = self.event_log
197
203
  if self.filters:
198
204
  body["filters"] = self.filters
199
205
  if self.gateway_definition:
@@ -241,6 +247,7 @@ class CreatePipeline:
241
247
  development=d.get("development", None),
242
248
  dry_run=d.get("dry_run", None),
243
249
  edition=d.get("edition", None),
250
+ event_log=_from_dict(d, "event_log", EventLogSpec),
244
251
  filters=_from_dict(d, "filters", Filters),
245
252
  gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
246
253
  id=d.get("id", None),
@@ -428,6 +435,9 @@ class EditPipeline:
428
435
  edition: Optional[str] = None
429
436
  """Pipeline product edition."""
430
437
 
438
+ event_log: Optional[EventLogSpec] = None
439
+ """Event log configuration for this pipeline"""
440
+
431
441
  expected_last_modified: Optional[int] = None
432
442
  """If present, the last-modified time of the pipeline settings before the edit. If the settings
433
443
  were modified after that time, then the request will fail with a conflict."""
@@ -443,7 +453,7 @@ class EditPipeline:
443
453
 
444
454
  ingestion_definition: Optional[IngestionPipelineDefinition] = None
445
455
  """The configuration for a managed ingestion pipeline. These settings cannot be used with the
446
- 'libraries', 'target' or 'catalog' settings."""
456
+ 'libraries', 'schema', 'target', or 'catalog' settings."""
447
457
 
448
458
  libraries: Optional[List[PipelineLibrary]] = None
449
459
  """Libraries or code needed by this deployment."""
@@ -472,8 +482,7 @@ class EditPipeline:
472
482
  is thrown."""
473
483
 
474
484
  schema: Optional[str] = None
475
- """The default schema (database) where tables are read from or published to. The presence of this
476
- field implies that the pipeline is in direct publishing mode."""
485
+ """The default schema (database) where tables are read from or published to."""
477
486
 
478
487
  serverless: Optional[bool] = None
479
488
  """Whether serverless compute is enabled for this pipeline."""
@@ -482,9 +491,9 @@ class EditPipeline:
482
491
  """DBFS root directory for storing checkpoints and tables."""
483
492
 
484
493
  target: Optional[str] = None
485
- """Target schema (database) to add tables in this pipeline to. If not specified, no data is
486
- published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify
487
- `catalog`."""
494
+ """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
495
+ must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
496
+ deprecated for pipeline creation in favor of the `schema` field."""
488
497
 
489
498
  trigger: Optional[PipelineTrigger] = None
490
499
  """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
@@ -512,6 +521,8 @@ class EditPipeline:
512
521
  body["development"] = self.development
513
522
  if self.edition is not None:
514
523
  body["edition"] = self.edition
524
+ if self.event_log:
525
+ body["event_log"] = self.event_log.as_dict()
515
526
  if self.expected_last_modified is not None:
516
527
  body["expected_last_modified"] = self.expected_last_modified
517
528
  if self.filters:
@@ -571,6 +582,8 @@ class EditPipeline:
571
582
  body["development"] = self.development
572
583
  if self.edition is not None:
573
584
  body["edition"] = self.edition
585
+ if self.event_log:
586
+ body["event_log"] = self.event_log
574
587
  if self.expected_last_modified is not None:
575
588
  body["expected_last_modified"] = self.expected_last_modified
576
589
  if self.filters:
@@ -621,6 +634,7 @@ class EditPipeline:
621
634
  deployment=_from_dict(d, "deployment", PipelineDeployment),
622
635
  development=d.get("development", None),
623
636
  edition=d.get("edition", None),
637
+ event_log=_from_dict(d, "event_log", EventLogSpec),
624
638
  expected_last_modified=d.get("expected_last_modified", None),
625
639
  filters=_from_dict(d, "filters", Filters),
626
640
  gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -700,6 +714,47 @@ class EventLevel(Enum):
700
714
  WARN = "WARN"
701
715
 
702
716
 
717
+ @dataclass
718
+ class EventLogSpec:
719
+ """Configurable event log parameters."""
720
+
721
+ catalog: Optional[str] = None
722
+ """The UC catalog the event log is published under."""
723
+
724
+ name: Optional[str] = None
725
+ """The name the event log is published to in UC."""
726
+
727
+ schema: Optional[str] = None
728
+ """The UC schema the event log is published under."""
729
+
730
+ def as_dict(self) -> dict:
731
+ """Serializes the EventLogSpec into a dictionary suitable for use as a JSON request body."""
732
+ body = {}
733
+ if self.catalog is not None:
734
+ body["catalog"] = self.catalog
735
+ if self.name is not None:
736
+ body["name"] = self.name
737
+ if self.schema is not None:
738
+ body["schema"] = self.schema
739
+ return body
740
+
741
+ def as_shallow_dict(self) -> dict:
742
+ """Serializes the EventLogSpec into a shallow dictionary of its immediate attributes."""
743
+ body = {}
744
+ if self.catalog is not None:
745
+ body["catalog"] = self.catalog
746
+ if self.name is not None:
747
+ body["name"] = self.name
748
+ if self.schema is not None:
749
+ body["schema"] = self.schema
750
+ return body
751
+
752
+ @classmethod
753
+ def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec:
754
+ """Deserializes the EventLogSpec from a dictionary."""
755
+ return cls(catalog=d.get("catalog", None), name=d.get("name", None), schema=d.get("schema", None))
756
+
757
+
703
758
  @dataclass
704
759
  class FileLibrary:
705
760
  path: Optional[str] = None
@@ -2207,6 +2262,9 @@ class PipelineSpec:
2207
2262
  edition: Optional[str] = None
2208
2263
  """Pipeline product edition."""
2209
2264
 
2265
+ event_log: Optional[EventLogSpec] = None
2266
+ """Event log configuration for this pipeline"""
2267
+
2210
2268
  filters: Optional[Filters] = None
2211
2269
  """Filters on which Pipeline packages to include in the deployed graph."""
2212
2270
 
@@ -2218,7 +2276,7 @@ class PipelineSpec:
2218
2276
 
2219
2277
  ingestion_definition: Optional[IngestionPipelineDefinition] = None
2220
2278
  """The configuration for a managed ingestion pipeline. These settings cannot be used with the
2221
- 'libraries', 'target' or 'catalog' settings."""
2279
+ 'libraries', 'schema', 'target', or 'catalog' settings."""
2222
2280
 
2223
2281
  libraries: Optional[List[PipelineLibrary]] = None
2224
2282
  """Libraries or code needed by this deployment."""
@@ -2236,8 +2294,7 @@ class PipelineSpec:
2236
2294
  """Restart window of this pipeline."""
2237
2295
 
2238
2296
  schema: Optional[str] = None
2239
- """The default schema (database) where tables are read from or published to. The presence of this
2240
- field implies that the pipeline is in direct publishing mode."""
2297
+ """The default schema (database) where tables are read from or published to."""
2241
2298
 
2242
2299
  serverless: Optional[bool] = None
2243
2300
  """Whether serverless compute is enabled for this pipeline."""
@@ -2246,9 +2303,9 @@ class PipelineSpec:
2246
2303
  """DBFS root directory for storing checkpoints and tables."""
2247
2304
 
2248
2305
  target: Optional[str] = None
2249
- """Target schema (database) to add tables in this pipeline to. If not specified, no data is
2250
- published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify
2251
- `catalog`."""
2306
+ """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
2307
+ must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
2308
+ deprecated for pipeline creation in favor of the `schema` field."""
2252
2309
 
2253
2310
  trigger: Optional[PipelineTrigger] = None
2254
2311
  """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
@@ -2274,6 +2331,8 @@ class PipelineSpec:
2274
2331
  body["development"] = self.development
2275
2332
  if self.edition is not None:
2276
2333
  body["edition"] = self.edition
2334
+ if self.event_log:
2335
+ body["event_log"] = self.event_log.as_dict()
2277
2336
  if self.filters:
2278
2337
  body["filters"] = self.filters.as_dict()
2279
2338
  if self.gateway_definition:
@@ -2325,6 +2384,8 @@ class PipelineSpec:
2325
2384
  body["development"] = self.development
2326
2385
  if self.edition is not None:
2327
2386
  body["edition"] = self.edition
2387
+ if self.event_log:
2388
+ body["event_log"] = self.event_log
2328
2389
  if self.filters:
2329
2390
  body["filters"] = self.filters
2330
2391
  if self.gateway_definition:
@@ -2368,6 +2429,7 @@ class PipelineSpec:
2368
2429
  deployment=_from_dict(d, "deployment", PipelineDeployment),
2369
2430
  development=d.get("development", None),
2370
2431
  edition=d.get("edition", None),
2432
+ event_log=_from_dict(d, "event_log", EventLogSpec),
2371
2433
  filters=_from_dict(d, "filters", Filters),
2372
2434
  gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
2373
2435
  id=d.get("id", None),
@@ -3406,6 +3468,7 @@ class PipelinesAPI:
3406
3468
  development: Optional[bool] = None,
3407
3469
  dry_run: Optional[bool] = None,
3408
3470
  edition: Optional[str] = None,
3471
+ event_log: Optional[EventLogSpec] = None,
3409
3472
  filters: Optional[Filters] = None,
3410
3473
  gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
3411
3474
  id: Optional[str] = None,
@@ -3450,6 +3513,8 @@ class PipelinesAPI:
3450
3513
  :param dry_run: bool (optional)
3451
3514
  :param edition: str (optional)
3452
3515
  Pipeline product edition.
3516
+ :param event_log: :class:`EventLogSpec` (optional)
3517
+ Event log configuration for this pipeline
3453
3518
  :param filters: :class:`Filters` (optional)
3454
3519
  Filters on which Pipeline packages to include in the deployed graph.
3455
3520
  :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
@@ -3458,7 +3523,7 @@ class PipelinesAPI:
3458
3523
  Unique identifier for this pipeline.
3459
3524
  :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
3460
3525
  The configuration for a managed ingestion pipeline. These settings cannot be used with the
3461
- 'libraries', 'target' or 'catalog' settings.
3526
+ 'libraries', 'schema', 'target', or 'catalog' settings.
3462
3527
  :param libraries: List[:class:`PipelineLibrary`] (optional)
3463
3528
  Libraries or code needed by this deployment.
3464
3529
  :param name: str (optional)
@@ -3476,15 +3541,15 @@ class PipelinesAPI:
3476
3541
  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
3477
3542
  thrown.
3478
3543
  :param schema: str (optional)
3479
- The default schema (database) where tables are read from or published to. The presence of this field
3480
- implies that the pipeline is in direct publishing mode.
3544
+ The default schema (database) where tables are read from or published to.
3481
3545
  :param serverless: bool (optional)
3482
3546
  Whether serverless compute is enabled for this pipeline.
3483
3547
  :param storage: str (optional)
3484
3548
  DBFS root directory for storing checkpoints and tables.
3485
3549
  :param target: str (optional)
3486
- Target schema (database) to add tables in this pipeline to. If not specified, no data is published
3487
- to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
3550
+ Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
3551
+ be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
3552
+ for pipeline creation in favor of the `schema` field.
3488
3553
  :param trigger: :class:`PipelineTrigger` (optional)
3489
3554
  Which pipeline trigger to use. Deprecated: Use `continuous` instead.
3490
3555
 
@@ -3513,6 +3578,8 @@ class PipelinesAPI:
3513
3578
  body["dry_run"] = dry_run
3514
3579
  if edition is not None:
3515
3580
  body["edition"] = edition
3581
+ if event_log is not None:
3582
+ body["event_log"] = event_log.as_dict()
3516
3583
  if filters is not None:
3517
3584
  body["filters"] = filters.as_dict()
3518
3585
  if gateway_definition is not None:
@@ -3906,6 +3973,7 @@ class PipelinesAPI:
3906
3973
  deployment: Optional[PipelineDeployment] = None,
3907
3974
  development: Optional[bool] = None,
3908
3975
  edition: Optional[str] = None,
3976
+ event_log: Optional[EventLogSpec] = None,
3909
3977
  expected_last_modified: Optional[int] = None,
3910
3978
  filters: Optional[Filters] = None,
3911
3979
  gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
@@ -3951,6 +4019,8 @@ class PipelinesAPI:
3951
4019
  Whether the pipeline is in Development mode. Defaults to false.
3952
4020
  :param edition: str (optional)
3953
4021
  Pipeline product edition.
4022
+ :param event_log: :class:`EventLogSpec` (optional)
4023
+ Event log configuration for this pipeline
3954
4024
  :param expected_last_modified: int (optional)
3955
4025
  If present, the last-modified time of the pipeline settings before the edit. If the settings were
3956
4026
  modified after that time, then the request will fail with a conflict.
@@ -3962,7 +4032,7 @@ class PipelinesAPI:
3962
4032
  Unique identifier for this pipeline.
3963
4033
  :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
3964
4034
  The configuration for a managed ingestion pipeline. These settings cannot be used with the
3965
- 'libraries', 'target' or 'catalog' settings.
4035
+ 'libraries', 'schema', 'target', or 'catalog' settings.
3966
4036
  :param libraries: List[:class:`PipelineLibrary`] (optional)
3967
4037
  Libraries or code needed by this deployment.
3968
4038
  :param name: str (optional)
@@ -3980,15 +4050,15 @@ class PipelinesAPI:
3980
4050
  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
3981
4051
  thrown.
3982
4052
  :param schema: str (optional)
3983
- The default schema (database) where tables are read from or published to. The presence of this field
3984
- implies that the pipeline is in direct publishing mode.
4053
+ The default schema (database) where tables are read from or published to.
3985
4054
  :param serverless: bool (optional)
3986
4055
  Whether serverless compute is enabled for this pipeline.
3987
4056
  :param storage: str (optional)
3988
4057
  DBFS root directory for storing checkpoints and tables.
3989
4058
  :param target: str (optional)
3990
- Target schema (database) to add tables in this pipeline to. If not specified, no data is published
3991
- to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
4059
+ Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
4060
+ be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
4061
+ for pipeline creation in favor of the `schema` field.
3992
4062
  :param trigger: :class:`PipelineTrigger` (optional)
3993
4063
  Which pipeline trigger to use. Deprecated: Use `continuous` instead.
3994
4064
 
@@ -4015,6 +4085,8 @@ class PipelinesAPI:
4015
4085
  body["development"] = development
4016
4086
  if edition is not None:
4017
4087
  body["edition"] = edition
4088
+ if event_log is not None:
4089
+ body["event_log"] = event_log.as_dict()
4018
4090
  if expected_last_modified is not None:
4019
4091
  body["expected_last_modified"] = expected_last_modified
4020
4092
  if filters is not None: