databricks-sdk 0.55.0__py3-none-any.whl → 0.57.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (31)
  1. databricks/sdk/__init__.py +41 -24
  2. databricks/sdk/service/aibuilder.py +505 -0
  3. databricks/sdk/service/apps.py +14 -42
  4. databricks/sdk/service/billing.py +167 -220
  5. databricks/sdk/service/catalog.py +462 -1235
  6. databricks/sdk/service/cleanrooms.py +26 -43
  7. databricks/sdk/service/compute.py +75 -211
  8. databricks/sdk/service/dashboards.py +77 -511
  9. databricks/sdk/service/database.py +1271 -0
  10. databricks/sdk/service/files.py +20 -54
  11. databricks/sdk/service/iam.py +61 -171
  12. databricks/sdk/service/jobs.py +453 -68
  13. databricks/sdk/service/marketplace.py +46 -146
  14. databricks/sdk/service/ml.py +453 -477
  15. databricks/sdk/service/oauth2.py +17 -45
  16. databricks/sdk/service/pipelines.py +125 -40
  17. databricks/sdk/service/provisioning.py +30 -93
  18. databricks/sdk/service/qualitymonitorv2.py +265 -0
  19. databricks/sdk/service/serving.py +106 -46
  20. databricks/sdk/service/settings.py +1062 -390
  21. databricks/sdk/service/sharing.py +33 -88
  22. databricks/sdk/service/sql.py +292 -185
  23. databricks/sdk/service/vectorsearch.py +13 -43
  24. databricks/sdk/service/workspace.py +35 -105
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/RECORD +31 -28
  28. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.55.0.dist-info → databricks_sdk-0.57.0.dist-info}/top_level.txt +0 -0
@@ -58,6 +58,9 @@ class CreatePipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    environment: Optional[PipelinesEnvironment] = None
+    """Environment specification for this pipeline used to install dependencies."""
+
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""
 
@@ -111,6 +114,11 @@ class CreatePipeline:
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
 
+    tags: Optional[Dict[str, str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
@@ -144,6 +152,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment.as_dict()
         if self.event_log:
             body["event_log"] = self.event_log.as_dict()
         if self.filters:
@@ -174,6 +184,8 @@ class CreatePipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -205,6 +217,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment
         if self.event_log:
             body["event_log"] = self.event_log
         if self.filters:
@@ -235,6 +249,8 @@ class CreatePipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -256,6 +272,7 @@ class CreatePipeline:
             development=d.get("development", None),
             dry_run=d.get("dry_run", None),
             edition=d.get("edition", None),
+            environment=_from_dict(d, "environment", PipelinesEnvironment),
             event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -271,6 +288,7 @@ class CreatePipeline:
             schema=d.get("schema", None),
             serverless=d.get("serverless", None),
             storage=d.get("storage", None),
+            tags=d.get("tags", None),
             target=d.get("target", None),
             trigger=_from_dict(d, "trigger", PipelineTrigger),
         )
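
The serialization plumbing above is mechanical, but it is worth seeing end to end. A minimal sketch of the new fields round-tripping through CreatePipeline (all values are illustrative):

from databricks.sdk.service.pipelines import CreatePipeline, PipelinesEnvironment

# Illustrative request; any other pipeline settings could be added alongside.
req = CreatePipeline(
    name="demo-pipeline",
    environment=PipelinesEnvironment(dependencies=["simplejson==3.19.2"]),
    tags={"team": "data-eng"},
)

body = req.as_dict()
# environment is serialized recursively; tags pass through as a plain dict.
assert body["environment"] == {"dependencies": ["simplejson==3.19.2"]}
assert body["tags"] == {"team": "data-eng"}

# from_dict reverses as_dict, rebuilding the nested PipelinesEnvironment.
assert CreatePipeline.from_dict(body).environment.dependencies == ["simplejson==3.19.2"]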
@@ -445,6 +463,9 @@ class EditPipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    environment: Optional[PipelinesEnvironment] = None
+    """Environment specification for this pipeline used to install dependencies."""
+
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""
 
@@ -505,6 +526,11 @@ class EditPipeline:
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
 
+    tags: Optional[Dict[str, str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
@@ -536,6 +562,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment.as_dict()
         if self.event_log:
             body["event_log"] = self.event_log.as_dict()
         if self.expected_last_modified is not None:
@@ -570,6 +598,8 @@ class EditPipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -599,6 +629,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment
         if self.event_log:
             body["event_log"] = self.event_log
         if self.expected_last_modified is not None:
@@ -633,6 +665,8 @@ class EditPipeline:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -653,6 +687,7 @@ class EditPipeline:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            environment=_from_dict(d, "environment", PipelinesEnvironment),
             event_log=_from_dict(d, "event_log", EventLogSpec),
             expected_last_modified=d.get("expected_last_modified", None),
             filters=_from_dict(d, "filters", Filters),
@@ -670,6 +705,7 @@ class EditPipeline:
             schema=d.get("schema", None),
             serverless=d.get("serverless", None),
             storage=d.get("storage", None),
+            tags=d.get("tags", None),
             target=d.get("target", None),
             trigger=_from_dict(d, "trigger", PipelineTrigger),
         )
@@ -1186,6 +1222,7 @@ class IngestionSourceType(Enum):
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
     SQLSERVER = "SQLSERVER"
+    TERADATA = "TERADATA"
     WORKDAY_RAAS = "WORKDAY_RAAS"
 
 
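Because the member's value matches its name, existing deserialization code picks the new source type up with no other changes. A trivial sketch of the lookup-by-value that enum deserialization relies on:

from databricks.sdk.service.pipelines import IngestionSourceType

assert IngestionSourceType("TERADATA") is IngestionSourceType.TERADATA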
@@ -2341,6 +2378,9 @@ class PipelineSpec:
     edition: Optional[str] = None
     """Pipeline product edition."""
 
+    environment: Optional[PipelinesEnvironment] = None
+    """Environment specification for this pipeline used to install dependencies."""
+
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""
 
@@ -2386,6 +2426,11 @@ class PipelineSpec:
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
 
+    tags: Optional[Dict[str, str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
@@ -2415,6 +2460,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment.as_dict()
         if self.event_log:
             body["event_log"] = self.event_log.as_dict()
         if self.filters:
@@ -2443,6 +2490,8 @@ class PipelineSpec:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -2470,6 +2519,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment
         if self.event_log:
             body["event_log"] = self.event_log
         if self.filters:
@@ -2498,6 +2549,8 @@ class PipelineSpec:
             body["serverless"] = self.serverless
         if self.storage is not None:
             body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
         if self.target is not None:
             body["target"] = self.target
         if self.trigger:
@@ -2517,6 +2570,7 @@ class PipelineSpec:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            environment=_from_dict(d, "environment", PipelinesEnvironment),
             event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -2531,6 +2585,7 @@ class PipelineSpec:
             schema=d.get("schema", None),
             serverless=d.get("serverless", None),
             storage=d.get("storage", None),
+            tags=d.get("tags", None),
             target=d.get("target", None),
             trigger=_from_dict(d, "trigger", PipelineTrigger),
         )
@@ -2671,6 +2726,39 @@ class PipelineTrigger:
         return cls(cron=_from_dict(d, "cron", CronTrigger), manual=_from_dict(d, "manual", ManualTrigger))
 
 
+@dataclass
+class PipelinesEnvironment:
+    """The environment entity used to preserve serverless environment side panel, jobs' environment for
+    non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal
+    environment spec, only pip dependencies are supported."""
+
+    dependencies: Optional[List[str]] = None
+    """List of pip dependencies, as supported by the version of pip in this environment. Each
+    dependency is a pip requirement file line
+    https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be
+    <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in
+    Databricks), <vcs project url>"""
+
+    def as_dict(self) -> dict:
+        """Serializes the PipelinesEnvironment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dependencies:
+            body["dependencies"] = [v for v in self.dependencies]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinesEnvironment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dependencies:
+            body["dependencies"] = self.dependencies
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> PipelinesEnvironment:
+        """Deserializes the PipelinesEnvironment from a dictionary."""
+        return cls(dependencies=d.get("dependencies", None))
+
+
 @dataclass
 class ReportSpec:
     source_url: str
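
PipelinesEnvironment is deliberately minimal: `dependencies` is its only field, and empty instances serialize to an empty body. A short sketch of what a populated spec serializes to (the paths and versions are illustrative):

from databricks.sdk.service.pipelines import PipelinesEnvironment

env = PipelinesEnvironment(
    dependencies=[
        "simplejson==3.19.2",                     # requirement specifier
        "/Volumes/main/default/libs/my_pkg.whl",  # local path (Volumes), per the docstring
    ]
)
assert env.as_dict() == {
    "dependencies": [
        "simplejson==3.19.2",
        "/Volumes/main/default/libs/my_pkg.whl",
    ]
}
# With no dependencies set, the body carries no "dependencies" key at all.
assert PipelinesEnvironment().as_dict() == {}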
@@ -2884,7 +2972,7 @@ class SchemaSpec:
 @dataclass
 class Sequencing:
     control_plane_seq_no: Optional[int] = None
-    """A sequence number, unique and increasing within the control plane."""
+    """A sequence number, unique and increasing per pipeline."""
 
     data_plane_id: Optional[DataPlaneId] = None
     """the ID assigned by the data plane."""
@@ -3553,6 +3641,7 @@ class PipelinesAPI:
         development: Optional[bool] = None,
         dry_run: Optional[bool] = None,
         edition: Optional[str] = None,
+        environment: Optional[PipelinesEnvironment] = None,
         event_log: Optional[EventLogSpec] = None,
         filters: Optional[Filters] = None,
         gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
@@ -3568,12 +3657,11 @@ class PipelinesAPI:
         schema: Optional[str] = None,
         serverless: Optional[bool] = None,
         storage: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
         target: Optional[str] = None,
         trigger: Optional[PipelineTrigger] = None,
     ) -> CreatePipelineResponse:
-        """Create a pipeline.
-
-        Creates a new data processing pipeline based on the requested configuration. If successful, this
+        """Creates a new data processing pipeline based on the requested configuration. If successful, this
         method returns the ID of the new pipeline.
 
         :param allow_duplicate_names: bool (optional)
@@ -3599,6 +3687,8 @@ class PipelinesAPI:
         :param dry_run: bool (optional)
         :param edition: str (optional)
           Pipeline product edition.
+        :param environment: :class:`PipelinesEnvironment` (optional)
+          Environment specification for this pipeline used to install dependencies.
         :param event_log: :class:`EventLogSpec` (optional)
           Event log configuration for this pipeline
         :param filters: :class:`Filters` (optional)
@@ -3636,6 +3726,9 @@ class PipelinesAPI:
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
           DBFS root directory for storing checkpoints and tables.
+        :param tags: Dict[str,str] (optional)
+          A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and
+          are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.
         :param target: str (optional)
           Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
           be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
@@ -3668,6 +3761,8 @@ class PipelinesAPI:
             body["dry_run"] = dry_run
         if edition is not None:
             body["edition"] = edition
+        if environment is not None:
+            body["environment"] = environment.as_dict()
         if event_log is not None:
             body["event_log"] = event_log.as_dict()
         if filters is not None:
@@ -3698,6 +3793,8 @@ class PipelinesAPI:
             body["serverless"] = serverless
         if storage is not None:
             body["storage"] = storage
+        if tags is not None:
+            body["tags"] = tags
         if target is not None:
             body["target"] = target
         if trigger is not None:
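
Putting the new create() parameters together: a hedged sketch of creating a serverless pipeline with pip dependencies and tags. The notebook path and tag values are placeholders, and authentication is assumed to come from the environment:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (
    NotebookLibrary,
    PipelineLibrary,
    PipelinesEnvironment,
)

w = WorkspaceClient()

created = w.pipelines.create(
    name="sdk-demo-pipeline",
    serverless=True,
    libraries=[PipelineLibrary(notebook=NotebookLibrary(path="/Users/me@example.com/dlt_demo"))],
    environment=PipelinesEnvironment(dependencies=["simplejson==3.19.2"]),
    tags={"team": "data-eng"},  # forwarded to the cluster; at most 25 tags
)
print(created.pipeline_id)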
@@ -3711,9 +3808,8 @@ class PipelinesAPI:
         return CreatePipelineResponse.from_dict(res)
 
     def delete(self, pipeline_id: str):
-        """Delete a pipeline.
-
-        Deletes a pipeline.
+        """Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and
+        its tables. You cannot undo this action.
 
         :param pipeline_id: str
 
@@ -3742,9 +3838,7 @@ class PipelinesAPI:
         return GetPipelineResponse.from_dict(res)
 
     def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevelsResponse:
-        """Get pipeline permission levels.
-
-        Gets the permission levels that a user can have on an object.
+        """Gets the permission levels that a user can have on an object.
 
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
@@ -3760,9 +3854,7 @@ class PipelinesAPI:
         return GetPipelinePermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, pipeline_id: str) -> PipelinePermissions:
-        """Get pipeline permissions.
-
-        Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
+        """Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
 
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
@@ -3778,9 +3870,7 @@ class PipelinesAPI:
         return PipelinePermissions.from_dict(res)
 
     def get_update(self, pipeline_id: str, update_id: str) -> GetUpdateResponse:
-        """Get a pipeline update.
-
-        Gets an update from an active pipeline.
+        """Gets an update from an active pipeline.
 
         :param pipeline_id: str
           The ID of the pipeline.
@@ -3806,9 +3896,7 @@ class PipelinesAPI:
         order_by: Optional[List[str]] = None,
         page_token: Optional[str] = None,
     ) -> Iterator[PipelineEvent]:
-        """List pipeline events.
-
-        Retrieves events for a pipeline.
+        """Retrieves events for a pipeline.
 
         :param pipeline_id: str
           The pipeline to return events for.
@@ -3864,9 +3952,7 @@ class PipelinesAPI:
         order_by: Optional[List[str]] = None,
         page_token: Optional[str] = None,
     ) -> Iterator[PipelineStateInfo]:
-        """List pipelines.
-
-        Lists pipelines defined in the Delta Live Tables system.
+        """Lists pipelines defined in the Delta Live Tables system.
 
         :param filter: str (optional)
           Select a subset of results based on the specified criteria. The supported filters are:
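
Both listing methods return iterators that page through results transparently via page_token. A small sketch using a name filter (the pattern is illustrative):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# list_pipelines pages under the hood; list_pipeline_events behaves the
# same way for a single pipeline's event log.
for p in w.pipelines.list_pipelines(filter="name LIKE '%demo%'"):
    print(p.pipeline_id, p.name, p.state)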
@@ -3920,9 +4006,7 @@ class PipelinesAPI:
         page_token: Optional[str] = None,
         until_update_id: Optional[str] = None,
     ) -> ListUpdatesResponse:
-        """List pipeline updates.
-
-        List updates for an active pipeline.
+        """List updates for an active pipeline.
 
         :param pipeline_id: str
           The pipeline to return updates for.
@@ -3953,9 +4037,7 @@ class PipelinesAPI:
     def set_permissions(
         self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None
     ) -> PipelinePermissions:
-        """Set pipeline permissions.
-
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
 
         :param pipeline_id: str
@@ -3985,9 +4067,7 @@ class PipelinesAPI:
         refresh_selection: Optional[List[str]] = None,
         validate_only: Optional[bool] = None,
     ) -> StartUpdateResponse:
-        """Start a pipeline.
-
-        Starts a new update for the pipeline. If there is already an active update for the pipeline, the
+        """Starts a new update for the pipeline. If there is already an active update for the pipeline, the
         request will fail and the active update will remain running.
 
         :param pipeline_id: str
@@ -4029,9 +4109,7 @@ class PipelinesAPI:
         return StartUpdateResponse.from_dict(res)
 
     def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]:
-        """Stop a pipeline.
-
-        Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
+        """Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
         request is a no-op.
 
         :param pipeline_id: str
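
stop() follows the SDK's long-running-operation pattern: it returns a Wait wrapper immediately, and calling .result() polls until the pipeline settles. A sketch (the pipeline ID is a placeholder):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

waiter = w.pipelines.stop(pipeline_id="1234-abcd-5678")
pipeline = waiter.result()  # blocks until the pipeline is idle, then returns GetPipelineResponse
print(pipeline.state)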
@@ -4067,6 +4145,7 @@ class PipelinesAPI:
         deployment: Optional[PipelineDeployment] = None,
         development: Optional[bool] = None,
         edition: Optional[str] = None,
+        environment: Optional[PipelinesEnvironment] = None,
         event_log: Optional[EventLogSpec] = None,
         expected_last_modified: Optional[int] = None,
         filters: Optional[Filters] = None,
@@ -4083,12 +4162,11 @@ class PipelinesAPI:
         schema: Optional[str] = None,
         serverless: Optional[bool] = None,
         storage: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
         target: Optional[str] = None,
         trigger: Optional[PipelineTrigger] = None,
     ):
-        """Edit a pipeline.
-
-        Updates a pipeline with the supplied configuration.
+        """Updates a pipeline with the supplied configuration.
 
         :param pipeline_id: str
           Unique identifier for this pipeline.
@@ -4114,6 +4192,8 @@ class PipelinesAPI:
           Whether the pipeline is in Development mode. Defaults to false.
         :param edition: str (optional)
           Pipeline product edition.
+        :param environment: :class:`PipelinesEnvironment` (optional)
+          Environment specification for this pipeline used to install dependencies.
         :param event_log: :class:`EventLogSpec` (optional)
           Event log configuration for this pipeline
         :param expected_last_modified: int (optional)
@@ -4154,6 +4234,9 @@ class PipelinesAPI:
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
          DBFS root directory for storing checkpoints and tables.
+        :param tags: Dict[str,str] (optional)
+          A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and
+          are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.
         :param target: str (optional)
           Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must
          be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated
@@ -4184,6 +4267,8 @@ class PipelinesAPI:
             body["development"] = development
         if edition is not None:
             body["edition"] = edition
+        if environment is not None:
+            body["environment"] = environment.as_dict()
         if event_log is not None:
             body["event_log"] = event_log.as_dict()
         if expected_last_modified is not None:
@@ -4216,6 +4301,8 @@ class PipelinesAPI:
             body["serverless"] = serverless
         if storage is not None:
             body["storage"] = storage
+        if tags is not None:
+            body["tags"] = tags
         if target is not None:
             body["target"] = target
         if trigger is not None:
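
The same two parameters land on update(). Since the docstring says update "updates a pipeline with the supplied configuration", the sketch below conservatively resupplies key fields from the current spec alongside the new ones rather than sending only the additions (the pipeline ID is a placeholder):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import PipelinesEnvironment

w = WorkspaceClient()

pid = "1234-abcd-5678"
spec = w.pipelines.get(pipeline_id=pid).spec

w.pipelines.update(
    pipeline_id=pid,
    name=spec.name,           # carry over existing settings from the current spec
    catalog=spec.catalog,
    schema=spec.schema,
    libraries=spec.libraries,
    environment=PipelinesEnvironment(dependencies=["pyyaml>=6"]),
    tags={"owner": "data-eng"},
)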
@@ -4230,9 +4317,7 @@ class PipelinesAPI:
     def update_permissions(
         self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None
     ) -> PipelinePermissions:
-        """Update pipeline permissions.
-
-        Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        """Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
 
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.