databricks-sdk 0.56.0__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of databricks-sdk might be problematic.

Files changed (31)
  1. databricks/sdk/__init__.py +38 -11
  2. databricks/sdk/service/aibuilder.py +122 -17
  3. databricks/sdk/service/apps.py +15 -45
  4. databricks/sdk/service/billing.py +70 -74
  5. databricks/sdk/service/catalog.py +1898 -557
  6. databricks/sdk/service/cleanrooms.py +14 -55
  7. databricks/sdk/service/compute.py +305 -508
  8. databricks/sdk/service/dashboards.py +148 -223
  9. databricks/sdk/service/database.py +657 -127
  10. databricks/sdk/service/files.py +18 -54
  11. databricks/sdk/service/iam.py +55 -165
  12. databricks/sdk/service/jobs.py +238 -214
  13. databricks/sdk/service/marketplace.py +47 -146
  14. databricks/sdk/service/ml.py +1137 -447
  15. databricks/sdk/service/oauth2.py +17 -46
  16. databricks/sdk/service/pipelines.py +93 -69
  17. databricks/sdk/service/provisioning.py +34 -212
  18. databricks/sdk/service/qualitymonitorv2.py +5 -33
  19. databricks/sdk/service/serving.py +69 -55
  20. databricks/sdk/service/settings.py +106 -434
  21. databricks/sdk/service/sharing.py +33 -95
  22. databricks/sdk/service/sql.py +164 -254
  23. databricks/sdk/service/vectorsearch.py +13 -62
  24. databricks/sdk/service/workspace.py +36 -110
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/RECORD +31 -31
  28. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.56.0.dist-info → databricks_sdk-0.58.0.dist-info}/top_level.txt +0 -0

databricks/sdk/service/pipelines.py
@@ -58,6 +58,9 @@ class CreatePipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""

+    environment: Optional[PipelinesEnvironment] = None
+    """Environment specification for this pipeline used to install dependencies."""
+
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""

@@ -95,12 +98,6 @@ class CreatePipeline:
     pipeline execution."""

     run_as: Optional[RunAs] = None
-    """Write-only setting, available only in Create/Update calls. Specifies the user or service
-    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
-    the pipeline.
-
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""

     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to."""
@@ -149,6 +146,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment.as_dict()
         if self.event_log:
             body["event_log"] = self.event_log.as_dict()
         if self.filters:
@@ -212,6 +211,8 @@ class CreatePipeline:
             body["dry_run"] = self.dry_run
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment
         if self.event_log:
             body["event_log"] = self.event_log
         if self.filters:
@@ -265,6 +266,7 @@ class CreatePipeline:
             development=d.get("development", None),
             dry_run=d.get("dry_run", None),
             edition=d.get("edition", None),
+            environment=_from_dict(d, "environment", PipelinesEnvironment),
             event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -455,6 +457,9 @@ class EditPipeline:
     edition: Optional[str] = None
     """Pipeline product edition."""

+    environment: Optional[PipelinesEnvironment] = None
+    """Environment specification for this pipeline used to install dependencies."""
+
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""

@@ -499,12 +504,6 @@ class EditPipeline:
     pipeline execution."""

     run_as: Optional[RunAs] = None
-    """Write-only setting, available only in Create/Update calls. Specifies the user or service
-    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
-    the pipeline.
-
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""

     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to."""
@@ -551,6 +550,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment.as_dict()
         if self.event_log:
             body["event_log"] = self.event_log.as_dict()
         if self.expected_last_modified is not None:
@@ -616,6 +617,8 @@ class EditPipeline:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment
         if self.event_log:
             body["event_log"] = self.event_log
         if self.expected_last_modified is not None:
@@ -672,6 +675,7 @@ class EditPipeline:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            environment=_from_dict(d, "environment", PipelinesEnvironment),
             event_log=_from_dict(d, "event_log", EventLogSpec),
             expected_last_modified=d.get("expected_last_modified", None),
             filters=_from_dict(d, "filters", Filters),
@@ -906,6 +910,11 @@ class GetPipelineResponse:
     pipeline_id: Optional[str] = None
     """The ID of the pipeline."""

+    run_as: Optional[RunAs] = None
+    """The user or service principal that the pipeline runs as, if specified in the request. This field
+    indicates the explicit configuration of `run_as` for the pipeline. To find the value in all
+    cases, explicit or implicit, use `run_as_user_name`."""
+
     run_as_user_name: Optional[str] = None
     """Username of the user that the pipeline will run on behalf of."""

@@ -936,6 +945,8 @@ class GetPipelineResponse:
             body["name"] = self.name
         if self.pipeline_id is not None:
             body["pipeline_id"] = self.pipeline_id
+        if self.run_as:
+            body["run_as"] = self.run_as.as_dict()
         if self.run_as_user_name is not None:
             body["run_as_user_name"] = self.run_as_user_name
         if self.spec:
@@ -965,6 +976,8 @@ class GetPipelineResponse:
             body["name"] = self.name
         if self.pipeline_id is not None:
             body["pipeline_id"] = self.pipeline_id
+        if self.run_as:
+            body["run_as"] = self.run_as
         if self.run_as_user_name is not None:
             body["run_as_user_name"] = self.run_as_user_name
         if self.spec:
@@ -986,6 +999,7 @@ class GetPipelineResponse:
             latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
             name=d.get("name", None),
             pipeline_id=d.get("pipeline_id", None),
+            run_as=_from_dict(d, "run_as", RunAs),
             run_as_user_name=d.get("run_as_user_name", None),
             spec=_from_dict(d, "spec", PipelineSpec),
             state=_enum(d, "state", PipelineState),
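
With `run_as` now returned on GetPipelineResponse, callers can distinguish an explicitly configured identity from the effective one. A minimal sketch of reading both, assuming a configured WorkspaceClient; the pipeline ID is a placeholder, and the `user_name`/`service_principal_name` attributes come from the pre-existing RunAs dataclass:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Placeholder ID; substitute a pipeline that exists in your workspace.
p = w.pipelines.get("1234-567890-abcde123")

# run_as is populated only when it was set explicitly on create/update;
# run_as_user_name reflects the effective identity in all cases.
if p.run_as is not None:
    print("explicit run_as:", p.run_as.user_name or p.run_as.service_principal_name)
print("effective run_as_user_name:", p.run_as_user_name)
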
@@ -1195,6 +1209,7 @@ class IngestionPipelineDefinition:

 class IngestionSourceType(Enum):

+    BIGQUERY = "BIGQUERY"
     DYNAMICS365 = "DYNAMICS365"
     GA4_RAW_DATA = "GA4_RAW_DATA"
     MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
@@ -1605,7 +1620,6 @@ class PipelineAccessControlRequest:
     """name of the group"""

     permission_level: Optional[PipelinePermissionLevel] = None
-    """Permission level"""

     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
@@ -2179,7 +2193,6 @@ class PipelinePermission:
     inherited_from_object: Optional[List[str]] = None

     permission_level: Optional[PipelinePermissionLevel] = None
-    """Permission level"""

     def as_dict(self) -> dict:
         """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body."""
@@ -2267,7 +2280,6 @@ class PipelinePermissionsDescription:
     description: Optional[str] = None

     permission_level: Optional[PipelinePermissionLevel] = None
-    """Permission level"""

     def as_dict(self) -> dict:
         """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body."""
@@ -2362,6 +2374,9 @@ class PipelineSpec:
     edition: Optional[str] = None
     """Pipeline product edition."""

+    environment: Optional[PipelinesEnvironment] = None
+    """Environment specification for this pipeline used to install dependencies."""
+
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""

@@ -2441,6 +2456,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment.as_dict()
         if self.event_log:
             body["event_log"] = self.event_log.as_dict()
         if self.filters:
@@ -2498,6 +2515,8 @@ class PipelineSpec:
             body["development"] = self.development
         if self.edition is not None:
             body["edition"] = self.edition
+        if self.environment:
+            body["environment"] = self.environment
         if self.event_log:
             body["event_log"] = self.event_log
         if self.filters:
@@ -2547,6 +2566,7 @@ class PipelineSpec:
             deployment=_from_dict(d, "deployment", PipelineDeployment),
             development=d.get("development", None),
             edition=d.get("edition", None),
+            environment=_from_dict(d, "environment", PipelinesEnvironment),
             event_log=_from_dict(d, "event_log", EventLogSpec),
             filters=_from_dict(d, "filters", Filters),
             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
@@ -2606,7 +2626,6 @@ class PipelineStateInfo:
     owner."""

     state: Optional[PipelineState] = None
-    """The pipeline state."""

     def as_dict(self) -> dict:
         """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body."""
@@ -2702,6 +2721,39 @@ class PipelineTrigger:
         return cls(cron=_from_dict(d, "cron", CronTrigger), manual=_from_dict(d, "manual", ManualTrigger))


+@dataclass
+class PipelinesEnvironment:
+    """The environment entity used to preserve serverless environment side panel, jobs' environment for
+    non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal
+    environment spec, only pip dependencies are supported."""
+
+    dependencies: Optional[List[str]] = None
+    """List of pip dependencies, as supported by the version of pip in this environment. Each
+    dependency is a pip requirement file line
+    https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be
+    <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in
+    Databricks), <vcs project url>"""
+
+    def as_dict(self) -> dict:
+        """Serializes the PipelinesEnvironment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dependencies:
+            body["dependencies"] = [v for v in self.dependencies]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinesEnvironment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dependencies:
+            body["dependencies"] = self.dependencies
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> PipelinesEnvironment:
+        """Deserializes the PipelinesEnvironment from a dictionary."""
+        return cls(dependencies=d.get("dependencies", None))
+
+
 @dataclass
 class ReportSpec:
     source_url: str
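
The new PipelinesEnvironment type above is what the `environment` field on CreatePipeline, EditPipeline, and PipelineSpec carries, and the create()/update() methods later in this diff accept it directly. A minimal usage sketch, assuming a configured WorkspaceClient; the `name` and `serverless` arguments and the `pipeline_id` attribute on the response are pre-existing SDK surface assumed here, not part of this diff:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import PipelinesEnvironment

w = WorkspaceClient()

# Each dependency is a pip requirement-file line: a requirement specifier,
# an archive URL/path, a WSFS/Volumes path, or a VCS URL.
env = PipelinesEnvironment(dependencies=["simplejson==3.19.2"])

# Round-trips through the generated (de)serializers shown above.
assert PipelinesEnvironment.from_dict(env.as_dict()).dependencies == env.dependencies

created = w.pipelines.create(
    name="sdk-environment-demo",  # placeholder pipeline name
    serverless=True,
    environment=env,
)
print(created.pipeline_id)
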
@@ -2915,7 +2967,7 @@ class SchemaSpec:
 @dataclass
 class Sequencing:
     control_plane_seq_no: Optional[int] = None
-    """A sequence number, unique and increasing within the control plane."""
+    """A sequence number, unique and increasing per pipeline."""

     data_plane_id: Optional[DataPlaneId] = None
     """the ID assigned by the data plane."""
@@ -3044,7 +3096,6 @@ class StackFrame:
 @dataclass
 class StartUpdate:
     cause: Optional[StartUpdateCause] = None
-    """What triggered this update."""

     full_refresh: Optional[bool] = None
     """If true, this update will reset all tables before running."""
@@ -3321,6 +3372,7 @@ class TableSpecificConfig:
 class TableSpecificConfigScdType(Enum):
     """The SCD type to use to ingest the table."""

+    APPEND_ONLY = "APPEND_ONLY"
     SCD_TYPE_1 = "SCD_TYPE_1"
     SCD_TYPE_2 = "SCD_TYPE_2"

@@ -3471,7 +3523,6 @@ class UpdateStateInfo:
     creation_time: Optional[str] = None

     state: Optional[UpdateStateInfoState] = None
-    """The update state."""

     update_id: Optional[str] = None

@@ -3584,6 +3635,7 @@ class PipelinesAPI:
         development: Optional[bool] = None,
         dry_run: Optional[bool] = None,
         edition: Optional[str] = None,
+        environment: Optional[PipelinesEnvironment] = None,
         event_log: Optional[EventLogSpec] = None,
         filters: Optional[Filters] = None,
         gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
@@ -3603,9 +3655,7 @@ class PipelinesAPI:
         target: Optional[str] = None,
         trigger: Optional[PipelineTrigger] = None,
     ) -> CreatePipelineResponse:
-        """Create a pipeline.
-
-        Creates a new data processing pipeline based on the requested configuration. If successful, this
+        """Creates a new data processing pipeline based on the requested configuration. If successful, this
         method returns the ID of the new pipeline.

         :param allow_duplicate_names: bool (optional)
@@ -3631,6 +3681,8 @@ class PipelinesAPI:
         :param dry_run: bool (optional)
         :param edition: str (optional)
           Pipeline product edition.
+        :param environment: :class:`PipelinesEnvironment` (optional)
+          Environment specification for this pipeline used to install dependencies.
         :param event_log: :class:`EventLogSpec` (optional)
           Event log configuration for this pipeline
         :param filters: :class:`Filters` (optional)
@@ -3657,11 +3709,6 @@ class PipelinesAPI:
           Databricks user interface and it is added to sys.path when executing Python sources during pipeline
           execution.
         :param run_as: :class:`RunAs` (optional)
-          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to.
         :param serverless: bool (optional)
@@ -3703,6 +3750,8 @@ class PipelinesAPI:
             body["dry_run"] = dry_run
         if edition is not None:
             body["edition"] = edition
+        if environment is not None:
+            body["environment"] = environment.as_dict()
         if event_log is not None:
             body["event_log"] = event_log.as_dict()
         if filters is not None:
@@ -3748,9 +3797,7 @@ class PipelinesAPI:
         return CreatePipelineResponse.from_dict(res)

     def delete(self, pipeline_id: str):
-        """Delete a pipeline.
-
-        Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and
+        """Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and
         its tables. You cannot undo this action.

         :param pipeline_id: str
@@ -3780,9 +3827,7 @@ class PipelinesAPI:
         return GetPipelineResponse.from_dict(res)

     def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevelsResponse:
-        """Get pipeline permission levels.
-
-        Gets the permission levels that a user can have on an object.
+        """Gets the permission levels that a user can have on an object.

         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
@@ -3798,9 +3843,7 @@ class PipelinesAPI:
         return GetPipelinePermissionLevelsResponse.from_dict(res)

     def get_permissions(self, pipeline_id: str) -> PipelinePermissions:
-        """Get pipeline permissions.
-
-        Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
+        """Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.

         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
@@ -3816,9 +3859,7 @@ class PipelinesAPI:
         return PipelinePermissions.from_dict(res)

     def get_update(self, pipeline_id: str, update_id: str) -> GetUpdateResponse:
-        """Get a pipeline update.
-
-        Gets an update from an active pipeline.
+        """Gets an update from an active pipeline.

         :param pipeline_id: str
           The ID of the pipeline.
@@ -3844,9 +3885,7 @@ class PipelinesAPI:
         order_by: Optional[List[str]] = None,
         page_token: Optional[str] = None,
     ) -> Iterator[PipelineEvent]:
-        """List pipeline events.
-
-        Retrieves events for a pipeline.
+        """Retrieves events for a pipeline.

         :param pipeline_id: str
           The pipeline to return events for.
@@ -3902,9 +3941,7 @@ class PipelinesAPI:
         order_by: Optional[List[str]] = None,
         page_token: Optional[str] = None,
     ) -> Iterator[PipelineStateInfo]:
-        """List pipelines.
-
-        Lists pipelines defined in the Delta Live Tables system.
+        """Lists pipelines defined in the Delta Live Tables system.

         :param filter: str (optional)
           Select a subset of results based on the specified criteria. The supported filters are:
@@ -3958,9 +3995,7 @@ class PipelinesAPI:
         page_token: Optional[str] = None,
         until_update_id: Optional[str] = None,
     ) -> ListUpdatesResponse:
-        """List pipeline updates.
-
-        List updates for an active pipeline.
+        """List updates for an active pipeline.

         :param pipeline_id: str
           The pipeline to return updates for.
@@ -3991,9 +4026,7 @@ class PipelinesAPI:
     def set_permissions(
         self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None
     ) -> PipelinePermissions:
-        """Set pipeline permissions.
-
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.

         :param pipeline_id: str
@@ -4023,14 +4056,11 @@ class PipelinesAPI:
         refresh_selection: Optional[List[str]] = None,
         validate_only: Optional[bool] = None,
     ) -> StartUpdateResponse:
-        """Start a pipeline.
-
-        Starts a new update for the pipeline. If there is already an active update for the pipeline, the
+        """Starts a new update for the pipeline. If there is already an active update for the pipeline, the
         request will fail and the active update will remain running.

         :param pipeline_id: str
         :param cause: :class:`StartUpdateCause` (optional)
-          What triggered this update.
         :param full_refresh: bool (optional)
           If true, this update will reset all tables before running.
         :param full_refresh_selection: List[str] (optional)
@@ -4067,9 +4097,7 @@ class PipelinesAPI:
         return StartUpdateResponse.from_dict(res)

     def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]:
-        """Stop a pipeline.
-
-        Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
+        """Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
         request is a no-op.

         :param pipeline_id: str
@@ -4105,6 +4133,7 @@ class PipelinesAPI:
         deployment: Optional[PipelineDeployment] = None,
         development: Optional[bool] = None,
         edition: Optional[str] = None,
+        environment: Optional[PipelinesEnvironment] = None,
         event_log: Optional[EventLogSpec] = None,
         expected_last_modified: Optional[int] = None,
         filters: Optional[Filters] = None,
@@ -4125,9 +4154,7 @@ class PipelinesAPI:
         target: Optional[str] = None,
         trigger: Optional[PipelineTrigger] = None,
     ):
-        """Edit a pipeline.
-
-        Updates a pipeline with the supplied configuration.
+        """Updates a pipeline with the supplied configuration.

         :param pipeline_id: str
           Unique identifier for this pipeline.
@@ -4153,6 +4180,8 @@ class PipelinesAPI:
           Whether the pipeline is in Development mode. Defaults to false.
         :param edition: str (optional)
           Pipeline product edition.
+        :param environment: :class:`PipelinesEnvironment` (optional)
+          Environment specification for this pipeline used to install dependencies.
         :param event_log: :class:`EventLogSpec` (optional)
           Event log configuration for this pipeline
         :param expected_last_modified: int (optional)
@@ -4182,11 +4211,6 @@ class PipelinesAPI:
           Databricks user interface and it is added to sys.path when executing Python sources during pipeline
           execution.
         :param run_as: :class:`RunAs` (optional)
-          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to.
         :param serverless: bool (optional)
@@ -4226,6 +4250,8 @@ class PipelinesAPI:
             body["development"] = development
         if edition is not None:
             body["edition"] = edition
+        if environment is not None:
+            body["environment"] = environment.as_dict()
         if event_log is not None:
             body["event_log"] = event_log.as_dict()
         if expected_last_modified is not None:
@@ -4274,9 +4300,7 @@ class PipelinesAPI:
     def update_permissions(
         self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None
     ) -> PipelinePermissions:
-        """Update pipeline permissions.
-
-        Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        """Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.

         :param pipeline_id: str
           The pipeline for which to get or manage permissions.