databricks-sdk 0.33.0__py3-none-any.whl → 0.35.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of databricks-sdk was flagged as potentially problematic.

@@ -29,6 +29,12 @@ class BaseJob:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -41,6 +47,8 @@ class BaseJob:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
@@ -50,6 +58,7 @@ class BaseJob:
         """Deserializes the BaseJob from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    settings=_from_dict(d, 'settings', JobSettings))
 
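The `effective_budget_policy_id` field added to `BaseJob` (and to `Job` below) is read-only metadata resolved by the backend. A minimal sketch of reading it, assuming a `WorkspaceClient` with credentials already configured through the environment or `~/.databrickscfg`:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Each BaseJob returned by list() now carries the budget policy the backend
# resolved for cost attribution (None if no policy applies).
for job in w.jobs.list():
    print(job.job_id, job.effective_budget_policy_id)
```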
@@ -484,6 +493,11 @@ class CreateJob:
     access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""
 
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -591,6 +605,7 @@ class CreateJob:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -619,6 +634,7 @@ class CreateJob:
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
@@ -1261,6 +1277,12 @@ class Job:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -1282,6 +1304,8 @@ class Job:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings.as_dict()
@@ -1292,6 +1316,7 @@ class Job:
         """Deserializes the Job from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    run_as_user_name=d.get('run_as_user_name', None),
                    settings=_from_dict(d, 'settings', JobSettings))
@@ -1755,6 +1780,11 @@ class JobRunAs:
 
 @dataclass
 class JobSettings:
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -1860,6 +1890,7 @@ class JobSettings:
     def as_dict(self) -> dict:
         """Serializes the JobSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -1887,7 +1918,8 @@ class JobSettings:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
-        return cls(continuous=_from_dict(d, 'continuous', Continuous),
+        return cls(budget_policy_id=d.get('budget_policy_id', None),
+                   continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
                    edit_mode=_enum(d, 'edit_mode', JobEditMode),
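With `budget_policy_id` now part of `JobSettings`, an existing job can be re-attributed through a settings update. A hedged sketch (the job and policy ids are placeholders; the partial-update semantics of `jobs.update` are assumed to apply to the new field like any other settings field):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import JobSettings

w = WorkspaceClient()

# Re-attribute an existing job to a different budget policy. Only the fields
# set on new_settings are touched by the partial update.
w.jobs.update(job_id=123, new_settings=JobSettings(budget_policy_id="0123-456789-abcdef"))
```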
@@ -2478,6 +2510,7 @@ class RepairRun:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3181,6 +3214,7 @@ class RunJobTask:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3340,6 +3374,7 @@ class RunNow:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3549,6 +3584,7 @@ class RunParameters:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
    python_named_params: Optional[Dict[str, str]] = None
 
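The newly documented `pipeline_params` field on `RepairRun`, `RunJobTask`, `RunNow`, and `RunParameters` carries the full-refresh flag for pipeline tasks. A sketch of triggering a run with a full refresh, assuming `PipelineParams` exposes `full_refresh` as in earlier releases (the job id is a placeholder):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import PipelineParams

w = WorkspaceClient()

# Force the pipeline task of job 123 to fully refresh on this run.
waiter = w.jobs.run_now(job_id=123, pipeline_params=PipelineParams(full_refresh=True))
run = waiter.result()  # blocks until the run reaches a terminal state
print(run.state.result_state)
```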
@@ -4503,6 +4539,10 @@ class SubmitRun:
     access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""
 
+    budget_policy_id: Optional[str] = None
+    """The user specified id of the budget policy to use for this one-time run. If not specified, the
+    run will be not be attributed to any budget policy."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""
 
@@ -4563,6 +4603,7 @@ class SubmitRun:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -4581,6 +4622,7 @@ class SubmitRun:
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    environments=_repeated_dict(d, 'environments', JobEnvironment),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -5615,6 +5657,7 @@ class JobsAPI:
     def create(self,
                *,
                access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                continuous: Optional[Continuous] = None,
                deployment: Optional[JobDeployment] = None,
                description: Optional[str] = None,
@@ -5643,6 +5686,10 @@ class JobsAPI:
 
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param continuous: :class:`Continuous` (optional)
           An optional continuous property for this job. The continuous property will ensure that there is
           always one run executing. Only one of `schedule` and `continuous` can be used.
@@ -5727,6 +5774,7 @@ class JobsAPI:
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if continuous is not None: body['continuous'] = continuous.as_dict()
         if deployment is not None: body['deployment'] = deployment.as_dict()
         if description is not None: body['description'] = description
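`JobsAPI.create` now forwards `budget_policy_id` into the request body. A sketch of creating a job pinned to a policy; every name, path, and id below is a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import NotebookTask, Task

w = WorkspaceClient()

created = w.jobs.create(
    name="nightly-report",
    budget_policy_id="0123-456789-abcdef",  # cost attribution for every run of this job
    tasks=[
        Task(task_key="main",
             existing_cluster_id="1234-567890-abcde123",
             notebook_task=NotebookTask(notebook_path="/Workspace/Users/someone@example.com/report")),
    ],
)
print(created.job_id)
```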
@@ -6087,6 +6135,7 @@ class JobsAPI:
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -6276,6 +6325,7 @@ class JobsAPI:
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -6392,6 +6442,7 @@ class JobsAPI:
     def submit(self,
                *,
                access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                email_notifications: Optional[JobEmailNotifications] = None,
                environments: Optional[List[JobEnvironment]] = None,
                git_source: Optional[GitSource] = None,
@@ -6412,6 +6463,9 @@ class JobsAPI:
 
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
          List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user specified id of the budget policy to use for this one-time run. If not specified, the run
+          will be not be attributed to any budget policy.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
          An optional set of email addresses notified when the run begins or completes.
        :param environments: List[:class:`JobEnvironment`] (optional)
@@ -6463,6 +6517,7 @@ class JobsAPI:
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
         if environments is not None: body['environments'] = [v.as_dict() for v in environments]
         if git_source is not None: body['git_source'] = git_source.as_dict()
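`JobsAPI.submit` gains the same `budget_policy_id` pass-through for one-time runs. A hedged sketch (placeholders as before); `submit_and_wait`, patched in the next hunks, accepts the same argument:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import NotebookTask, SubmitTask

w = WorkspaceClient()

waiter = w.jobs.submit(
    run_name="ad-hoc-backfill",
    budget_policy_id="0123-456789-abcdef",  # otherwise the one-time run is not attributed to any policy
    tasks=[
        SubmitTask(task_key="backfill",
                   existing_cluster_id="1234-567890-abcde123",
                   notebook_task=NotebookTask(notebook_path="/Workspace/Users/someone@example.com/backfill")),
    ],
)
run = waiter.result()
```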
@@ -6486,6 +6541,7 @@ class JobsAPI:
         self,
         *,
         access_control_list: Optional[List[JobAccessControlRequest]] = None,
+        budget_policy_id: Optional[str] = None,
         email_notifications: Optional[JobEmailNotifications] = None,
         environments: Optional[List[JobEnvironment]] = None,
         git_source: Optional[GitSource] = None,
@@ -6500,6 +6556,7 @@ class JobsAPI:
         webhook_notifications: Optional[WebhookNotifications] = None,
         timeout=timedelta(minutes=20)) -> Run:
         return self.submit(access_control_list=access_control_list,
+                           budget_policy_id=budget_policy_id,
                            email_notifications=email_notifications,
                            environments=environments,
                            git_source=git_source,
@@ -587,6 +587,9 @@ class GetUpdateResponse:
 
 @dataclass
 class IngestionConfig:
+    report: Optional[ReportSpec] = None
+    """Select tables from a specific source report."""
+
     schema: Optional[SchemaSpec] = None
     """Select tables from a specific source schema."""
 
@@ -596,6 +599,7 @@ class IngestionConfig:
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.report: body['report'] = self.report.as_dict()
         if self.schema: body['schema'] = self.schema.as_dict()
         if self.table: body['table'] = self.table.as_dict()
         return body
@@ -603,7 +607,9 @@ class IngestionConfig:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
-        return cls(schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec))
+        return cls(report=_from_dict(d, 'report', ReportSpec),
+                   schema=_from_dict(d, 'schema', SchemaSpec),
+                   table=_from_dict(d, 'table', TableSpec))
 
 
 @dataclass
@@ -1624,6 +1630,44 @@ class PipelineTrigger:
         return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger))
 
 
+@dataclass
+class ReportSpec:
+    destination_catalog: Optional[str] = None
+    """Required. Destination catalog to store table."""
+
+    destination_schema: Optional[str] = None
+    """Required. Destination schema to store table."""
+
+    destination_table: Optional[str] = None
+    """Required. Destination table name. The pipeline fails if a table with that name already exists."""
+
+    source_url: Optional[str] = None
+    """Required. Report URL in the source system."""
+
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings override the
+    table_configuration defined in the IngestionPipelineDefinition object."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
+        """Deserializes the ReportSpec from a dictionary."""
+        return cls(destination_catalog=d.get('destination_catalog', None),
+                   destination_schema=d.get('destination_schema', None),
+                   destination_table=d.get('destination_table', None),
+                   source_url=d.get('source_url', None),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
 @dataclass
 class SchemaSpec:
     destination_catalog: Optional[str] = None
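The new `ReportSpec` plugs into `IngestionConfig` alongside the existing `SchemaSpec` and `TableSpec`. A sketch of building and serializing a report object for an ingestion pipeline definition, assuming the classes live in `databricks.sdk.service.pipelines` (all names below are placeholders):

```python
from databricks.sdk.service.pipelines import IngestionConfig, ReportSpec, TableSpecificConfig

obj = IngestionConfig(report=ReportSpec(
    source_url="https://example.com/reports/sales",
    destination_catalog="main",
    destination_schema="ingest",
    destination_table="sales_report",
    table_configuration=TableSpecificConfig(primary_keys=["id"]),
))

# as_dict() produces the JSON shape the pipelines API expects for this object.
print(obj.as_dict())
```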
@@ -1841,7 +1885,7 @@ class TableSpec:
     """Required. Destination schema to store table."""
 
     destination_table: Optional[str] = None
-    """Optional. Destination table name. The pipeline fails If a table with that name already exists.
+    """Optional. Destination table name. The pipeline fails if a table with that name already exists.
     If not set, the source table name is used."""
 
     source_catalog: Optional[str] = None
@@ -1893,6 +1937,10 @@ class TableSpecificConfig:
     scd_type: Optional[TableSpecificConfigScdType] = None
     """The SCD type to use to ingest the table."""
 
+    sequence_by: Optional[List[str]] = None
+    """The column names specifying the logical order of events in the source data. Delta Live Tables
+    uses this sequencing to handle change events that arrive out of order."""
+
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1900,6 +1948,7 @@ class TableSpecificConfig:
         if self.salesforce_include_formula_fields is not None:
             body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
         if self.scd_type is not None: body['scd_type'] = self.scd_type.value
+        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body
 
     @classmethod
@@ -1907,7 +1956,8 @@ class TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
         return cls(primary_keys=d.get('primary_keys', None),
                    salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None),
-                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType))
+                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType),
+                   sequence_by=d.get('sequence_by', None))
 
 
 class TableSpecificConfigScdType(Enum):
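`sequence_by` rounds out `TableSpecificConfig`: it names the column(s) that order change events so late-arriving updates are applied correctly. A small sketch of the serialization added above (`scd_type` and the Salesforce flag work the same way; the column names are placeholders):

```python
from databricks.sdk.service.pipelines import TableSpecificConfig

cfg = TableSpecificConfig(primary_keys=["order_id"],
                          sequence_by=["modified_at"])  # ordering column for out-of-order change events

print(cfg.as_dict())
# -> {'primary_keys': ['order_id'], 'sequence_by': ['modified_at']}
```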
@@ -720,6 +720,30 @@ class DeleteDisableLegacyAccessResponse:
         return cls(etag=d.get('etag', None))
 
 
+@dataclass
+class DeleteDisableLegacyDbfsResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyDbfsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse:
+        """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteDisableLegacyFeaturesResponse:
     """The etag is returned."""
@@ -863,6 +887,40 @@ class DisableLegacyAccess:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class DisableLegacyDbfs:
+    disable_legacy_dbfs: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyDbfs into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs:
+        """Deserializes the DisableLegacyDbfs from a dictionary."""
+        return cls(disable_legacy_dbfs=_from_dict(d, 'disable_legacy_dbfs', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
 @dataclass
 class DisableLegacyFeatures:
     disable_legacy_features: BooleanMessage
@@ -2534,6 +2592,36 @@ class UpdateDisableLegacyAccessRequest:
                    setting=_from_dict(d, 'setting', DisableLegacyAccess))
 
 
+@dataclass
+class UpdateDisableLegacyDbfsRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyDbfs
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest:
+        """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyDbfs))
+
+
 @dataclass
 class UpdateDisableLegacyFeaturesRequest:
     """Details required to update a setting."""
@@ -3447,6 +3535,91 @@ class DisableLegacyAccessAPI:
         return DisableLegacyAccess.from_dict(res)
 
 
+class DisableLegacyDbfsAPI:
+    """When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+    mounts). When the setting is off, all DBFS functionality is enabled"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse:
+        """Delete the disable legacy DBFS setting.
+
+        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+        :returns: :class:`DeleteDisableLegacyDbfsResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteDisableLegacyDbfsResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs:
+        """Get the disable legacy DBFS setting.
+
+        Gets the disable legacy DBFS setting.
+
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+        :returns: :class:`DisableLegacyDbfs`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           query=query,
+                           headers=headers)
+        return DisableLegacyDbfs.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs:
+        """Update the disable legacy DBFS setting.
+
+        Updates the disable legacy DBFS setting for the workspace.
+
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyDbfs`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+
+        :returns: :class:`DisableLegacyDbfs`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           body=body,
+                           headers=headers)
+        return DisableLegacyDbfs.from_dict(res)
+
+
 class DisableLegacyFeaturesAPI:
     """Disable legacy features for new Databricks workspaces.
 
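The new `DisableLegacyDbfsAPI` follows the same get/update/delete shape as the other workspace settings and is exposed as `w.settings.disable_legacy_dbfs` in the hunks that follow. A hedged sketch of flipping the setting on: `BooleanMessage` is the wrapper already used by the neighbouring settings, and the `field_mask` path is an assumption modeled on them, not something this diff confirms:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import BooleanMessage, DisableLegacyDbfs

w = WorkspaceClient()

# Read first so the returned etag can be passed back for optimistic concurrency control.
current = w.settings.disable_legacy_dbfs.get()

updated = w.settings.disable_legacy_dbfs.update(
    allow_missing=True,
    setting=DisableLegacyDbfs(disable_legacy_dbfs=BooleanMessage(value=True), etag=current.etag),
    field_mask="disable_legacy_dbfs.value",  # assumed mask path; adjust if the service expects another
)
print(updated.disable_legacy_dbfs.value)
```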
@@ -4411,6 +4584,7 @@ class SettingsAPI:
         self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api)
         self._default_namespace = DefaultNamespaceAPI(self._api)
         self._disable_legacy_access = DisableLegacyAccessAPI(self._api)
+        self._disable_legacy_dbfs = DisableLegacyDbfsAPI(self._api)
         self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api)
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
@@ -4434,6 +4608,11 @@ class SettingsAPI:
         """'Disabling legacy access' has the following impacts: 1."""
         return self._disable_legacy_access
 
+    @property
+    def disable_legacy_dbfs(self) -> DisableLegacyDbfsAPI:
+        """When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new mounts)."""
+        return self._disable_legacy_dbfs
+
     @property
     def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI:
         """Controls whether enhanced security monitoring is enabled for the current workspace."""
@@ -2496,7 +2496,7 @@ class SharesAPI:
                            f'/api/2.1/unity-catalog/shares/{name}/permissions',
                            query=query,
                            headers=headers)
-        return PermissionsList.from_dict(res)
+        return catalog.PermissionsList.from_dict(res)
 
     def update(self,
                name: str,
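The last hunk switches the deserialization of share permissions to the `catalog` module's `PermissionsList`. A hedged sketch of reading the result, assuming the wrapper around `GET /shares/{name}/permissions` is `SharesAPI.share_permissions` (the share name is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

perms = w.shares.share_permissions(name="my_share")  # now deserialized as catalog.PermissionsList
for assignment in perms.privilege_assignments or []:
    print(assignment.principal, assignment.privileges)
```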