databricks-sdk 0.37.0__py3-none-any.whl → 0.38.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic. Click here for more details.
- databricks/sdk/__init__.py +3 -2
- databricks/sdk/_base_client.py +61 -14
- databricks/sdk/config.py +10 -9
- databricks/sdk/credentials_provider.py +6 -5
- databricks/sdk/mixins/jobs.py +49 -0
- databricks/sdk/service/apps.py +10 -4
- databricks/sdk/service/billing.py +1 -1
- databricks/sdk/service/catalog.py +196 -32
- databricks/sdk/service/dashboards.py +10 -10
- databricks/sdk/service/iam.py +2 -2
- databricks/sdk/service/jobs.py +17 -8
- databricks/sdk/service/oauth2.py +1 -0
- databricks/sdk/service/pipelines.py +82 -15
- databricks/sdk/service/provisioning.py +15 -0
- databricks/sdk/service/settings.py +3 -1
- databricks/sdk/service/sharing.py +2 -0
- databricks/sdk/service/workspace.py +2 -1
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.37.0.dist-info → databricks_sdk-0.38.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.37.0.dist-info → databricks_sdk-0.38.0.dist-info}/RECORD +24 -23
- {databricks_sdk-0.37.0.dist-info → databricks_sdk-0.38.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.37.0.dist-info → databricks_sdk-0.38.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.37.0.dist-info → databricks_sdk-0.38.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.37.0.dist-info → databricks_sdk-0.38.0.dist-info}/top_level.txt +0 -0
|
@@ -1144,7 +1144,7 @@ class LakeviewAPI:
|
|
|
1144
1144
|
|
|
1145
1145
|
:returns: :class:`Dashboard`
|
|
1146
1146
|
"""
|
|
1147
|
-
body = dashboard
|
|
1147
|
+
body = dashboard.as_dict()
|
|
1148
1148
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
1149
1149
|
|
|
1150
1150
|
res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers)
|
|
@@ -1159,7 +1159,7 @@ class LakeviewAPI:
|
|
|
1159
1159
|
|
|
1160
1160
|
:returns: :class:`Schedule`
|
|
1161
1161
|
"""
|
|
1162
|
-
body = schedule
|
|
1162
|
+
body = schedule.as_dict()
|
|
1163
1163
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
1164
1164
|
|
|
1165
1165
|
res = self._api.do('POST',
|
|
@@ -1183,7 +1183,7 @@ class LakeviewAPI:
|
|
|
1183
1183
|
|
|
1184
1184
|
:returns: :class:`Subscription`
|
|
1185
1185
|
"""
|
|
1186
|
-
body = subscription
|
|
1186
|
+
body = subscription.as_dict()
|
|
1187
1187
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
1188
1188
|
|
|
1189
1189
|
res = self._api.do(
|
|
@@ -1269,7 +1269,7 @@ class LakeviewAPI:
|
|
|
1269
1269
|
Get the current published dashboard.
|
|
1270
1270
|
|
|
1271
1271
|
:param dashboard_id: str
|
|
1272
|
-
UUID identifying the dashboard
|
|
1272
|
+
UUID identifying the published dashboard.
|
|
1273
1273
|
|
|
1274
1274
|
:returns: :class:`PublishedDashboard`
|
|
1275
1275
|
"""
|
|
@@ -1364,7 +1364,7 @@ class LakeviewAPI:
|
|
|
1364
1364
|
"""List dashboard schedules.
|
|
1365
1365
|
|
|
1366
1366
|
:param dashboard_id: str
|
|
1367
|
-
UUID identifying the dashboard to which the
|
|
1367
|
+
UUID identifying the dashboard to which the schedules belong.
|
|
1368
1368
|
:param page_size: int (optional)
|
|
1369
1369
|
The number of schedules to return per page.
|
|
1370
1370
|
:param page_token: str (optional)
|
|
@@ -1400,9 +1400,9 @@ class LakeviewAPI:
|
|
|
1400
1400
|
"""List schedule subscriptions.
|
|
1401
1401
|
|
|
1402
1402
|
:param dashboard_id: str
|
|
1403
|
-
UUID identifying the dashboard
|
|
1403
|
+
UUID identifying the dashboard to which the subscriptions belong.
|
|
1404
1404
|
:param schedule_id: str
|
|
1405
|
-
UUID identifying the schedule
|
|
1405
|
+
UUID identifying the schedule to which the subscriptions belong.
|
|
1406
1406
|
:param page_size: int (optional)
|
|
1407
1407
|
The number of subscriptions to return per page.
|
|
1408
1408
|
:param page_token: str (optional)
|
|
@@ -1508,7 +1508,7 @@ class LakeviewAPI:
|
|
|
1508
1508
|
Unpublish the dashboard.
|
|
1509
1509
|
|
|
1510
1510
|
:param dashboard_id: str
|
|
1511
|
-
UUID identifying the dashboard
|
|
1511
|
+
UUID identifying the published dashboard.
|
|
1512
1512
|
|
|
1513
1513
|
|
|
1514
1514
|
"""
|
|
@@ -1528,7 +1528,7 @@ class LakeviewAPI:
|
|
|
1528
1528
|
|
|
1529
1529
|
:returns: :class:`Dashboard`
|
|
1530
1530
|
"""
|
|
1531
|
-
body = dashboard
|
|
1531
|
+
body = dashboard.as_dict()
|
|
1532
1532
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
1533
1533
|
|
|
1534
1534
|
res = self._api.do('PATCH',
|
|
@@ -1552,7 +1552,7 @@ class LakeviewAPI:
|
|
|
1552
1552
|
|
|
1553
1553
|
:returns: :class:`Schedule`
|
|
1554
1554
|
"""
|
|
1555
|
-
body = schedule
|
|
1555
|
+
body = schedule.as_dict()
|
|
1556
1556
|
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
|
|
1557
1557
|
|
|
1558
1558
|
res = self._api.do('PUT',
|
databricks/sdk/service/iam.py
CHANGED
|
@@ -1150,7 +1150,7 @@ class UpdateWorkspaceAssignments:
|
|
|
1150
1150
|
"""The ID of the user, service principal, or group."""
|
|
1151
1151
|
|
|
1152
1152
|
workspace_id: Optional[int] = None
|
|
1153
|
-
"""The workspace ID
|
|
1153
|
+
"""The workspace ID."""
|
|
1154
1154
|
|
|
1155
1155
|
def as_dict(self) -> dict:
|
|
1156
1156
|
"""Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
|
|
@@ -3385,7 +3385,7 @@ class WorkspaceAssignmentAPI:
|
|
|
3385
3385
|
specified principal.
|
|
3386
3386
|
|
|
3387
3387
|
:param workspace_id: int
|
|
3388
|
-
The workspace ID
|
|
3388
|
+
The workspace ID.
|
|
3389
3389
|
:param principal_id: int
|
|
3390
3390
|
The ID of the user, service principal, or group.
|
|
3391
3391
|
:param permissions: List[:class:`WorkspacePermission`] (optional)
|
databricks/sdk/service/jobs.py
CHANGED
|
@@ -574,8 +574,7 @@ class CreateJob:
|
|
|
574
574
|
"""Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
|
|
575
575
|
as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
|
|
576
576
|
|
|
577
|
-
|
|
578
|
-
an error is thrown."""
|
|
577
|
+
Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
|
|
579
578
|
|
|
580
579
|
schedule: Optional[CronSchedule] = None
|
|
581
580
|
"""An optional periodic schedule for this job. The default behavior is that the job only runs when
|
|
@@ -1752,8 +1751,7 @@ class JobRunAs:
|
|
|
1752
1751
|
"""Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
|
|
1753
1752
|
as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
|
|
1754
1753
|
|
|
1755
|
-
|
|
1756
|
-
an error is thrown."""
|
|
1754
|
+
Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
|
|
1757
1755
|
|
|
1758
1756
|
service_principal_name: Optional[str] = None
|
|
1759
1757
|
"""Application ID of an active service principal. Setting this field requires the
|
|
@@ -1861,8 +1859,7 @@ class JobSettings:
|
|
|
1861
1859
|
"""Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
|
|
1862
1860
|
as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
|
|
1863
1861
|
|
|
1864
|
-
|
|
1865
|
-
an error is thrown."""
|
|
1862
|
+
Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
|
|
1866
1863
|
|
|
1867
1864
|
schedule: Optional[CronSchedule] = None
|
|
1868
1865
|
"""An optional periodic schedule for this job. The default behavior is that the job only runs when
|
|
@@ -3371,6 +3368,10 @@ class RunNow:
|
|
|
3371
3368
|
[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
|
|
3372
3369
|
[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
|
|
3373
3370
|
|
|
3371
|
+
only: Optional[List[str]] = None
|
|
3372
|
+
"""A list of task keys to run inside of the job. If this field is not provided, all tasks in the
|
|
3373
|
+
job will be run."""
|
|
3374
|
+
|
|
3374
3375
|
pipeline_params: Optional[PipelineParams] = None
|
|
3375
3376
|
"""Controls whether the pipeline should perform a full refresh"""
|
|
3376
3377
|
|
|
@@ -3425,6 +3426,7 @@ class RunNow:
|
|
|
3425
3426
|
if self.job_id is not None: body['job_id'] = self.job_id
|
|
3426
3427
|
if self.job_parameters: body['job_parameters'] = self.job_parameters
|
|
3427
3428
|
if self.notebook_params: body['notebook_params'] = self.notebook_params
|
|
3429
|
+
if self.only: body['only'] = [v for v in self.only]
|
|
3428
3430
|
if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
|
|
3429
3431
|
if self.python_named_params: body['python_named_params'] = self.python_named_params
|
|
3430
3432
|
if self.python_params: body['python_params'] = [v for v in self.python_params]
|
|
@@ -3442,6 +3444,7 @@ class RunNow:
|
|
|
3442
3444
|
job_id=d.get('job_id', None),
|
|
3443
3445
|
job_parameters=d.get('job_parameters', None),
|
|
3444
3446
|
notebook_params=d.get('notebook_params', None),
|
|
3447
|
+
only=d.get('only', None),
|
|
3445
3448
|
pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
|
|
3446
3449
|
python_named_params=d.get('python_named_params', None),
|
|
3447
3450
|
python_params=d.get('python_params', None),
|
|
@@ -5754,8 +5757,7 @@ class JobsAPI:
|
|
|
5754
5757
|
Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
|
|
5755
5758
|
not specified, the job/pipeline runs as the user who created the job/pipeline.
|
|
5756
5759
|
|
|
5757
|
-
|
|
5758
|
-
error is thrown.
|
|
5760
|
+
Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
|
|
5759
5761
|
:param schedule: :class:`CronSchedule` (optional)
|
|
5760
5762
|
An optional periodic schedule for this job. The default behavior is that the job only runs when
|
|
5761
5763
|
triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
|
|
@@ -6275,6 +6277,7 @@ class JobsAPI:
|
|
|
6275
6277
|
jar_params: Optional[List[str]] = None,
|
|
6276
6278
|
job_parameters: Optional[Dict[str, str]] = None,
|
|
6277
6279
|
notebook_params: Optional[Dict[str, str]] = None,
|
|
6280
|
+
only: Optional[List[str]] = None,
|
|
6278
6281
|
pipeline_params: Optional[PipelineParams] = None,
|
|
6279
6282
|
python_named_params: Optional[Dict[str, str]] = None,
|
|
6280
6283
|
python_params: Optional[List[str]] = None,
|
|
@@ -6331,6 +6334,9 @@ class JobsAPI:
|
|
|
6331
6334
|
|
|
6332
6335
|
[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
|
|
6333
6336
|
[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
|
|
6337
|
+
:param only: List[str] (optional)
|
|
6338
|
+
A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
|
|
6339
|
+
will be run.
|
|
6334
6340
|
:param pipeline_params: :class:`PipelineParams` (optional)
|
|
6335
6341
|
Controls whether the pipeline should perform a full refresh
|
|
6336
6342
|
:param python_named_params: Dict[str,str] (optional)
|
|
@@ -6382,6 +6388,7 @@ class JobsAPI:
|
|
|
6382
6388
|
if job_id is not None: body['job_id'] = job_id
|
|
6383
6389
|
if job_parameters is not None: body['job_parameters'] = job_parameters
|
|
6384
6390
|
if notebook_params is not None: body['notebook_params'] = notebook_params
|
|
6391
|
+
if only is not None: body['only'] = [v for v in only]
|
|
6385
6392
|
if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
|
|
6386
6393
|
if python_named_params is not None: body['python_named_params'] = python_named_params
|
|
6387
6394
|
if python_params is not None: body['python_params'] = [v for v in python_params]
|
|
@@ -6403,6 +6410,7 @@ class JobsAPI:
|
|
|
6403
6410
|
jar_params: Optional[List[str]] = None,
|
|
6404
6411
|
job_parameters: Optional[Dict[str, str]] = None,
|
|
6405
6412
|
notebook_params: Optional[Dict[str, str]] = None,
|
|
6413
|
+
only: Optional[List[str]] = None,
|
|
6406
6414
|
pipeline_params: Optional[PipelineParams] = None,
|
|
6407
6415
|
python_named_params: Optional[Dict[str, str]] = None,
|
|
6408
6416
|
python_params: Optional[List[str]] = None,
|
|
@@ -6416,6 +6424,7 @@ class JobsAPI:
|
|
|
6416
6424
|
job_id=job_id,
|
|
6417
6425
|
job_parameters=job_parameters,
|
|
6418
6426
|
notebook_params=notebook_params,
|
|
6427
|
+
only=only,
|
|
6419
6428
|
pipeline_params=pipeline_params,
|
|
6420
6429
|
python_named_params=python_named_params,
|
|
6421
6430
|
python_params=python_params,
|
databricks/sdk/service/oauth2.py
CHANGED
|
@@ -61,7 +61,7 @@ class CreatePipeline:
|
|
|
61
61
|
"""Filters on which Pipeline packages to include in the deployed graph."""
|
|
62
62
|
|
|
63
63
|
gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
|
|
64
|
-
"""The definition of a gateway pipeline to support
|
|
64
|
+
"""The definition of a gateway pipeline to support change data capture."""
|
|
65
65
|
|
|
66
66
|
id: Optional[str] = None
|
|
67
67
|
"""Unique identifier for this pipeline."""
|
|
@@ -82,6 +82,9 @@ class CreatePipeline:
|
|
|
82
82
|
photon: Optional[bool] = None
|
|
83
83
|
"""Whether Photon is enabled for this pipeline."""
|
|
84
84
|
|
|
85
|
+
restart_window: Optional[RestartWindow] = None
|
|
86
|
+
"""Restart window of this pipeline."""
|
|
87
|
+
|
|
85
88
|
schema: Optional[str] = None
|
|
86
89
|
"""The default schema (database) where tables are read from or published to. The presence of this
|
|
87
90
|
field implies that the pipeline is in direct publishing mode."""
|
|
@@ -122,6 +125,7 @@ class CreatePipeline:
|
|
|
122
125
|
if self.name is not None: body['name'] = self.name
|
|
123
126
|
if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
|
|
124
127
|
if self.photon is not None: body['photon'] = self.photon
|
|
128
|
+
if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
|
|
125
129
|
if self.schema is not None: body['schema'] = self.schema
|
|
126
130
|
if self.serverless is not None: body['serverless'] = self.serverless
|
|
127
131
|
if self.storage is not None: body['storage'] = self.storage
|
|
@@ -151,6 +155,7 @@ class CreatePipeline:
|
|
|
151
155
|
name=d.get('name', None),
|
|
152
156
|
notifications=_repeated_dict(d, 'notifications', Notifications),
|
|
153
157
|
photon=d.get('photon', None),
|
|
158
|
+
restart_window=_from_dict(d, 'restart_window', RestartWindow),
|
|
154
159
|
schema=d.get('schema', None),
|
|
155
160
|
serverless=d.get('serverless', None),
|
|
156
161
|
storage=d.get('storage', None),
|
|
@@ -285,7 +290,7 @@ class EditPipeline:
|
|
|
285
290
|
"""Filters on which Pipeline packages to include in the deployed graph."""
|
|
286
291
|
|
|
287
292
|
gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
|
|
288
|
-
"""The definition of a gateway pipeline to support
|
|
293
|
+
"""The definition of a gateway pipeline to support change data capture."""
|
|
289
294
|
|
|
290
295
|
id: Optional[str] = None
|
|
291
296
|
"""Unique identifier for this pipeline."""
|
|
@@ -309,6 +314,9 @@ class EditPipeline:
|
|
|
309
314
|
pipeline_id: Optional[str] = None
|
|
310
315
|
"""Unique identifier for this pipeline."""
|
|
311
316
|
|
|
317
|
+
restart_window: Optional[RestartWindow] = None
|
|
318
|
+
"""Restart window of this pipeline."""
|
|
319
|
+
|
|
312
320
|
schema: Optional[str] = None
|
|
313
321
|
"""The default schema (database) where tables are read from or published to. The presence of this
|
|
314
322
|
field implies that the pipeline is in direct publishing mode."""
|
|
@@ -351,6 +359,7 @@ class EditPipeline:
|
|
|
351
359
|
if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
|
|
352
360
|
if self.photon is not None: body['photon'] = self.photon
|
|
353
361
|
if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
|
|
362
|
+
if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
|
|
354
363
|
if self.schema is not None: body['schema'] = self.schema
|
|
355
364
|
if self.serverless is not None: body['serverless'] = self.serverless
|
|
356
365
|
if self.storage is not None: body['storage'] = self.storage
|
|
@@ -381,6 +390,7 @@ class EditPipeline:
|
|
|
381
390
|
notifications=_repeated_dict(d, 'notifications', Notifications),
|
|
382
391
|
photon=d.get('photon', None),
|
|
383
392
|
pipeline_id=d.get('pipeline_id', None),
|
|
393
|
+
restart_window=_from_dict(d, 'restart_window', RestartWindow),
|
|
384
394
|
schema=d.get('schema', None),
|
|
385
395
|
serverless=d.get('serverless', None),
|
|
386
396
|
storage=d.get('storage', None),
|
|
@@ -588,13 +598,13 @@ class GetUpdateResponse:
|
|
|
588
598
|
@dataclass
|
|
589
599
|
class IngestionConfig:
|
|
590
600
|
report: Optional[ReportSpec] = None
|
|
591
|
-
"""Select
|
|
601
|
+
"""Select a specific source report."""
|
|
592
602
|
|
|
593
603
|
schema: Optional[SchemaSpec] = None
|
|
594
|
-
"""Select tables from a specific source schema."""
|
|
604
|
+
"""Select all tables from a specific source schema."""
|
|
595
605
|
|
|
596
606
|
table: Optional[TableSpec] = None
|
|
597
|
-
"""Select
|
|
607
|
+
"""Select a specific source table."""
|
|
598
608
|
|
|
599
609
|
def as_dict(self) -> dict:
|
|
600
610
|
"""Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
|
|
@@ -615,11 +625,11 @@ class IngestionConfig:
|
|
|
615
625
|
@dataclass
|
|
616
626
|
class IngestionGatewayPipelineDefinition:
|
|
617
627
|
connection_id: Optional[str] = None
|
|
618
|
-
"""[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection this
|
|
619
|
-
pipeline uses to communicate with the source."""
|
|
628
|
+
"""[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
|
|
629
|
+
gateway pipeline uses to communicate with the source."""
|
|
620
630
|
|
|
621
631
|
connection_name: Optional[str] = None
|
|
622
|
-
"""Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
|
|
632
|
+
"""Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
|
|
623
633
|
source."""
|
|
624
634
|
|
|
625
635
|
gateway_storage_catalog: Optional[str] = None
|
|
@@ -658,12 +668,12 @@ class IngestionGatewayPipelineDefinition:
|
|
|
658
668
|
@dataclass
|
|
659
669
|
class IngestionPipelineDefinition:
|
|
660
670
|
connection_name: Optional[str] = None
|
|
661
|
-
"""Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with
|
|
662
|
-
source.
|
|
671
|
+
"""Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with
|
|
672
|
+
the source. This is used with connectors for applications like Salesforce, Workday, and so on."""
|
|
663
673
|
|
|
664
674
|
ingestion_gateway_id: Optional[str] = None
|
|
665
|
-
"""Immutable. Identifier for the
|
|
666
|
-
with the source.
|
|
675
|
+
"""Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
|
|
676
|
+
with the source database. This is used with connectors to databases like SQL Server."""
|
|
667
677
|
|
|
668
678
|
objects: Optional[List[IngestionConfig]] = None
|
|
669
679
|
"""Required. Settings specifying tables to replicate and the destination for the replicated tables."""
|
|
@@ -1450,7 +1460,7 @@ class PipelineSpec:
|
|
|
1450
1460
|
"""Filters on which Pipeline packages to include in the deployed graph."""
|
|
1451
1461
|
|
|
1452
1462
|
gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
|
|
1453
|
-
"""The definition of a gateway pipeline to support
|
|
1463
|
+
"""The definition of a gateway pipeline to support change data capture."""
|
|
1454
1464
|
|
|
1455
1465
|
id: Optional[str] = None
|
|
1456
1466
|
"""Unique identifier for this pipeline."""
|
|
@@ -1471,6 +1481,9 @@ class PipelineSpec:
|
|
|
1471
1481
|
photon: Optional[bool] = None
|
|
1472
1482
|
"""Whether Photon is enabled for this pipeline."""
|
|
1473
1483
|
|
|
1484
|
+
restart_window: Optional[RestartWindow] = None
|
|
1485
|
+
"""Restart window of this pipeline."""
|
|
1486
|
+
|
|
1474
1487
|
schema: Optional[str] = None
|
|
1475
1488
|
"""The default schema (database) where tables are read from or published to. The presence of this
|
|
1476
1489
|
field implies that the pipeline is in direct publishing mode."""
|
|
@@ -1509,6 +1522,7 @@ class PipelineSpec:
|
|
|
1509
1522
|
if self.name is not None: body['name'] = self.name
|
|
1510
1523
|
if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
|
|
1511
1524
|
if self.photon is not None: body['photon'] = self.photon
|
|
1525
|
+
if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
|
|
1512
1526
|
if self.schema is not None: body['schema'] = self.schema
|
|
1513
1527
|
if self.serverless is not None: body['serverless'] = self.serverless
|
|
1514
1528
|
if self.storage is not None: body['storage'] = self.storage
|
|
@@ -1536,6 +1550,7 @@ class PipelineSpec:
|
|
|
1536
1550
|
name=d.get('name', None),
|
|
1537
1551
|
notifications=_repeated_dict(d, 'notifications', Notifications),
|
|
1538
1552
|
photon=d.get('photon', None),
|
|
1553
|
+
restart_window=_from_dict(d, 'restart_window', RestartWindow),
|
|
1539
1554
|
schema=d.get('schema', None),
|
|
1540
1555
|
serverless=d.get('serverless', None),
|
|
1541
1556
|
storage=d.get('storage', None),
|
|
@@ -1674,6 +1689,50 @@ class ReportSpec:
|
|
|
1674
1689
|
table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
|
|
1675
1690
|
|
|
1676
1691
|
|
|
1692
|
+
@dataclass
|
|
1693
|
+
class RestartWindow:
|
|
1694
|
+
start_hour: int
|
|
1695
|
+
"""An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
|
|
1696
|
+
Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
|
|
1697
|
+
|
|
1698
|
+
days_of_week: Optional[RestartWindowDaysOfWeek] = None
|
|
1699
|
+
"""Days of week in which the restart is allowed to happen (within a five-hour window starting at
|
|
1700
|
+
start_hour). If not specified all days of the week will be used."""
|
|
1701
|
+
|
|
1702
|
+
time_zone_id: Optional[str] = None
|
|
1703
|
+
"""Time zone id of restart window. See
|
|
1704
|
+
https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
|
|
1705
|
+
for details. If not specified, UTC will be used."""
|
|
1706
|
+
|
|
1707
|
+
def as_dict(self) -> dict:
|
|
1708
|
+
"""Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
|
|
1709
|
+
body = {}
|
|
1710
|
+
if self.days_of_week is not None: body['days_of_week'] = self.days_of_week.value
|
|
1711
|
+
if self.start_hour is not None: body['start_hour'] = self.start_hour
|
|
1712
|
+
if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
|
|
1713
|
+
return body
|
|
1714
|
+
|
|
1715
|
+
@classmethod
|
|
1716
|
+
def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
|
|
1717
|
+
"""Deserializes the RestartWindow from a dictionary."""
|
|
1718
|
+
return cls(days_of_week=_enum(d, 'days_of_week', RestartWindowDaysOfWeek),
|
|
1719
|
+
start_hour=d.get('start_hour', None),
|
|
1720
|
+
time_zone_id=d.get('time_zone_id', None))
|
|
1721
|
+
|
|
1722
|
+
|
|
1723
|
+
class RestartWindowDaysOfWeek(Enum):
|
|
1724
|
+
"""Days of week in which the restart is allowed to happen (within a five-hour window starting at
|
|
1725
|
+
start_hour). If not specified all days of the week will be used."""
|
|
1726
|
+
|
|
1727
|
+
FRIDAY = 'FRIDAY'
|
|
1728
|
+
MONDAY = 'MONDAY'
|
|
1729
|
+
SATURDAY = 'SATURDAY'
|
|
1730
|
+
SUNDAY = 'SUNDAY'
|
|
1731
|
+
THURSDAY = 'THURSDAY'
|
|
1732
|
+
TUESDAY = 'TUESDAY'
|
|
1733
|
+
WEDNESDAY = 'WEDNESDAY'
|
|
1734
|
+
|
|
1735
|
+
|
|
1677
1736
|
@dataclass
|
|
1678
1737
|
class SchemaSpec:
|
|
1679
1738
|
destination_catalog: Optional[str] = None
|
|
@@ -2211,6 +2270,7 @@ class PipelinesAPI:
|
|
|
2211
2270
|
name: Optional[str] = None,
|
|
2212
2271
|
notifications: Optional[List[Notifications]] = None,
|
|
2213
2272
|
photon: Optional[bool] = None,
|
|
2273
|
+
restart_window: Optional[RestartWindow] = None,
|
|
2214
2274
|
schema: Optional[str] = None,
|
|
2215
2275
|
serverless: Optional[bool] = None,
|
|
2216
2276
|
storage: Optional[str] = None,
|
|
@@ -2247,7 +2307,7 @@ class PipelinesAPI:
|
|
|
2247
2307
|
:param filters: :class:`Filters` (optional)
|
|
2248
2308
|
Filters on which Pipeline packages to include in the deployed graph.
|
|
2249
2309
|
:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
|
|
2250
|
-
The definition of a gateway pipeline to support
|
|
2310
|
+
The definition of a gateway pipeline to support change data capture.
|
|
2251
2311
|
:param id: str (optional)
|
|
2252
2312
|
Unique identifier for this pipeline.
|
|
2253
2313
|
:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
|
|
@@ -2261,6 +2321,8 @@ class PipelinesAPI:
|
|
|
2261
2321
|
List of notification settings for this pipeline.
|
|
2262
2322
|
:param photon: bool (optional)
|
|
2263
2323
|
Whether Photon is enabled for this pipeline.
|
|
2324
|
+
:param restart_window: :class:`RestartWindow` (optional)
|
|
2325
|
+
Restart window of this pipeline.
|
|
2264
2326
|
:param schema: str (optional)
|
|
2265
2327
|
The default schema (database) where tables are read from or published to. The presence of this field
|
|
2266
2328
|
implies that the pipeline is in direct publishing mode.
|
|
@@ -2296,6 +2358,7 @@ class PipelinesAPI:
|
|
|
2296
2358
|
if name is not None: body['name'] = name
|
|
2297
2359
|
if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
|
|
2298
2360
|
if photon is not None: body['photon'] = photon
|
|
2361
|
+
if restart_window is not None: body['restart_window'] = restart_window.as_dict()
|
|
2299
2362
|
if schema is not None: body['schema'] = schema
|
|
2300
2363
|
if serverless is not None: body['serverless'] = serverless
|
|
2301
2364
|
if storage is not None: body['storage'] = storage
|
|
@@ -2629,6 +2692,7 @@ class PipelinesAPI:
|
|
|
2629
2692
|
name: Optional[str] = None,
|
|
2630
2693
|
notifications: Optional[List[Notifications]] = None,
|
|
2631
2694
|
photon: Optional[bool] = None,
|
|
2695
|
+
restart_window: Optional[RestartWindow] = None,
|
|
2632
2696
|
schema: Optional[str] = None,
|
|
2633
2697
|
serverless: Optional[bool] = None,
|
|
2634
2698
|
storage: Optional[str] = None,
|
|
@@ -2668,7 +2732,7 @@ class PipelinesAPI:
|
|
|
2668
2732
|
:param filters: :class:`Filters` (optional)
|
|
2669
2733
|
Filters on which Pipeline packages to include in the deployed graph.
|
|
2670
2734
|
:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
|
|
2671
|
-
The definition of a gateway pipeline to support
|
|
2735
|
+
The definition of a gateway pipeline to support change data capture.
|
|
2672
2736
|
:param id: str (optional)
|
|
2673
2737
|
Unique identifier for this pipeline.
|
|
2674
2738
|
:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
|
|
@@ -2682,6 +2746,8 @@ class PipelinesAPI:
|
|
|
2682
2746
|
List of notification settings for this pipeline.
|
|
2683
2747
|
:param photon: bool (optional)
|
|
2684
2748
|
Whether Photon is enabled for this pipeline.
|
|
2749
|
+
:param restart_window: :class:`RestartWindow` (optional)
|
|
2750
|
+
Restart window of this pipeline.
|
|
2685
2751
|
:param schema: str (optional)
|
|
2686
2752
|
The default schema (database) where tables are read from or published to. The presence of this field
|
|
2687
2753
|
implies that the pipeline is in direct publishing mode.
|
|
@@ -2717,6 +2783,7 @@ class PipelinesAPI:
|
|
|
2717
2783
|
if name is not None: body['name'] = name
|
|
2718
2784
|
if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
|
|
2719
2785
|
if photon is not None: body['photon'] = photon
|
|
2786
|
+
if restart_window is not None: body['restart_window'] = restart_window.as_dict()
|
|
2720
2787
|
if schema is not None: body['schema'] = schema
|
|
2721
2788
|
if serverless is not None: body['serverless'] = serverless
|
|
2722
2789
|
if storage is not None: body['storage'] = storage
|
|
@@ -1245,6 +1245,10 @@ class UpdateWorkspaceRequest:
|
|
|
1245
1245
|
customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC
|
|
1246
1246
|
to a customer-managed VPC by updating the workspace to add a network configuration ID."""
|
|
1247
1247
|
|
|
1248
|
+
private_access_settings_id: Optional[str] = None
|
|
1249
|
+
"""The ID of the workspace's private access settings configuration object. This parameter is
|
|
1250
|
+
available only for updating failed workspaces."""
|
|
1251
|
+
|
|
1248
1252
|
storage_configuration_id: Optional[str] = None
|
|
1249
1253
|
"""The ID of the workspace's storage configuration object. This parameter is available only for
|
|
1250
1254
|
updating failed workspaces."""
|
|
@@ -1267,6 +1271,8 @@ class UpdateWorkspaceRequest:
|
|
|
1267
1271
|
if self.network_connectivity_config_id is not None:
|
|
1268
1272
|
body['network_connectivity_config_id'] = self.network_connectivity_config_id
|
|
1269
1273
|
if self.network_id is not None: body['network_id'] = self.network_id
|
|
1274
|
+
if self.private_access_settings_id is not None:
|
|
1275
|
+
body['private_access_settings_id'] = self.private_access_settings_id
|
|
1270
1276
|
if self.storage_configuration_id is not None:
|
|
1271
1277
|
body['storage_configuration_id'] = self.storage_configuration_id
|
|
1272
1278
|
if self.storage_customer_managed_key_id is not None:
|
|
@@ -1284,6 +1290,7 @@ class UpdateWorkspaceRequest:
|
|
|
1284
1290
|
None),
|
|
1285
1291
|
network_connectivity_config_id=d.get('network_connectivity_config_id', None),
|
|
1286
1292
|
network_id=d.get('network_id', None),
|
|
1293
|
+
private_access_settings_id=d.get('private_access_settings_id', None),
|
|
1287
1294
|
storage_configuration_id=d.get('storage_configuration_id', None),
|
|
1288
1295
|
storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
|
|
1289
1296
|
workspace_id=d.get('workspace_id', None))
|
|
@@ -2706,6 +2713,7 @@ class WorkspacesAPI:
|
|
|
2706
2713
|
managed_services_customer_managed_key_id: Optional[str] = None,
|
|
2707
2714
|
network_connectivity_config_id: Optional[str] = None,
|
|
2708
2715
|
network_id: Optional[str] = None,
|
|
2716
|
+
private_access_settings_id: Optional[str] = None,
|
|
2709
2717
|
storage_configuration_id: Optional[str] = None,
|
|
2710
2718
|
storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
|
|
2711
2719
|
"""Update workspace configuration.
|
|
@@ -2824,6 +2832,9 @@ class WorkspacesAPI:
|
|
|
2824
2832
|
The ID of the workspace's network configuration object. Used only if you already use a
|
|
2825
2833
|
customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
|
|
2826
2834
|
customer-managed VPC by updating the workspace to add a network configuration ID.
|
|
2835
|
+
:param private_access_settings_id: str (optional)
|
|
2836
|
+
The ID of the workspace's private access settings configuration object. This parameter is available
|
|
2837
|
+
only for updating failed workspaces.
|
|
2827
2838
|
:param storage_configuration_id: str (optional)
|
|
2828
2839
|
The ID of the workspace's storage configuration object. This parameter is available only for
|
|
2829
2840
|
updating failed workspaces.
|
|
@@ -2844,6 +2855,8 @@ class WorkspacesAPI:
|
|
|
2844
2855
|
if network_connectivity_config_id is not None:
|
|
2845
2856
|
body['network_connectivity_config_id'] = network_connectivity_config_id
|
|
2846
2857
|
if network_id is not None: body['network_id'] = network_id
|
|
2858
|
+
if private_access_settings_id is not None:
|
|
2859
|
+
body['private_access_settings_id'] = private_access_settings_id
|
|
2847
2860
|
if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id
|
|
2848
2861
|
if storage_customer_managed_key_id is not None:
|
|
2849
2862
|
body['storage_customer_managed_key_id'] = storage_customer_managed_key_id
|
|
@@ -2867,6 +2880,7 @@ class WorkspacesAPI:
|
|
|
2867
2880
|
managed_services_customer_managed_key_id: Optional[str] = None,
|
|
2868
2881
|
network_connectivity_config_id: Optional[str] = None,
|
|
2869
2882
|
network_id: Optional[str] = None,
|
|
2883
|
+
private_access_settings_id: Optional[str] = None,
|
|
2870
2884
|
storage_configuration_id: Optional[str] = None,
|
|
2871
2885
|
storage_customer_managed_key_id: Optional[str] = None,
|
|
2872
2886
|
timeout=timedelta(minutes=20)) -> Workspace:
|
|
@@ -2876,6 +2890,7 @@ class WorkspacesAPI:
|
|
|
2876
2890
|
managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
|
|
2877
2891
|
network_connectivity_config_id=network_connectivity_config_id,
|
|
2878
2892
|
network_id=network_id,
|
|
2893
|
+
private_access_settings_id=private_access_settings_id,
|
|
2879
2894
|
storage_configuration_id=storage_configuration_id,
|
|
2880
2895
|
storage_customer_managed_key_id=storage_customer_managed_key_id,
|
|
2881
2896
|
workspace_id=workspace_id).result(timeout=timeout)
|
|
@@ -2943,6 +2943,7 @@ class UpdateNotificationDestinationRequest:
|
|
|
2943
2943
|
"""The display name for the notification destination."""
|
|
2944
2944
|
|
|
2945
2945
|
id: Optional[str] = None
|
|
2946
|
+
"""UUID identifying notification destination."""
|
|
2946
2947
|
|
|
2947
2948
|
def as_dict(self) -> dict:
|
|
2948
2949
|
"""Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
|
|
@@ -4670,6 +4671,7 @@ class NotificationDestinationsAPI:
|
|
|
4670
4671
|
required in the request body.
|
|
4671
4672
|
|
|
4672
4673
|
:param id: str
|
|
4674
|
+
UUID identifying notification destination.
|
|
4673
4675
|
:param config: :class:`Config` (optional)
|
|
4674
4676
|
The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
|
|
4675
4677
|
:param display_name: str (optional)
|
|
@@ -4984,7 +4986,7 @@ class TokenManagementAPI:
|
|
|
4984
4986
|
Deletes a token, specified by its ID.
|
|
4985
4987
|
|
|
4986
4988
|
:param token_id: str
|
|
4987
|
-
The ID of the token to
|
|
4989
|
+
The ID of the token to revoke.
|
|
4988
4990
|
|
|
4989
4991
|
|
|
4990
4992
|
"""
|
|
@@ -984,6 +984,8 @@ class SharedDataObject:
|
|
|
984
984
|
class SharedDataObjectDataObjectType(Enum):
|
|
985
985
|
"""The type of the data object."""
|
|
986
986
|
|
|
987
|
+
FEATURE_SPEC = 'FEATURE_SPEC'
|
|
988
|
+
FUNCTION = 'FUNCTION'
|
|
987
989
|
MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
|
|
988
990
|
MODEL = 'MODEL'
|
|
989
991
|
NOTEBOOK_FILE = 'NOTEBOOK_FILE'
|
|
@@ -684,6 +684,7 @@ class ImportFormat(Enum):
|
|
|
684
684
|
DBC = 'DBC'
|
|
685
685
|
HTML = 'HTML'
|
|
686
686
|
JUPYTER = 'JUPYTER'
|
|
687
|
+
RAW = 'RAW'
|
|
687
688
|
R_MARKDOWN = 'R_MARKDOWN'
|
|
688
689
|
SOURCE = 'SOURCE'
|
|
689
690
|
|
|
@@ -1799,7 +1800,7 @@ class ReposAPI:
|
|
|
1799
1800
|
Deletes the specified repo.
|
|
1800
1801
|
|
|
1801
1802
|
:param repo_id: int
|
|
1802
|
-
ID
|
|
1803
|
+
The ID for the corresponding repo to delete.
|
|
1803
1804
|
|
|
1804
1805
|
|
|
1805
1806
|
"""
|
databricks/sdk/version.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = '0.
|
|
1
|
+
__version__ = '0.38.0'
|