databricks-sdk 0.53.0__py3-none-any.whl → 0.55.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Consult the package registry's advisory page for more details.

@@ -368,6 +368,13 @@ class FederationPolicy:
368
368
  oidc_policy: Optional[OidcFederationPolicy] = None
369
369
  """Specifies the policy to use for validating OIDC claims in your federated tokens."""
370
370
 
371
+ policy_id: Optional[str] = None
372
+ """The ID of the federation policy."""
373
+
374
+ service_principal_id: Optional[int] = None
375
+ """The service principal ID that this federation policy applies to. Only set for service principal
376
+ federation policies."""
377
+
371
378
  uid: Optional[str] = None
372
379
  """Unique, immutable id of the federation policy."""
373
380
 
@@ -385,6 +392,10 @@ class FederationPolicy:
385
392
  body["name"] = self.name
386
393
  if self.oidc_policy:
387
394
  body["oidc_policy"] = self.oidc_policy.as_dict()
395
+ if self.policy_id is not None:
396
+ body["policy_id"] = self.policy_id
397
+ if self.service_principal_id is not None:
398
+ body["service_principal_id"] = self.service_principal_id
388
399
  if self.uid is not None:
389
400
  body["uid"] = self.uid
390
401
  if self.update_time is not None:
@@ -402,6 +413,10 @@ class FederationPolicy:
402
413
  body["name"] = self.name
403
414
  if self.oidc_policy:
404
415
  body["oidc_policy"] = self.oidc_policy
416
+ if self.policy_id is not None:
417
+ body["policy_id"] = self.policy_id
418
+ if self.service_principal_id is not None:
419
+ body["service_principal_id"] = self.service_principal_id
405
420
  if self.uid is not None:
406
421
  body["uid"] = self.uid
407
422
  if self.update_time is not None:
@@ -416,6 +431,8 @@ class FederationPolicy:
416
431
  description=d.get("description", None),
417
432
  name=d.get("name", None),
418
433
  oidc_policy=_from_dict(d, "oidc_policy", OidcFederationPolicy),
434
+ policy_id=d.get("policy_id", None),
435
+ service_principal_id=d.get("service_principal_id", None),
419
436
  uid=d.get("uid", None),
420
437
  update_time=d.get("update_time", None),
421
438
  )
@@ -89,6 +89,11 @@ class CreatePipeline:
89
89
  restart_window: Optional[RestartWindow] = None
90
90
  """Restart window of this pipeline."""
91
91
 
92
+ root_path: Optional[str] = None
93
+ """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
94
+ Databricks user interface and it is added to sys.path when executing Python sources during
95
+ pipeline execution."""
96
+
92
97
  run_as: Optional[RunAs] = None
93
98
  """Write-only setting, available only in Create/Update calls. Specifies the user or service
94
99
  principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
@@ -159,6 +164,8 @@ class CreatePipeline:
159
164
  body["photon"] = self.photon
160
165
  if self.restart_window:
161
166
  body["restart_window"] = self.restart_window.as_dict()
167
+ if self.root_path is not None:
168
+ body["root_path"] = self.root_path
162
169
  if self.run_as:
163
170
  body["run_as"] = self.run_as.as_dict()
164
171
  if self.schema is not None:
@@ -218,6 +225,8 @@ class CreatePipeline:
218
225
  body["photon"] = self.photon
219
226
  if self.restart_window:
220
227
  body["restart_window"] = self.restart_window
228
+ if self.root_path is not None:
229
+ body["root_path"] = self.root_path
221
230
  if self.run_as:
222
231
  body["run_as"] = self.run_as
223
232
  if self.schema is not None:
@@ -257,6 +266,7 @@ class CreatePipeline:
257
266
  notifications=_repeated_dict(d, "notifications", Notifications),
258
267
  photon=d.get("photon", None),
259
268
  restart_window=_from_dict(d, "restart_window", RestartWindow),
269
+ root_path=d.get("root_path", None),
260
270
  run_as=_from_dict(d, "run_as", RunAs),
261
271
  schema=d.get("schema", None),
262
272
  serverless=d.get("serverless", None),
@@ -473,6 +483,11 @@ class EditPipeline:
473
483
  restart_window: Optional[RestartWindow] = None
474
484
  """Restart window of this pipeline."""
475
485
 
486
+ root_path: Optional[str] = None
487
+ """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
488
+ Databricks user interface and it is added to sys.path when executing Python sources during
489
+ pipeline execution."""
490
+
476
491
  run_as: Optional[RunAs] = None
477
492
  """Write-only setting, available only in Create/Update calls. Specifies the user or service
478
493
  principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
@@ -545,6 +560,8 @@ class EditPipeline:
545
560
  body["pipeline_id"] = self.pipeline_id
546
561
  if self.restart_window:
547
562
  body["restart_window"] = self.restart_window.as_dict()
563
+ if self.root_path is not None:
564
+ body["root_path"] = self.root_path
548
565
  if self.run_as:
549
566
  body["run_as"] = self.run_as.as_dict()
550
567
  if self.schema is not None:
@@ -606,6 +623,8 @@ class EditPipeline:
606
623
  body["pipeline_id"] = self.pipeline_id
607
624
  if self.restart_window:
608
625
  body["restart_window"] = self.restart_window
626
+ if self.root_path is not None:
627
+ body["root_path"] = self.root_path
609
628
  if self.run_as:
610
629
  body["run_as"] = self.run_as
611
630
  if self.schema is not None:
@@ -646,6 +665,7 @@ class EditPipeline:
646
665
  photon=d.get("photon", None),
647
666
  pipeline_id=d.get("pipeline_id", None),
648
667
  restart_window=_from_dict(d, "restart_window", RestartWindow),
668
+ root_path=d.get("root_path", None),
649
669
  run_as=_from_dict(d, "run_as", RunAs),
650
670
  schema=d.get("schema", None),
651
671
  serverless=d.get("serverless", None),
@@ -1103,6 +1123,10 @@ class IngestionPipelineDefinition:
1103
1123
  objects: Optional[List[IngestionConfig]] = None
1104
1124
  """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
1105
1125
 
1126
+ source_type: Optional[IngestionSourceType] = None
1127
+ """The type of the foreign source. The source type will be inferred from the source connection or
1128
+ ingestion gateway. This field is output only and will be ignored if provided."""
1129
+
1106
1130
  table_configuration: Optional[TableSpecificConfig] = None
1107
1131
  """Configuration settings to control the ingestion of tables. These settings are applied to all
1108
1132
  tables in the pipeline."""
@@ -1116,6 +1140,8 @@ class IngestionPipelineDefinition:
1116
1140
  body["ingestion_gateway_id"] = self.ingestion_gateway_id
1117
1141
  if self.objects:
1118
1142
  body["objects"] = [v.as_dict() for v in self.objects]
1143
+ if self.source_type is not None:
1144
+ body["source_type"] = self.source_type.value
1119
1145
  if self.table_configuration:
1120
1146
  body["table_configuration"] = self.table_configuration.as_dict()
1121
1147
  return body
@@ -1129,6 +1155,8 @@ class IngestionPipelineDefinition:
1129
1155
  body["ingestion_gateway_id"] = self.ingestion_gateway_id
1130
1156
  if self.objects:
1131
1157
  body["objects"] = self.objects
1158
+ if self.source_type is not None:
1159
+ body["source_type"] = self.source_type
1132
1160
  if self.table_configuration:
1133
1161
  body["table_configuration"] = self.table_configuration
1134
1162
  return body
@@ -1140,10 +1168,27 @@ class IngestionPipelineDefinition:
1140
1168
  connection_name=d.get("connection_name", None),
1141
1169
  ingestion_gateway_id=d.get("ingestion_gateway_id", None),
1142
1170
  objects=_repeated_dict(d, "objects", IngestionConfig),
1171
+ source_type=_enum(d, "source_type", IngestionSourceType),
1143
1172
  table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
1144
1173
  )
1145
1174
 
1146
1175
 
1176
+ class IngestionSourceType(Enum):
1177
+
1178
+ DYNAMICS365 = "DYNAMICS365"
1179
+ GA4_RAW_DATA = "GA4_RAW_DATA"
1180
+ MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
1181
+ MYSQL = "MYSQL"
1182
+ NETSUITE = "NETSUITE"
1183
+ ORACLE = "ORACLE"
1184
+ POSTGRESQL = "POSTGRESQL"
1185
+ SALESFORCE = "SALESFORCE"
1186
+ SERVICENOW = "SERVICENOW"
1187
+ SHAREPOINT = "SHAREPOINT"
1188
+ SQLSERVER = "SQLSERVER"
1189
+ WORKDAY_RAAS = "WORKDAY_RAAS"
1190
+
1191
+
1147
1192
  @dataclass
1148
1193
  class ListPipelineEventsResponse:
1149
1194
  events: Optional[List[PipelineEvent]] = None
@@ -1508,6 +1553,31 @@ class Origin:
1508
1553
  )
1509
1554
 
1510
1555
 
1556
+ @dataclass
1557
+ class PathPattern:
1558
+ include: Optional[str] = None
1559
+ """The source code to include for pipelines"""
1560
+
1561
+ def as_dict(self) -> dict:
1562
+ """Serializes the PathPattern into a dictionary suitable for use as a JSON request body."""
1563
+ body = {}
1564
+ if self.include is not None:
1565
+ body["include"] = self.include
1566
+ return body
1567
+
1568
+ def as_shallow_dict(self) -> dict:
1569
+ """Serializes the PathPattern into a shallow dictionary of its immediate attributes."""
1570
+ body = {}
1571
+ if self.include is not None:
1572
+ body["include"] = self.include
1573
+ return body
1574
+
1575
+ @classmethod
1576
+ def from_dict(cls, d: Dict[str, Any]) -> PathPattern:
1577
+ """Deserializes the PathPattern from a dictionary."""
1578
+ return cls(include=d.get("include", None))
1579
+
1580
+
1511
1581
  @dataclass
1512
1582
  class PipelineAccessControlRequest:
1513
1583
  group_name: Optional[str] = None
@@ -2018,6 +2088,10 @@ class PipelineLibrary:
2018
2088
  file: Optional[FileLibrary] = None
2019
2089
  """The path to a file that defines a pipeline and is stored in the Databricks Repos."""
2020
2090
 
2091
+ glob: Optional[PathPattern] = None
2092
+ """The unified field to include source codes. Each entry can be a notebook path, a file path, or a
2093
+ folder path that ends `/**`. This field cannot be used together with `notebook` or `file`."""
2094
+
2021
2095
  jar: Optional[str] = None
2022
2096
  """URI of the jar to be installed. Currently only DBFS is supported."""
2023
2097
 
@@ -2035,6 +2109,8 @@ class PipelineLibrary:
2035
2109
  body = {}
2036
2110
  if self.file:
2037
2111
  body["file"] = self.file.as_dict()
2112
+ if self.glob:
2113
+ body["glob"] = self.glob.as_dict()
2038
2114
  if self.jar is not None:
2039
2115
  body["jar"] = self.jar
2040
2116
  if self.maven:
@@ -2050,6 +2126,8 @@ class PipelineLibrary:
2050
2126
  body = {}
2051
2127
  if self.file:
2052
2128
  body["file"] = self.file
2129
+ if self.glob:
2130
+ body["glob"] = self.glob
2053
2131
  if self.jar is not None:
2054
2132
  body["jar"] = self.jar
2055
2133
  if self.maven:
@@ -2065,6 +2143,7 @@ class PipelineLibrary:
2065
2143
  """Deserializes the PipelineLibrary from a dictionary."""
2066
2144
  return cls(
2067
2145
  file=_from_dict(d, "file", FileLibrary),
2146
+ glob=_from_dict(d, "glob", PathPattern),
2068
2147
  jar=d.get("jar", None),
2069
2148
  maven=_from_dict(d, "maven", compute.MavenLibrary),
2070
2149
  notebook=_from_dict(d, "notebook", NotebookLibrary),
@@ -2293,6 +2372,11 @@ class PipelineSpec:
2293
2372
  restart_window: Optional[RestartWindow] = None
2294
2373
  """Restart window of this pipeline."""
2295
2374
 
2375
+ root_path: Optional[str] = None
2376
+ """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
2377
+ Databricks user interface and it is added to sys.path when executing Python sources during
2378
+ pipeline execution."""
2379
+
2296
2380
  schema: Optional[str] = None
2297
2381
  """The default schema (database) where tables are read from or published to."""
2298
2382
 
@@ -2351,6 +2435,8 @@ class PipelineSpec:
2351
2435
  body["photon"] = self.photon
2352
2436
  if self.restart_window:
2353
2437
  body["restart_window"] = self.restart_window.as_dict()
2438
+ if self.root_path is not None:
2439
+ body["root_path"] = self.root_path
2354
2440
  if self.schema is not None:
2355
2441
  body["schema"] = self.schema
2356
2442
  if self.serverless is not None:
@@ -2404,6 +2490,8 @@ class PipelineSpec:
2404
2490
  body["photon"] = self.photon
2405
2491
  if self.restart_window:
2406
2492
  body["restart_window"] = self.restart_window
2493
+ if self.root_path is not None:
2494
+ body["root_path"] = self.root_path
2407
2495
  if self.schema is not None:
2408
2496
  body["schema"] = self.schema
2409
2497
  if self.serverless is not None:
@@ -2439,6 +2527,7 @@ class PipelineSpec:
2439
2527
  notifications=_repeated_dict(d, "notifications", Notifications),
2440
2528
  photon=d.get("photon", None),
2441
2529
  restart_window=_from_dict(d, "restart_window", RestartWindow),
2530
+ root_path=d.get("root_path", None),
2442
2531
  schema=d.get("schema", None),
2443
2532
  serverless=d.get("serverless", None),
2444
2533
  storage=d.get("storage", None),
@@ -2996,6 +3085,7 @@ class StartUpdateCause(Enum):
2996
3085
  """What triggered this update."""
2997
3086
 
2998
3087
  API_CALL = "API_CALL"
3088
+ INFRASTRUCTURE_MAINTENANCE = "INFRASTRUCTURE_MAINTENANCE"
2999
3089
  JOB_TASK = "JOB_TASK"
3000
3090
  RETRY_ON_FAILURE = "RETRY_ON_FAILURE"
3001
3091
  SCHEMA_CHANGE = "SCHEMA_CHANGE"
@@ -3321,6 +3411,7 @@ class UpdateInfoCause(Enum):
3321
3411
  """What triggered this update."""
3322
3412
 
3323
3413
  API_CALL = "API_CALL"
3414
+ INFRASTRUCTURE_MAINTENANCE = "INFRASTRUCTURE_MAINTENANCE"
3324
3415
  JOB_TASK = "JOB_TASK"
3325
3416
  RETRY_ON_FAILURE = "RETRY_ON_FAILURE"
3326
3417
  SCHEMA_CHANGE = "SCHEMA_CHANGE"
@@ -3472,6 +3563,7 @@ class PipelinesAPI:
3472
3563
  notifications: Optional[List[Notifications]] = None,
3473
3564
  photon: Optional[bool] = None,
3474
3565
  restart_window: Optional[RestartWindow] = None,
3566
+ root_path: Optional[str] = None,
3475
3567
  run_as: Optional[RunAs] = None,
3476
3568
  schema: Optional[str] = None,
3477
3569
  serverless: Optional[bool] = None,
@@ -3528,6 +3620,10 @@ class PipelinesAPI:
3528
3620
  Whether Photon is enabled for this pipeline.
3529
3621
  :param restart_window: :class:`RestartWindow` (optional)
3530
3622
  Restart window of this pipeline.
3623
+ :param root_path: str (optional)
3624
+ Root path for this pipeline. This is used as the root directory when editing the pipeline in the
3625
+ Databricks user interface and it is added to sys.path when executing Python sources during pipeline
3626
+ execution.
3531
3627
  :param run_as: :class:`RunAs` (optional)
3532
3628
  Write-only setting, available only in Create/Update calls. Specifies the user or service principal
3533
3629
  that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
@@ -3592,6 +3688,8 @@ class PipelinesAPI:
3592
3688
  body["photon"] = photon
3593
3689
  if restart_window is not None:
3594
3690
  body["restart_window"] = restart_window.as_dict()
3691
+ if root_path is not None:
3692
+ body["root_path"] = root_path
3595
3693
  if run_as is not None:
3596
3694
  body["run_as"] = run_as.as_dict()
3597
3695
  if schema is not None:
@@ -3980,6 +4078,7 @@ class PipelinesAPI:
3980
4078
  notifications: Optional[List[Notifications]] = None,
3981
4079
  photon: Optional[bool] = None,
3982
4080
  restart_window: Optional[RestartWindow] = None,
4081
+ root_path: Optional[str] = None,
3983
4082
  run_as: Optional[RunAs] = None,
3984
4083
  schema: Optional[str] = None,
3985
4084
  serverless: Optional[bool] = None,
@@ -4039,6 +4138,10 @@ class PipelinesAPI:
4039
4138
  Whether Photon is enabled for this pipeline.
4040
4139
  :param restart_window: :class:`RestartWindow` (optional)
4041
4140
  Restart window of this pipeline.
4141
+ :param root_path: str (optional)
4142
+ Root path for this pipeline. This is used as the root directory when editing the pipeline in the
4143
+ Databricks user interface and it is added to sys.path when executing Python sources during pipeline
4144
+ execution.
4042
4145
  :param run_as: :class:`RunAs` (optional)
4043
4146
  Write-only setting, available only in Create/Update calls. Specifies the user or service principal
4044
4147
  that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
@@ -4103,6 +4206,8 @@ class PipelinesAPI:
4103
4206
  body["photon"] = photon
4104
4207
  if restart_window is not None:
4105
4208
  body["restart_window"] = restart_window.as_dict()
4209
+ if root_path is not None:
4210
+ body["root_path"] = root_path
4106
4211
  if run_as is not None:
4107
4212
  body["run_as"] = run_as.as_dict()
4108
4213
  if schema is not None: