databricks-sdk 0.57.0__py3-none-any.whl → 0.59.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (31)
  1. databricks/sdk/__init__.py +38 -9
  2. databricks/sdk/service/aibuilder.py +0 -163
  3. databricks/sdk/service/apps.py +53 -49
  4. databricks/sdk/service/billing.py +62 -223
  5. databricks/sdk/service/catalog.py +3052 -3707
  6. databricks/sdk/service/cleanrooms.py +5 -54
  7. databricks/sdk/service/compute.py +579 -2715
  8. databricks/sdk/service/dashboards.py +108 -317
  9. databricks/sdk/service/database.py +603 -122
  10. databricks/sdk/service/files.py +2 -218
  11. databricks/sdk/service/iam.py +19 -298
  12. databricks/sdk/service/jobs.py +77 -1263
  13. databricks/sdk/service/marketplace.py +3 -575
  14. databricks/sdk/service/ml.py +816 -2734
  15. databricks/sdk/service/oauth2.py +122 -238
  16. databricks/sdk/service/pipelines.py +133 -724
  17. databricks/sdk/service/provisioning.py +36 -757
  18. databricks/sdk/service/qualitymonitorv2.py +0 -18
  19. databricks/sdk/service/serving.py +37 -583
  20. databricks/sdk/service/settings.py +282 -1768
  21. databricks/sdk/service/sharing.py +6 -478
  22. databricks/sdk/service/sql.py +129 -1696
  23. databricks/sdk/service/vectorsearch.py +0 -410
  24. databricks/sdk/service/workspace.py +252 -727
  25. databricks/sdk/version.py +1 -1
  26. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.59.0.dist-info}/METADATA +1 -1
  27. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.59.0.dist-info}/RECORD +31 -31
  28. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.59.0.dist-info}/WHEEL +0 -0
  29. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.59.0.dist-info}/licenses/LICENSE +0 -0
  30. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.59.0.dist-info}/licenses/NOTICE +0 -0
  31. {databricks_sdk-0.57.0.dist-info → databricks_sdk-0.59.0.dist-info}/top_level.txt +0 -0
@@ -21,279 +21,6 @@ from databricks.sdk.service import compute
  # all definitions in this file are in alphabetical order


- @dataclass
- class CreatePipeline:
-     allow_duplicate_names: Optional[bool] = None
-     """If false, deployment will fail if name conflicts with that of another pipeline."""
-
-     budget_policy_id: Optional[str] = None
-     """Budget policy of this pipeline."""
-
-     catalog: Optional[str] = None
-     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
-     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
-     `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity
-     Catalog."""
-
-     channel: Optional[str] = None
-     """DLT Release Channel that specifies which version to use."""
-
-     clusters: Optional[List[PipelineCluster]] = None
-     """Cluster settings for this pipeline deployment."""
-
-     configuration: Optional[Dict[str, str]] = None
-     """String-String configuration for this pipeline execution."""
-
-     continuous: Optional[bool] = None
-     """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
-     deployment: Optional[PipelineDeployment] = None
-     """Deployment type of this pipeline."""
-
-     development: Optional[bool] = None
-     """Whether the pipeline is in Development mode. Defaults to false."""
-
-     dry_run: Optional[bool] = None
-
-     edition: Optional[str] = None
-     """Pipeline product edition."""
-
-     environment: Optional[PipelinesEnvironment] = None
-     """Environment specification for this pipeline used to install dependencies."""
-
-     event_log: Optional[EventLogSpec] = None
-     """Event log configuration for this pipeline"""
-
-     filters: Optional[Filters] = None
-     """Filters on which Pipeline packages to include in the deployed graph."""
-
-     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-     """The definition of a gateway pipeline to support change data capture."""
-
-     id: Optional[str] = None
-     """Unique identifier for this pipeline."""
-
-     ingestion_definition: Optional[IngestionPipelineDefinition] = None
-     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-     'libraries', 'schema', 'target', or 'catalog' settings."""
-
-     libraries: Optional[List[PipelineLibrary]] = None
-     """Libraries or code needed by this deployment."""
-
-     name: Optional[str] = None
-     """Friendly identifier for this pipeline."""
-
-     notifications: Optional[List[Notifications]] = None
-     """List of notification settings for this pipeline."""
-
-     photon: Optional[bool] = None
-     """Whether Photon is enabled for this pipeline."""
-
-     restart_window: Optional[RestartWindow] = None
-     """Restart window of this pipeline."""
-
-     root_path: Optional[str] = None
-     """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
-     Databricks user interface and it is added to sys.path when executing Python sources during
-     pipeline execution."""
-
-     run_as: Optional[RunAs] = None
-     """Write-only setting, available only in Create/Update calls. Specifies the user or service
-     principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
-     the pipeline.
-
-     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-     is thrown."""
-
-     schema: Optional[str] = None
-     """The default schema (database) where tables are read from or published to."""
-
-     serverless: Optional[bool] = None
-     """Whether serverless compute is enabled for this pipeline."""
-
-     storage: Optional[str] = None
-     """DBFS root directory for storing checkpoints and tables."""
-
-     tags: Optional[Dict[str, str]] = None
-     """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
-     and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
-     pipeline."""
-
-     target: Optional[str] = None
-     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
-     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
-     deprecated for pipeline creation in favor of the `schema` field."""
-
-     trigger: Optional[PipelineTrigger] = None
-     """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
-
-     def as_dict(self) -> dict:
-         """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         if self.allow_duplicate_names is not None:
-             body["allow_duplicate_names"] = self.allow_duplicate_names
-         if self.budget_policy_id is not None:
-             body["budget_policy_id"] = self.budget_policy_id
-         if self.catalog is not None:
-             body["catalog"] = self.catalog
-         if self.channel is not None:
-             body["channel"] = self.channel
-         if self.clusters:
-             body["clusters"] = [v.as_dict() for v in self.clusters]
-         if self.configuration:
-             body["configuration"] = self.configuration
-         if self.continuous is not None:
-             body["continuous"] = self.continuous
-         if self.deployment:
-             body["deployment"] = self.deployment.as_dict()
-         if self.development is not None:
-             body["development"] = self.development
-         if self.dry_run is not None:
-             body["dry_run"] = self.dry_run
-         if self.edition is not None:
-             body["edition"] = self.edition
-         if self.environment:
-             body["environment"] = self.environment.as_dict()
-         if self.event_log:
-             body["event_log"] = self.event_log.as_dict()
-         if self.filters:
-             body["filters"] = self.filters.as_dict()
-         if self.gateway_definition:
-             body["gateway_definition"] = self.gateway_definition.as_dict()
-         if self.id is not None:
-             body["id"] = self.id
-         if self.ingestion_definition:
-             body["ingestion_definition"] = self.ingestion_definition.as_dict()
-         if self.libraries:
-             body["libraries"] = [v.as_dict() for v in self.libraries]
-         if self.name is not None:
-             body["name"] = self.name
-         if self.notifications:
-             body["notifications"] = [v.as_dict() for v in self.notifications]
-         if self.photon is not None:
-             body["photon"] = self.photon
-         if self.restart_window:
-             body["restart_window"] = self.restart_window.as_dict()
-         if self.root_path is not None:
-             body["root_path"] = self.root_path
-         if self.run_as:
-             body["run_as"] = self.run_as.as_dict()
-         if self.schema is not None:
-             body["schema"] = self.schema
-         if self.serverless is not None:
-             body["serverless"] = self.serverless
-         if self.storage is not None:
-             body["storage"] = self.storage
-         if self.tags:
-             body["tags"] = self.tags
-         if self.target is not None:
-             body["target"] = self.target
-         if self.trigger:
-             body["trigger"] = self.trigger.as_dict()
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes."""
-         body = {}
-         if self.allow_duplicate_names is not None:
-             body["allow_duplicate_names"] = self.allow_duplicate_names
-         if self.budget_policy_id is not None:
-             body["budget_policy_id"] = self.budget_policy_id
-         if self.catalog is not None:
-             body["catalog"] = self.catalog
-         if self.channel is not None:
-             body["channel"] = self.channel
-         if self.clusters:
-             body["clusters"] = self.clusters
-         if self.configuration:
-             body["configuration"] = self.configuration
-         if self.continuous is not None:
-             body["continuous"] = self.continuous
-         if self.deployment:
-             body["deployment"] = self.deployment
-         if self.development is not None:
-             body["development"] = self.development
-         if self.dry_run is not None:
-             body["dry_run"] = self.dry_run
-         if self.edition is not None:
-             body["edition"] = self.edition
-         if self.environment:
-             body["environment"] = self.environment
-         if self.event_log:
-             body["event_log"] = self.event_log
-         if self.filters:
-             body["filters"] = self.filters
-         if self.gateway_definition:
-             body["gateway_definition"] = self.gateway_definition
-         if self.id is not None:
-             body["id"] = self.id
-         if self.ingestion_definition:
-             body["ingestion_definition"] = self.ingestion_definition
-         if self.libraries:
-             body["libraries"] = self.libraries
-         if self.name is not None:
-             body["name"] = self.name
-         if self.notifications:
-             body["notifications"] = self.notifications
-         if self.photon is not None:
-             body["photon"] = self.photon
-         if self.restart_window:
-             body["restart_window"] = self.restart_window
-         if self.root_path is not None:
-             body["root_path"] = self.root_path
-         if self.run_as:
-             body["run_as"] = self.run_as
-         if self.schema is not None:
-             body["schema"] = self.schema
-         if self.serverless is not None:
-             body["serverless"] = self.serverless
-         if self.storage is not None:
-             body["storage"] = self.storage
-         if self.tags:
-             body["tags"] = self.tags
-         if self.target is not None:
-             body["target"] = self.target
-         if self.trigger:
-             body["trigger"] = self.trigger
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline:
-         """Deserializes the CreatePipeline from a dictionary."""
-         return cls(
-             allow_duplicate_names=d.get("allow_duplicate_names", None),
-             budget_policy_id=d.get("budget_policy_id", None),
-             catalog=d.get("catalog", None),
-             channel=d.get("channel", None),
-             clusters=_repeated_dict(d, "clusters", PipelineCluster),
-             configuration=d.get("configuration", None),
-             continuous=d.get("continuous", None),
-             deployment=_from_dict(d, "deployment", PipelineDeployment),
-             development=d.get("development", None),
-             dry_run=d.get("dry_run", None),
-             edition=d.get("edition", None),
-             environment=_from_dict(d, "environment", PipelinesEnvironment),
-             event_log=_from_dict(d, "event_log", EventLogSpec),
-             filters=_from_dict(d, "filters", Filters),
-             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-             id=d.get("id", None),
-             ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-             libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-             name=d.get("name", None),
-             notifications=_repeated_dict(d, "notifications", Notifications),
-             photon=d.get("photon", None),
-             restart_window=_from_dict(d, "restart_window", RestartWindow),
-             root_path=d.get("root_path", None),
-             run_as=_from_dict(d, "run_as", RunAs),
-             schema=d.get("schema", None),
-             serverless=d.get("serverless", None),
-             storage=d.get("storage", None),
-             tags=d.get("tags", None),
-             target=d.get("target", None),
-             trigger=_from_dict(d, "trigger", PipelineTrigger),
-         )
-
-
  @dataclass
  class CreatePipelineResponse:
      effective_settings: Optional[PipelineSpec] = None
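
Note: the CreatePipeline request dataclass removed above (EditPipeline follows in the next hunk) was a thin wrapper around the create endpoint; the PipelinesAPI methods documented later in this diff still take the same fields as keyword arguments. A minimal sketch of creating a pipeline that way - the WorkspaceClient setup and the response's pipeline_id field follow the SDK's usual conventions and are assumptions, not shown in this diff:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # resolves auth from the environment or ~/.databrickscfg

    # Keyword arguments mirror the CreatePipeline fields listed above.
    resp = w.pipelines.create(
        name="demo-pipeline",
        catalog="main",  # publish to Unity Catalog
        schema="etl",
        serverless=True,
        continuous=False,
    )
    print(resp.pipeline_id)  # assumed field on CreatePipelineResponse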
@@ -372,343 +99,60 @@ class DataPlaneId:
          if self.instance is not None:
              body["instance"] = self.instance
          if self.seq_no is not None:
-             body["seq_no"] = self.seq_no
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
-         body = {}
-         if self.instance is not None:
-             body["instance"] = self.instance
-         if self.seq_no is not None:
-             body["seq_no"] = self.seq_no
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId:
-         """Deserializes the DataPlaneId from a dictionary."""
-         return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))
-
-
- class DayOfWeek(Enum):
-     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
-     start_hour). If not specified all days of the week will be used."""
-
-     FRIDAY = "FRIDAY"
-     MONDAY = "MONDAY"
-     SATURDAY = "SATURDAY"
-     SUNDAY = "SUNDAY"
-     THURSDAY = "THURSDAY"
-     TUESDAY = "TUESDAY"
-     WEDNESDAY = "WEDNESDAY"
-
-
- @dataclass
- class DeletePipelineResponse:
-     def as_dict(self) -> dict:
-         """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes."""
-         body = {}
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse:
-         """Deserializes the DeletePipelineResponse from a dictionary."""
-         return cls()
-
-
- class DeploymentKind(Enum):
-     """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
-     Databricks Asset Bundle."""
-
-     BUNDLE = "BUNDLE"
-
-
- @dataclass
- class EditPipeline:
-     allow_duplicate_names: Optional[bool] = None
-     """If false, deployment will fail if name has changed and conflicts the name of another pipeline."""
-
-     budget_policy_id: Optional[str] = None
-     """Budget policy of this pipeline."""
-
-     catalog: Optional[str] = None
-     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
-     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
-     `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity
-     Catalog."""
-
-     channel: Optional[str] = None
-     """DLT Release Channel that specifies which version to use."""
-
-     clusters: Optional[List[PipelineCluster]] = None
-     """Cluster settings for this pipeline deployment."""
-
-     configuration: Optional[Dict[str, str]] = None
-     """String-String configuration for this pipeline execution."""
-
-     continuous: Optional[bool] = None
-     """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
-     deployment: Optional[PipelineDeployment] = None
-     """Deployment type of this pipeline."""
-
-     development: Optional[bool] = None
-     """Whether the pipeline is in Development mode. Defaults to false."""
-
-     edition: Optional[str] = None
-     """Pipeline product edition."""
-
-     environment: Optional[PipelinesEnvironment] = None
-     """Environment specification for this pipeline used to install dependencies."""
-
-     event_log: Optional[EventLogSpec] = None
-     """Event log configuration for this pipeline"""
-
-     expected_last_modified: Optional[int] = None
-     """If present, the last-modified time of the pipeline settings before the edit. If the settings
-     were modified after that time, then the request will fail with a conflict."""
-
-     filters: Optional[Filters] = None
-     """Filters on which Pipeline packages to include in the deployed graph."""
-
-     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-     """The definition of a gateway pipeline to support change data capture."""
-
-     id: Optional[str] = None
-     """Unique identifier for this pipeline."""
-
-     ingestion_definition: Optional[IngestionPipelineDefinition] = None
-     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-     'libraries', 'schema', 'target', or 'catalog' settings."""
-
-     libraries: Optional[List[PipelineLibrary]] = None
-     """Libraries or code needed by this deployment."""
-
-     name: Optional[str] = None
-     """Friendly identifier for this pipeline."""
-
-     notifications: Optional[List[Notifications]] = None
-     """List of notification settings for this pipeline."""
-
-     photon: Optional[bool] = None
-     """Whether Photon is enabled for this pipeline."""
-
-     pipeline_id: Optional[str] = None
-     """Unique identifier for this pipeline."""
-
-     restart_window: Optional[RestartWindow] = None
-     """Restart window of this pipeline."""
-
-     root_path: Optional[str] = None
-     """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
-     Databricks user interface and it is added to sys.path when executing Python sources during
-     pipeline execution."""
-
-     run_as: Optional[RunAs] = None
-     """Write-only setting, available only in Create/Update calls. Specifies the user or service
-     principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
-     the pipeline.
-
-     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-     is thrown."""
-
-     schema: Optional[str] = None
-     """The default schema (database) where tables are read from or published to."""
-
-     serverless: Optional[bool] = None
-     """Whether serverless compute is enabled for this pipeline."""
-
-     storage: Optional[str] = None
-     """DBFS root directory for storing checkpoints and tables."""
-
-     tags: Optional[Dict[str, str]] = None
-     """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
-     and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
-     pipeline."""
-
-     target: Optional[str] = None
-     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
-     must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
-     deprecated for pipeline creation in favor of the `schema` field."""
-
-     trigger: Optional[PipelineTrigger] = None
-     """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
-
-     def as_dict(self) -> dict:
-         """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         if self.allow_duplicate_names is not None:
-             body["allow_duplicate_names"] = self.allow_duplicate_names
-         if self.budget_policy_id is not None:
-             body["budget_policy_id"] = self.budget_policy_id
-         if self.catalog is not None:
-             body["catalog"] = self.catalog
-         if self.channel is not None:
-             body["channel"] = self.channel
-         if self.clusters:
-             body["clusters"] = [v.as_dict() for v in self.clusters]
-         if self.configuration:
-             body["configuration"] = self.configuration
-         if self.continuous is not None:
-             body["continuous"] = self.continuous
-         if self.deployment:
-             body["deployment"] = self.deployment.as_dict()
-         if self.development is not None:
-             body["development"] = self.development
-         if self.edition is not None:
-             body["edition"] = self.edition
-         if self.environment:
-             body["environment"] = self.environment.as_dict()
-         if self.event_log:
-             body["event_log"] = self.event_log.as_dict()
-         if self.expected_last_modified is not None:
-             body["expected_last_modified"] = self.expected_last_modified
-         if self.filters:
-             body["filters"] = self.filters.as_dict()
-         if self.gateway_definition:
-             body["gateway_definition"] = self.gateway_definition.as_dict()
-         if self.id is not None:
-             body["id"] = self.id
-         if self.ingestion_definition:
-             body["ingestion_definition"] = self.ingestion_definition.as_dict()
-         if self.libraries:
-             body["libraries"] = [v.as_dict() for v in self.libraries]
-         if self.name is not None:
-             body["name"] = self.name
-         if self.notifications:
-             body["notifications"] = [v.as_dict() for v in self.notifications]
-         if self.photon is not None:
-             body["photon"] = self.photon
-         if self.pipeline_id is not None:
-             body["pipeline_id"] = self.pipeline_id
-         if self.restart_window:
-             body["restart_window"] = self.restart_window.as_dict()
-         if self.root_path is not None:
-             body["root_path"] = self.root_path
-         if self.run_as:
-             body["run_as"] = self.run_as.as_dict()
-         if self.schema is not None:
-             body["schema"] = self.schema
-         if self.serverless is not None:
-             body["serverless"] = self.serverless
-         if self.storage is not None:
-             body["storage"] = self.storage
-         if self.tags:
-             body["tags"] = self.tags
-         if self.target is not None:
-             body["target"] = self.target
-         if self.trigger:
-             body["trigger"] = self.trigger.as_dict()
+             body["seq_no"] = self.seq_no
          return body

      def as_shallow_dict(self) -> dict:
-         """Serializes the EditPipeline into a shallow dictionary of its immediate attributes."""
+         """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
          body = {}
-         if self.allow_duplicate_names is not None:
-             body["allow_duplicate_names"] = self.allow_duplicate_names
-         if self.budget_policy_id is not None:
-             body["budget_policy_id"] = self.budget_policy_id
-         if self.catalog is not None:
-             body["catalog"] = self.catalog
-         if self.channel is not None:
-             body["channel"] = self.channel
-         if self.clusters:
-             body["clusters"] = self.clusters
-         if self.configuration:
-             body["configuration"] = self.configuration
-         if self.continuous is not None:
-             body["continuous"] = self.continuous
-         if self.deployment:
-             body["deployment"] = self.deployment
-         if self.development is not None:
-             body["development"] = self.development
-         if self.edition is not None:
-             body["edition"] = self.edition
-         if self.environment:
-             body["environment"] = self.environment
-         if self.event_log:
-             body["event_log"] = self.event_log
-         if self.expected_last_modified is not None:
-             body["expected_last_modified"] = self.expected_last_modified
-         if self.filters:
-             body["filters"] = self.filters
-         if self.gateway_definition:
-             body["gateway_definition"] = self.gateway_definition
-         if self.id is not None:
-             body["id"] = self.id
-         if self.ingestion_definition:
-             body["ingestion_definition"] = self.ingestion_definition
-         if self.libraries:
-             body["libraries"] = self.libraries
-         if self.name is not None:
-             body["name"] = self.name
-         if self.notifications:
-             body["notifications"] = self.notifications
-         if self.photon is not None:
-             body["photon"] = self.photon
-         if self.pipeline_id is not None:
-             body["pipeline_id"] = self.pipeline_id
-         if self.restart_window:
-             body["restart_window"] = self.restart_window
-         if self.root_path is not None:
-             body["root_path"] = self.root_path
-         if self.run_as:
-             body["run_as"] = self.run_as
-         if self.schema is not None:
-             body["schema"] = self.schema
-         if self.serverless is not None:
-             body["serverless"] = self.serverless
-         if self.storage is not None:
-             body["storage"] = self.storage
-         if self.tags:
-             body["tags"] = self.tags
-         if self.target is not None:
-             body["target"] = self.target
-         if self.trigger:
-             body["trigger"] = self.trigger
+         if self.instance is not None:
+             body["instance"] = self.instance
+         if self.seq_no is not None:
+             body["seq_no"] = self.seq_no
          return body

      @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> EditPipeline:
-         """Deserializes the EditPipeline from a dictionary."""
-         return cls(
-             allow_duplicate_names=d.get("allow_duplicate_names", None),
-             budget_policy_id=d.get("budget_policy_id", None),
-             catalog=d.get("catalog", None),
-             channel=d.get("channel", None),
-             clusters=_repeated_dict(d, "clusters", PipelineCluster),
-             configuration=d.get("configuration", None),
-             continuous=d.get("continuous", None),
-             deployment=_from_dict(d, "deployment", PipelineDeployment),
-             development=d.get("development", None),
-             edition=d.get("edition", None),
-             environment=_from_dict(d, "environment", PipelinesEnvironment),
-             event_log=_from_dict(d, "event_log", EventLogSpec),
-             expected_last_modified=d.get("expected_last_modified", None),
-             filters=_from_dict(d, "filters", Filters),
-             gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-             id=d.get("id", None),
-             ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-             libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-             name=d.get("name", None),
-             notifications=_repeated_dict(d, "notifications", Notifications),
-             photon=d.get("photon", None),
-             pipeline_id=d.get("pipeline_id", None),
-             restart_window=_from_dict(d, "restart_window", RestartWindow),
-             root_path=d.get("root_path", None),
-             run_as=_from_dict(d, "run_as", RunAs),
-             schema=d.get("schema", None),
-             serverless=d.get("serverless", None),
-             storage=d.get("storage", None),
-             tags=d.get("tags", None),
-             target=d.get("target", None),
-             trigger=_from_dict(d, "trigger", PipelineTrigger),
-         )
+     def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId:
+         """Deserializes the DataPlaneId from a dictionary."""
+         return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))
+
+
+ class DayOfWeek(Enum):
+     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+     start_hour). If not specified all days of the week will be used."""
+
+     FRIDAY = "FRIDAY"
+     MONDAY = "MONDAY"
+     SATURDAY = "SATURDAY"
+     SUNDAY = "SUNDAY"
+     THURSDAY = "THURSDAY"
+     TUESDAY = "TUESDAY"
+     WEDNESDAY = "WEDNESDAY"
+
+
+ @dataclass
+ class DeletePipelineResponse:
+     def as_dict(self) -> dict:
+         """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body."""
+         body = {}
+         return body
+
+     def as_shallow_dict(self) -> dict:
+         """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes."""
+         body = {}
+         return body
+
+     @classmethod
+     def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse:
+         """Deserializes the DeletePipelineResponse from a dictionary."""
+         return cls()
+
+
+ class DeploymentKind(Enum):
+     """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
+     Databricks Asset Bundle."""
+
+     BUNDLE = "BUNDLE"


  @dataclass
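
Note: aside from dropping EditPipeline, the hunk above is a move rather than a behavior change: DataPlaneId, DayOfWeek, DeletePipelineResponse, and DeploymentKind reappear verbatim at new positions, and DataPlaneId keeps the same as_dict / as_shallow_dict / from_dict trio that every dataclass in this module implements. A quick round-trip sketch of that shared pattern:

    from databricks.sdk.service.pipelines import DataPlaneId

    dp = DataPlaneId(instance="worker-1", seq_no=42)
    body = dp.as_dict()  # {"instance": "worker-1", "seq_no": 42}
    restored = DataPlaneId.from_dict(body)
    assert restored.instance == dp.instance and restored.seq_no == dp.seq_no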
@@ -922,6 +366,11 @@ class GetPipelineResponse:
      pipeline_id: Optional[str] = None
      """The ID of the pipeline."""

+     run_as: Optional[RunAs] = None
+     """The user or service principal that the pipeline runs as, if specified in the request. This field
+     indicates the explicit configuration of `run_as` for the pipeline. To find the value in all
+     cases, explicit or implicit, use `run_as_user_name`."""
+
      run_as_user_name: Optional[str] = None
      """Username of the user that the pipeline will run on behalf of."""

@@ -952,6 +401,8 @@ class GetPipelineResponse:
              body["name"] = self.name
          if self.pipeline_id is not None:
              body["pipeline_id"] = self.pipeline_id
+         if self.run_as:
+             body["run_as"] = self.run_as.as_dict()
          if self.run_as_user_name is not None:
              body["run_as_user_name"] = self.run_as_user_name
          if self.spec:
@@ -981,6 +432,8 @@ class GetPipelineResponse:
              body["name"] = self.name
          if self.pipeline_id is not None:
              body["pipeline_id"] = self.pipeline_id
+         if self.run_as:
+             body["run_as"] = self.run_as
          if self.run_as_user_name is not None:
              body["run_as_user_name"] = self.run_as_user_name
          if self.spec:
@@ -1002,6 +455,7 @@ class GetPipelineResponse:
              latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
              name=d.get("name", None),
              pipeline_id=d.get("pipeline_id", None),
+             run_as=_from_dict(d, "run_as", RunAs),
              run_as_user_name=d.get("run_as_user_name", None),
              spec=_from_dict(d, "spec", PipelineSpec),
              state=_enum(d, "state", PipelineState),
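
Note: with the three hunks above, GetPipelineResponse now carries the explicit run_as configuration alongside the effective run_as_user_name. A sketch of reading both - the get() accessor and the RunAs field names follow docstrings elsewhere in this file, and the pipeline ID is a placeholder:

    p = w.pipelines.get(pipeline_id="<pipeline-id>")
    if p.run_as is not None:
        # run_as was set explicitly at create/update time
        print(p.run_as.user_name or p.run_as.service_principal_name)
    print(p.run_as_user_name)  # effective identity in all cases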
@@ -1209,11 +663,73 @@ class IngestionPipelineDefinition:
          )


+ @dataclass
+ class IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig:
+     """Configurations that are only applicable for query-based ingestion connectors."""
+
+     cursor_columns: Optional[List[str]] = None
+     """The names of the monotonically increasing columns in the source table that are used to enable
+     the table to be read and ingested incrementally through structured streaming. The columns are
+     allowed to have repeated values but have to be non-decreasing. If the source data is merged into
+     the destination (e.g., using SCD Type 1 or Type 2), these columns will implicitly define the
+     `sequence_by` behavior. You can still explicitly set `sequence_by` to override this default."""
+
+     deletion_condition: Optional[str] = None
+     """Specifies a SQL WHERE condition that specifies that the source row has been deleted. This is
+     sometimes referred to as "soft-deletes". For example: "Operation = 'DELETE'" or "is_deleted =
+     true". This field is orthogonal to `hard_deletion_sync_interval_in_seconds`, one for
+     soft-deletes and the other for hard-deletes. See also the
+     hard_deletion_sync_min_interval_in_seconds field for handling of "hard deletes" where the source
+     rows are physically removed from the table."""
+
+     hard_deletion_sync_min_interval_in_seconds: Optional[int] = None
+     """Specifies the minimum interval (in seconds) between snapshots on primary keys for detecting and
+     synchronizing hard deletions—i.e., rows that have been physically removed from the source
+     table. This interval acts as a lower bound. If ingestion runs less frequently than this value,
+     hard deletion synchronization will align with the actual ingestion frequency instead of
+     happening more often. If not set, hard deletion synchronization via snapshots is disabled. This
+     field is mutable and can be updated without triggering a full snapshot."""
+
+     def as_dict(self) -> dict:
+         """Serializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig into a dictionary suitable for use as a JSON request body."""
+         body = {}
+         if self.cursor_columns:
+             body["cursor_columns"] = [v for v in self.cursor_columns]
+         if self.deletion_condition is not None:
+             body["deletion_condition"] = self.deletion_condition
+         if self.hard_deletion_sync_min_interval_in_seconds is not None:
+             body["hard_deletion_sync_min_interval_in_seconds"] = self.hard_deletion_sync_min_interval_in_seconds
+         return body
+
+     def as_shallow_dict(self) -> dict:
+         """Serializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig into a shallow dictionary of its immediate attributes."""
+         body = {}
+         if self.cursor_columns:
+             body["cursor_columns"] = self.cursor_columns
+         if self.deletion_condition is not None:
+             body["deletion_condition"] = self.deletion_condition
+         if self.hard_deletion_sync_min_interval_in_seconds is not None:
+             body["hard_deletion_sync_min_interval_in_seconds"] = self.hard_deletion_sync_min_interval_in_seconds
+         return body
+
+     @classmethod
+     def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig:
+         """Deserializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig from a dictionary."""
+         return cls(
+             cursor_columns=d.get("cursor_columns", None),
+             deletion_condition=d.get("deletion_condition", None),
+             hard_deletion_sync_min_interval_in_seconds=d.get("hard_deletion_sync_min_interval_in_seconds", None),
+         )
+
+
  class IngestionSourceType(Enum):

+     BIGQUERY = "BIGQUERY"
+     CONFLUENCE = "CONFLUENCE"
      DYNAMICS365 = "DYNAMICS365"
      GA4_RAW_DATA = "GA4_RAW_DATA"
      MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
+     META_MARKETING = "META_MARKETING"
      MYSQL = "MYSQL"
      NETSUITE = "NETSUITE"
      ORACLE = "ORACLE"
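
Note: per the docstrings just added, cursor_columns drives incremental reads over structured streaming, deletion_condition flags soft-deletes, and hard_deletion_sync_min_interval_in_seconds bounds snapshot-based hard-delete sync. A round-trip sketch of the new config class (the alias is just for brevity):

    from databricks.sdk.service.pipelines import (
        IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig as QueryBasedConfig,
    )

    cfg = QueryBasedConfig(
        cursor_columns=["updated_at"],           # monotonically non-decreasing column
        deletion_condition="is_deleted = true",  # soft-delete marker, as in the docstring example
        hard_deletion_sync_min_interval_in_seconds=3600,  # hard-delete snapshots at most hourly
    )
    assert QueryBasedConfig.from_dict(cfg.as_dict()).cursor_columns == ["updated_at"]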
@@ -1621,7 +1137,6 @@ class PipelineAccessControlRequest:
      """name of the group"""

      permission_level: Optional[PipelinePermissionLevel] = None
-     """Permission level"""

      service_principal_name: Optional[str] = None
      """application ID of a service principal"""
@@ -2195,7 +1710,6 @@ class PipelinePermission:
      inherited_from_object: Optional[List[str]] = None

      permission_level: Optional[PipelinePermissionLevel] = None
-     """Permission level"""

      def as_dict(self) -> dict:
          """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body."""
@@ -2283,7 +1797,6 @@ class PipelinePermissionsDescription:
      description: Optional[str] = None

      permission_level: Optional[PipelinePermissionLevel] = None
-     """Permission level"""

      def as_dict(self) -> dict:
          """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body."""
@@ -2312,40 +1825,6 @@ class PipelinePermissionsDescription:
          )


- @dataclass
- class PipelinePermissionsRequest:
-     access_control_list: Optional[List[PipelineAccessControlRequest]] = None
-
-     pipeline_id: Optional[str] = None
-     """The pipeline for which to get or manage permissions."""
-
-     def as_dict(self) -> dict:
-         """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         if self.access_control_list:
-             body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-         if self.pipeline_id is not None:
-             body["pipeline_id"] = self.pipeline_id
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes."""
-         body = {}
-         if self.access_control_list:
-             body["access_control_list"] = self.access_control_list
-         if self.pipeline_id is not None:
-             body["pipeline_id"] = self.pipeline_id
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsRequest:
-         """Deserializes the PipelinePermissionsRequest from a dictionary."""
-         return cls(
-             access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest),
-             pipeline_id=d.get("pipeline_id", None),
-         )
-
-
  @dataclass
  class PipelineSpec:
      budget_policy_id: Optional[str] = None
@@ -2630,7 +2109,6 @@ class PipelineStateInfo:
      owner."""

      state: Optional[PipelineState] = None
-     """The pipeline state."""

      def as_dict(self) -> dict:
          """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body."""
@@ -3098,77 +2576,6 @@ class StackFrame:
          )


- @dataclass
- class StartUpdate:
-     cause: Optional[StartUpdateCause] = None
-     """What triggered this update."""
-
-     full_refresh: Optional[bool] = None
-     """If true, this update will reset all tables before running."""
-
-     full_refresh_selection: Optional[List[str]] = None
-     """A list of tables to update with fullRefresh. If both refresh_selection and
-     full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
-     that the states of the table will be reset before the refresh."""
-
-     pipeline_id: Optional[str] = None
-
-     refresh_selection: Optional[List[str]] = None
-     """A list of tables to update without fullRefresh. If both refresh_selection and
-     full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
-     that the states of the table will be reset before the refresh."""
-
-     validate_only: Optional[bool] = None
-     """If true, this update only validates the correctness of pipeline source code but does not
-     materialize or publish any datasets."""
-
-     def as_dict(self) -> dict:
-         """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body."""
-         body = {}
-         if self.cause is not None:
-             body["cause"] = self.cause.value
-         if self.full_refresh is not None:
-             body["full_refresh"] = self.full_refresh
-         if self.full_refresh_selection:
-             body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
-         if self.pipeline_id is not None:
-             body["pipeline_id"] = self.pipeline_id
-         if self.refresh_selection:
-             body["refresh_selection"] = [v for v in self.refresh_selection]
-         if self.validate_only is not None:
-             body["validate_only"] = self.validate_only
-         return body
-
-     def as_shallow_dict(self) -> dict:
-         """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
-         body = {}
-         if self.cause is not None:
-             body["cause"] = self.cause
-         if self.full_refresh is not None:
-             body["full_refresh"] = self.full_refresh
-         if self.full_refresh_selection:
-             body["full_refresh_selection"] = self.full_refresh_selection
-         if self.pipeline_id is not None:
-             body["pipeline_id"] = self.pipeline_id
-         if self.refresh_selection:
-             body["refresh_selection"] = self.refresh_selection
-         if self.validate_only is not None:
-             body["validate_only"] = self.validate_only
-         return body
-
-     @classmethod
-     def from_dict(cls, d: Dict[str, Any]) -> StartUpdate:
-         """Deserializes the StartUpdate from a dictionary."""
-         return cls(
-             cause=_enum(d, "cause", StartUpdateCause),
-             full_refresh=d.get("full_refresh", None),
-             full_refresh_selection=d.get("full_refresh_selection", None),
-             pipeline_id=d.get("pipeline_id", None),
-             refresh_selection=d.get("refresh_selection", None),
-             validate_only=d.get("validate_only", None),
-         )
-
-
  class StartUpdateCause(Enum):
      """What triggered this update."""
@@ -3317,6 +2724,10 @@ class TableSpecificConfig:
      primary_keys: Optional[List[str]] = None
      """The primary key of the table used to apply changes."""

+     query_based_connector_config: Optional[IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig] = (
+         None
+     )
+
      salesforce_include_formula_fields: Optional[bool] = None
      """If true, formula fields defined in the table are included in the ingestion. This setting is only
      valid for the Salesforce connector"""
@@ -3337,6 +2748,8 @@ class TableSpecificConfig:
              body["include_columns"] = [v for v in self.include_columns]
          if self.primary_keys:
              body["primary_keys"] = [v for v in self.primary_keys]
+         if self.query_based_connector_config:
+             body["query_based_connector_config"] = self.query_based_connector_config.as_dict()
          if self.salesforce_include_formula_fields is not None:
              body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
          if self.scd_type is not None:
@@ -3354,6 +2767,8 @@ class TableSpecificConfig:
              body["include_columns"] = self.include_columns
          if self.primary_keys:
              body["primary_keys"] = self.primary_keys
+         if self.query_based_connector_config:
+             body["query_based_connector_config"] = self.query_based_connector_config
          if self.salesforce_include_formula_fields is not None:
              body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
          if self.scd_type is not None:
@@ -3369,6 +2784,11 @@ class TableSpecificConfig:
              exclude_columns=d.get("exclude_columns", None),
              include_columns=d.get("include_columns", None),
              primary_keys=d.get("primary_keys", None),
+             query_based_connector_config=_from_dict(
+                 d,
+                 "query_based_connector_config",
+                 IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
+             ),
              salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
              scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
              sequence_by=d.get("sequence_by", None),
@@ -3378,6 +2798,7 @@ class TableSpecificConfig:
  class TableSpecificConfigScdType(Enum):
      """The SCD type to use to ingest the table."""

+     APPEND_ONLY = "APPEND_ONLY"
      SCD_TYPE_1 = "SCD_TYPE_1"
      SCD_TYPE_2 = "SCD_TYPE_2"

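Note: the new query_based_connector_config slot and the APPEND_ONLY SCD type compose with the existing per-table settings, and as_dict() nests the connector config via its own as_dict(). A self-contained sketch:

    from databricks.sdk.service.pipelines import (
        IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
        TableSpecificConfig,
        TableSpecificConfigScdType,
    )

    table_cfg = TableSpecificConfig(
        primary_keys=["id"],
        scd_type=TableSpecificConfigScdType.APPEND_ONLY,  # new in this release
        query_based_connector_config=IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig(
            cursor_columns=["updated_at"],
        ),
    )
    body = table_cfg.as_dict()  # nests the connector config as a sub-dictionary
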
@@ -3528,7 +2949,6 @@ class UpdateStateInfo:
      creation_time: Optional[str] = None

      state: Optional[UpdateStateInfoState] = None
-     """The update state."""

      update_id: Optional[str] = None

@@ -3715,11 +3135,6 @@ class PipelinesAPI:
      Databricks user interface and it is added to sys.path when executing Python sources during pipeline
      execution.
    :param run_as: :class:`RunAs` (optional)
-     Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-     that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-
-     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-     thrown.
    :param schema: str (optional)
      The default schema (database) where tables are read from or published to.
    :param serverless: bool (optional)
@@ -4072,7 +3487,6 @@ class PipelinesAPI:

    :param pipeline_id: str
    :param cause: :class:`StartUpdateCause` (optional)
-     What triggered this update.
    :param full_refresh: bool (optional)
      If true, this update will reset all tables before running.
    :param full_refresh_selection: List[str] (optional)
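
Note: this docstring belongs to the update-starting method on PipelinesAPI, which replaced the removed StartUpdate wrapper; the same fields are passed as keyword arguments. A hedged sketch, assuming the usual w.pipelines.start_update entry point and an update_id field on its response (neither is shown in this diff):

    # Validate pipeline source code without materializing or publishing datasets.
    update = w.pipelines.start_update(
        pipeline_id="<pipeline-id>",
        validate_only=True,
    )
    print(update.update_id)  # assumed field on the start_update response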
@@ -4223,11 +3637,6 @@ class PipelinesAPI:
      Databricks user interface and it is added to sys.path when executing Python sources during pipeline
      execution.
    :param run_as: :class:`RunAs` (optional)
-     Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-     that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-
-     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-     thrown.
    :param schema: str (optional)
      The default schema (database) where tables are read from or published to.
    :param serverless: bool (optional)