databricks-sdk 0.58.0__py3-none-any.whl → 0.60.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of databricks-sdk has been flagged as potentially problematic.

Files changed (34)
  1. databricks/sdk/__init__.py +18 -10
  2. databricks/sdk/credentials_provider.py +2 -2
  3. databricks/sdk/mixins/files.py +43 -15
  4. databricks/sdk/mixins/open_ai_client.py +28 -7
  5. databricks/sdk/oidc.py +6 -2
  6. databricks/sdk/service/{aibuilder.py → agentbricks.py} +5 -132
  7. databricks/sdk/service/apps.py +52 -46
  8. databricks/sdk/service/billing.py +9 -200
  9. databricks/sdk/service/catalog.py +5501 -7697
  10. databricks/sdk/service/cleanrooms.py +24 -54
  11. databricks/sdk/service/compute.py +456 -2515
  12. databricks/sdk/service/dashboards.py +1 -177
  13. databricks/sdk/service/database.py +34 -53
  14. databricks/sdk/service/files.py +2 -218
  15. databricks/sdk/service/iam.py +16 -295
  16. databricks/sdk/service/jobs.py +108 -1171
  17. databricks/sdk/service/marketplace.py +0 -573
  18. databricks/sdk/service/ml.py +76 -2445
  19. databricks/sdk/service/oauth2.py +122 -237
  20. databricks/sdk/service/pipelines.py +180 -752
  21. databricks/sdk/service/provisioning.py +0 -603
  22. databricks/sdk/service/serving.py +5 -577
  23. databricks/sdk/service/settings.py +192 -1560
  24. databricks/sdk/service/sharing.py +5 -470
  25. databricks/sdk/service/sql.py +117 -1704
  26. databricks/sdk/service/vectorsearch.py +0 -391
  27. databricks/sdk/service/workspace.py +250 -721
  28. databricks/sdk/version.py +1 -1
  29. {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.60.0.dist-info}/METADATA +1 -1
  30. {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.60.0.dist-info}/RECORD +34 -34
  31. {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.60.0.dist-info}/WHEEL +0 -0
  32. {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.60.0.dist-info}/licenses/LICENSE +0 -0
  33. {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.60.0.dist-info}/licenses/NOTICE +0 -0
  34. {databricks_sdk-0.58.0.dist-info → databricks_sdk-0.60.0.dist-info}/top_level.txt +0 -0
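
The most consequential rename in this list is databricks/sdk/service/aibuilder.py → databricks/sdk/service/agentbricks.py. A minimal import-migration sketch, assuming the old module is gone in 0.60.0 and its symbols now live under agentbricks (only the rename itself is visible in this diff):

# Hypothetical compatibility shim for the aibuilder -> agentbricks rename.
# Which symbols survived the rename is not visible in this diff; treat the
# module-level alias below as an assumption.
try:
    from databricks.sdk.service import agentbricks as agent_service  # databricks-sdk >= 0.60.0
except ImportError:
    from databricks.sdk.service import aibuilder as agent_service  # databricks-sdk <= 0.58.0
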
@@ -21,273 +21,6 @@ from databricks.sdk.service import compute
 # all definitions in this file are in alphabetical order
 
 
-@dataclass
-class CreatePipeline:
-    allow_duplicate_names: Optional[bool] = None
-    """If false, deployment will fail if name conflicts with that of another pipeline."""
-
-    budget_policy_id: Optional[str] = None
-    """Budget policy of this pipeline."""
-
-    catalog: Optional[str] = None
-    """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
-    tables in this pipeline are published to a `target` schema inside `catalog` (for example,
-    `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity
-    Catalog."""
-
-    channel: Optional[str] = None
-    """DLT Release Channel that specifies which version to use."""
-
-    clusters: Optional[List[PipelineCluster]] = None
-    """Cluster settings for this pipeline deployment."""
-
-    configuration: Optional[Dict[str, str]] = None
-    """String-String configuration for this pipeline execution."""
-
-    continuous: Optional[bool] = None
-    """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
-    deployment: Optional[PipelineDeployment] = None
-    """Deployment type of this pipeline."""
-
-    development: Optional[bool] = None
-    """Whether the pipeline is in Development mode. Defaults to false."""
-
-    dry_run: Optional[bool] = None
-
-    edition: Optional[str] = None
-    """Pipeline product edition."""
-
-    environment: Optional[PipelinesEnvironment] = None
-    """Environment specification for this pipeline used to install dependencies."""
-
-    event_log: Optional[EventLogSpec] = None
-    """Event log configuration for this pipeline"""
-
-    filters: Optional[Filters] = None
-    """Filters on which Pipeline packages to include in the deployed graph."""
-
-    gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support change data capture."""
-
-    id: Optional[str] = None
-    """Unique identifier for this pipeline."""
-
-    ingestion_definition: Optional[IngestionPipelineDefinition] = None
-    """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-    'libraries', 'schema', 'target', or 'catalog' settings."""
-
-    libraries: Optional[List[PipelineLibrary]] = None
-    """Libraries or code needed by this deployment."""
-
-    name: Optional[str] = None
-    """Friendly identifier for this pipeline."""
-
-    notifications: Optional[List[Notifications]] = None
-    """List of notification settings for this pipeline."""
-
-    photon: Optional[bool] = None
-    """Whether Photon is enabled for this pipeline."""
-
-    restart_window: Optional[RestartWindow] = None
-    """Restart window of this pipeline."""
-
-    root_path: Optional[str] = None
-    """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
-    Databricks user interface and it is added to sys.path when executing Python sources during
-    pipeline execution."""
-
-    run_as: Optional[RunAs] = None
-
-    schema: Optional[str] = None
-    """The default schema (database) where tables are read from or published to."""
-
-    serverless: Optional[bool] = None
-    """Whether serverless compute is enabled for this pipeline."""
-
-    storage: Optional[str] = None
-    """DBFS root directory for storing checkpoints and tables."""
-
-    tags: Optional[Dict[str, str]] = None
-    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
-    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
-    pipeline."""
-
-    target: Optional[str] = None
-    """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
-    must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
-    deprecated for pipeline creation in favor of the `schema` field."""
-
-    trigger: Optional[PipelineTrigger] = None
-    """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = [v.as_dict() for v in self.clusters]
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment.as_dict()
-        if self.development is not None:
-            body["development"] = self.development
-        if self.dry_run is not None:
-            body["dry_run"] = self.dry_run
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.environment:
-            body["environment"] = self.environment.as_dict()
-        if self.event_log:
-            body["event_log"] = self.event_log.as_dict()
-        if self.filters:
-            body["filters"] = self.filters.as_dict()
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition.as_dict()
-        if self.libraries:
-            body["libraries"] = [v.as_dict() for v in self.libraries]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.restart_window:
-            body["restart_window"] = self.restart_window.as_dict()
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as.as_dict()
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.tags:
-            body["tags"] = self.tags
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = self.clusters
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment
-        if self.development is not None:
-            body["development"] = self.development
-        if self.dry_run is not None:
-            body["dry_run"] = self.dry_run
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.environment:
-            body["environment"] = self.environment
-        if self.event_log:
-            body["event_log"] = self.event_log
-        if self.filters:
-            body["filters"] = self.filters
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition
-        if self.libraries:
-            body["libraries"] = self.libraries
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.restart_window:
-            body["restart_window"] = self.restart_window
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.tags:
-            body["tags"] = self.tags
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline:
-        """Deserializes the CreatePipeline from a dictionary."""
-        return cls(
-            allow_duplicate_names=d.get("allow_duplicate_names", None),
-            budget_policy_id=d.get("budget_policy_id", None),
-            catalog=d.get("catalog", None),
-            channel=d.get("channel", None),
-            clusters=_repeated_dict(d, "clusters", PipelineCluster),
-            configuration=d.get("configuration", None),
-            continuous=d.get("continuous", None),
-            deployment=_from_dict(d, "deployment", PipelineDeployment),
-            development=d.get("development", None),
-            dry_run=d.get("dry_run", None),
-            edition=d.get("edition", None),
-            environment=_from_dict(d, "environment", PipelinesEnvironment),
-            event_log=_from_dict(d, "event_log", EventLogSpec),
-            filters=_from_dict(d, "filters", Filters),
-            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-            id=d.get("id", None),
-            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-            name=d.get("name", None),
-            notifications=_repeated_dict(d, "notifications", Notifications),
-            photon=d.get("photon", None),
-            restart_window=_from_dict(d, "restart_window", RestartWindow),
-            root_path=d.get("root_path", None),
-            run_as=_from_dict(d, "run_as", RunAs),
-            schema=d.get("schema", None),
-            serverless=d.get("serverless", None),
-            storage=d.get("storage", None),
-            tags=d.get("tags", None),
-            target=d.get("target", None),
-            trigger=_from_dict(d, "trigger", PipelineTrigger),
-        )
-
-
 @dataclass
 class CreatePipelineResponse:
     effective_settings: Optional[PipelineSpec] = None
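
The hunk above removes the entire CreatePipeline request dataclass; the hunks that follow drop the other request wrappers (EditPipeline, StartUpdate, PipelinePermissionsRequest) the same way, leaving the service methods to take the same fields as keyword arguments. A sketch of pipeline creation without the wrapper, assuming w.pipelines.create() keeps its keyword-argument signature in 0.60.0 (the diff shows only the model removal, not the method):

# Sketch: creating a pipeline without the removed CreatePipeline wrapper.
# Assumes PipelinesAPI.create() still accepts these keyword arguments.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import pipelines

w = WorkspaceClient()
created = w.pipelines.create(
    name="my-pipeline",  # friendly identifier
    serverless=True,
    catalog="main",
    schema="etl",  # preferred over the deprecated `target` field
    libraries=[
        pipelines.PipelineLibrary(
            notebook=pipelines.NotebookLibrary(path="/Repos/me/etl-notebook")
        )
    ],
)
print(created.pipeline_id)
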
@@ -309,396 +42,119 @@ class CreatePipelineResponse:
         """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.effective_settings:
-            body["effective_settings"] = self.effective_settings
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CreatePipelineResponse:
-        """Deserializes the CreatePipelineResponse from a dictionary."""
-        return cls(
-            effective_settings=_from_dict(d, "effective_settings", PipelineSpec), pipeline_id=d.get("pipeline_id", None)
-        )
-
-
-@dataclass
-class CronTrigger:
-    quartz_cron_schedule: Optional[str] = None
-
-    timezone_id: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.quartz_cron_schedule is not None:
-            body["quartz_cron_schedule"] = self.quartz_cron_schedule
-        if self.timezone_id is not None:
-            body["timezone_id"] = self.timezone_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CronTrigger into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.quartz_cron_schedule is not None:
-            body["quartz_cron_schedule"] = self.quartz_cron_schedule
-        if self.timezone_id is not None:
-            body["timezone_id"] = self.timezone_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CronTrigger:
-        """Deserializes the CronTrigger from a dictionary."""
-        return cls(quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None))
-
-
-@dataclass
-class DataPlaneId:
-    instance: Optional[str] = None
-    """The instance name of the data plane emitting an event."""
-
-    seq_no: Optional[int] = None
-    """A sequence number, unique and increasing within the data plane instance."""
-
-    def as_dict(self) -> dict:
-        """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.instance is not None:
-            body["instance"] = self.instance
-        if self.seq_no is not None:
-            body["seq_no"] = self.seq_no
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.instance is not None:
-            body["instance"] = self.instance
-        if self.seq_no is not None:
-            body["seq_no"] = self.seq_no
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId:
-        """Deserializes the DataPlaneId from a dictionary."""
-        return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))
-
-
-class DayOfWeek(Enum):
-    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
-    start_hour). If not specified all days of the week will be used."""
-
-    FRIDAY = "FRIDAY"
-    MONDAY = "MONDAY"
-    SATURDAY = "SATURDAY"
-    SUNDAY = "SUNDAY"
-    THURSDAY = "THURSDAY"
-    TUESDAY = "TUESDAY"
-    WEDNESDAY = "WEDNESDAY"
-
-
-@dataclass
-class DeletePipelineResponse:
-    def as_dict(self) -> dict:
-        """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse:
-        """Deserializes the DeletePipelineResponse from a dictionary."""
-        return cls()
-
-
-class DeploymentKind(Enum):
-    """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
-    Databricks Asset Bundle."""
-
-    BUNDLE = "BUNDLE"
-
-
-@dataclass
-class EditPipeline:
-    allow_duplicate_names: Optional[bool] = None
-    """If false, deployment will fail if name has changed and conflicts the name of another pipeline."""
-
-    budget_policy_id: Optional[str] = None
-    """Budget policy of this pipeline."""
-
-    catalog: Optional[str] = None
-    """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
-    tables in this pipeline are published to a `target` schema inside `catalog` (for example,
-    `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity
-    Catalog."""
-
-    channel: Optional[str] = None
-    """DLT Release Channel that specifies which version to use."""
-
-    clusters: Optional[List[PipelineCluster]] = None
-    """Cluster settings for this pipeline deployment."""
-
-    configuration: Optional[Dict[str, str]] = None
-    """String-String configuration for this pipeline execution."""
-
-    continuous: Optional[bool] = None
-    """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
-    deployment: Optional[PipelineDeployment] = None
-    """Deployment type of this pipeline."""
-
-    development: Optional[bool] = None
-    """Whether the pipeline is in Development mode. Defaults to false."""
-
-    edition: Optional[str] = None
-    """Pipeline product edition."""
-
-    environment: Optional[PipelinesEnvironment] = None
-    """Environment specification for this pipeline used to install dependencies."""
-
-    event_log: Optional[EventLogSpec] = None
-    """Event log configuration for this pipeline"""
-
-    expected_last_modified: Optional[int] = None
-    """If present, the last-modified time of the pipeline settings before the edit. If the settings
-    were modified after that time, then the request will fail with a conflict."""
-
-    filters: Optional[Filters] = None
-    """Filters on which Pipeline packages to include in the deployed graph."""
-
-    gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support change data capture."""
-
-    id: Optional[str] = None
-    """Unique identifier for this pipeline."""
-
-    ingestion_definition: Optional[IngestionPipelineDefinition] = None
-    """The configuration for a managed ingestion pipeline. These settings cannot be used with the
-    'libraries', 'schema', 'target', or 'catalog' settings."""
-
-    libraries: Optional[List[PipelineLibrary]] = None
-    """Libraries or code needed by this deployment."""
-
-    name: Optional[str] = None
-    """Friendly identifier for this pipeline."""
-
-    notifications: Optional[List[Notifications]] = None
-    """List of notification settings for this pipeline."""
-
-    photon: Optional[bool] = None
-    """Whether Photon is enabled for this pipeline."""
-
-    pipeline_id: Optional[str] = None
-    """Unique identifier for this pipeline."""
-
-    restart_window: Optional[RestartWindow] = None
-    """Restart window of this pipeline."""
-
-    root_path: Optional[str] = None
-    """Root path for this pipeline. This is used as the root directory when editing the pipeline in the
-    Databricks user interface and it is added to sys.path when executing Python sources during
-    pipeline execution."""
-
-    run_as: Optional[RunAs] = None
-
-    schema: Optional[str] = None
-    """The default schema (database) where tables are read from or published to."""
-
-    serverless: Optional[bool] = None
-    """Whether serverless compute is enabled for this pipeline."""
-
-    storage: Optional[str] = None
-    """DBFS root directory for storing checkpoints and tables."""
-
-    tags: Optional[Dict[str, str]] = None
-    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
-    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
-    pipeline."""
-
-    target: Optional[str] = None
-    """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
-    must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
-    deprecated for pipeline creation in favor of the `schema` field."""
-
-    trigger: Optional[PipelineTrigger] = None
-    """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
-
-    def as_dict(self) -> dict:
-        """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = [v.as_dict() for v in self.clusters]
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment.as_dict()
-        if self.development is not None:
-            body["development"] = self.development
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.environment:
-            body["environment"] = self.environment.as_dict()
-        if self.event_log:
-            body["event_log"] = self.event_log.as_dict()
-        if self.expected_last_modified is not None:
-            body["expected_last_modified"] = self.expected_last_modified
-        if self.filters:
-            body["filters"] = self.filters.as_dict()
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition.as_dict()
-        if self.libraries:
-            body["libraries"] = [v.as_dict() for v in self.libraries]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.restart_window:
-            body["restart_window"] = self.restart_window.as_dict()
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as.as_dict()
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.tags:
-            body["tags"] = self.tags
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the EditPipeline into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = self.clusters
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment
-        if self.development is not None:
-            body["development"] = self.development
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.environment:
-            body["environment"] = self.environment
-        if self.event_log:
-            body["event_log"] = self.event_log
-        if self.expected_last_modified is not None:
-            body["expected_last_modified"] = self.expected_last_modified
-        if self.filters:
-            body["filters"] = self.filters
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition
-        if self.libraries:
-            body["libraries"] = self.libraries
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.photon is not None:
-            body["photon"] = self.photon
+            body["effective_settings"] = self.effective_settings
         if self.pipeline_id is not None:
             body["pipeline_id"] = self.pipeline_id
-        if self.restart_window:
-            body["restart_window"] = self.restart_window
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.tags:
-            body["tags"] = self.tags
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> EditPipeline:
-        """Deserializes the EditPipeline from a dictionary."""
+    def from_dict(cls, d: Dict[str, Any]) -> CreatePipelineResponse:
+        """Deserializes the CreatePipelineResponse from a dictionary."""
         return cls(
-            allow_duplicate_names=d.get("allow_duplicate_names", None),
-            budget_policy_id=d.get("budget_policy_id", None),
-            catalog=d.get("catalog", None),
-            channel=d.get("channel", None),
-            clusters=_repeated_dict(d, "clusters", PipelineCluster),
-            configuration=d.get("configuration", None),
-            continuous=d.get("continuous", None),
-            deployment=_from_dict(d, "deployment", PipelineDeployment),
-            development=d.get("development", None),
-            edition=d.get("edition", None),
-            environment=_from_dict(d, "environment", PipelinesEnvironment),
-            event_log=_from_dict(d, "event_log", EventLogSpec),
-            expected_last_modified=d.get("expected_last_modified", None),
-            filters=_from_dict(d, "filters", Filters),
-            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-            id=d.get("id", None),
-            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-            name=d.get("name", None),
-            notifications=_repeated_dict(d, "notifications", Notifications),
-            photon=d.get("photon", None),
-            pipeline_id=d.get("pipeline_id", None),
-            restart_window=_from_dict(d, "restart_window", RestartWindow),
-            root_path=d.get("root_path", None),
-            run_as=_from_dict(d, "run_as", RunAs),
-            schema=d.get("schema", None),
-            serverless=d.get("serverless", None),
-            storage=d.get("storage", None),
-            tags=d.get("tags", None),
-            target=d.get("target", None),
-            trigger=_from_dict(d, "trigger", PipelineTrigger),
+            effective_settings=_from_dict(d, "effective_settings", PipelineSpec), pipeline_id=d.get("pipeline_id", None)
         )
 
 
+@dataclass
+class CronTrigger:
+    quartz_cron_schedule: Optional[str] = None
+
+    timezone_id: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.quartz_cron_schedule is not None:
+            body["quartz_cron_schedule"] = self.quartz_cron_schedule
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quartz_cron_schedule is not None:
+            body["quartz_cron_schedule"] = self.quartz_cron_schedule
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CronTrigger:
+        """Deserializes the CronTrigger from a dictionary."""
+        return cls(quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None))
+
+
+@dataclass
+class DataPlaneId:
+    instance: Optional[str] = None
+    """The instance name of the data plane emitting an event."""
+
+    seq_no: Optional[int] = None
+    """A sequence number, unique and increasing within the data plane instance."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.instance is not None:
+            body["instance"] = self.instance
+        if self.seq_no is not None:
+            body["seq_no"] = self.seq_no
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance is not None:
+            body["instance"] = self.instance
+        if self.seq_no is not None:
+            body["seq_no"] = self.seq_no
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId:
+        """Deserializes the DataPlaneId from a dictionary."""
+        return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))
+
+
+class DayOfWeek(Enum):
+    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+    start_hour). If not specified all days of the week will be used."""
+
+    FRIDAY = "FRIDAY"
+    MONDAY = "MONDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
+    THURSDAY = "THURSDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
+
+
+@dataclass
+class DeletePipelineResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse:
+        """Deserializes the DeletePipelineResponse from a dictionary."""
+        return cls()
+
+
+class DeploymentKind(Enum):
+    """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
+    Databricks Asset Bundle."""
+
+    BUNDLE = "BUNDLE"
+
+
 @dataclass
 class EditPipelineResponse:
     def as_dict(self) -> dict:
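
CronTrigger, DataPlaneId, DayOfWeek, DeletePipelineResponse, and DeploymentKind are not new in this hunk; they are re-emitted verbatim because removing CreatePipeline and EditPipeline shifted them up in the file. Like every generated model they round-trip through as_dict()/from_dict(); a small illustration with made-up values:

# Round-trip of the CronTrigger model through its JSON-body dict form.
# The Quartz expression and timezone are illustrative values only.
from databricks.sdk.service.pipelines import CronTrigger

trigger = CronTrigger(quartz_cron_schedule="0 0 2 * * ?", timezone_id="UTC")
body = trigger.as_dict()  # {"quartz_cron_schedule": "0 0 2 * * ?", "timezone_id": "UTC"}
assert CronTrigger.from_dict(body) == trigger  # dataclass equality holds after the round trip
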
@@ -1207,19 +663,82 @@ class IngestionPipelineDefinition:
         )
 
 
+@dataclass
+class IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig:
+    """Configurations that are only applicable for query-based ingestion connectors."""
+
+    cursor_columns: Optional[List[str]] = None
+    """The names of the monotonically increasing columns in the source table that are used to enable
+    the table to be read and ingested incrementally through structured streaming. The columns are
+    allowed to have repeated values but have to be non-decreasing. If the source data is merged into
+    the destination (e.g., using SCD Type 1 or Type 2), these columns will implicitly define the
+    `sequence_by` behavior. You can still explicitly set `sequence_by` to override this default."""
+
+    deletion_condition: Optional[str] = None
+    """Specifies a SQL WHERE condition that specifies that the source row has been deleted. This is
+    sometimes referred to as "soft-deletes". For example: "Operation = 'DELETE'" or "is_deleted =
+    true". This field is orthogonal to `hard_deletion_sync_interval_in_seconds`, one for
+    soft-deletes and the other for hard-deletes. See also the
+    hard_deletion_sync_min_interval_in_seconds field for handling of "hard deletes" where the source
+    rows are physically removed from the table."""
+
+    hard_deletion_sync_min_interval_in_seconds: Optional[int] = None
+    """Specifies the minimum interval (in seconds) between snapshots on primary keys for detecting and
+    synchronizing hard deletions—i.e., rows that have been physically removed from the source
+    table. This interval acts as a lower bound. If ingestion runs less frequently than this value,
+    hard deletion synchronization will align with the actual ingestion frequency instead of
+    happening more often. If not set, hard deletion synchronization via snapshots is disabled. This
+    field is mutable and can be updated without triggering a full snapshot."""
+
+    def as_dict(self) -> dict:
+        """Serializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cursor_columns:
+            body["cursor_columns"] = [v for v in self.cursor_columns]
+        if self.deletion_condition is not None:
+            body["deletion_condition"] = self.deletion_condition
+        if self.hard_deletion_sync_min_interval_in_seconds is not None:
+            body["hard_deletion_sync_min_interval_in_seconds"] = self.hard_deletion_sync_min_interval_in_seconds
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cursor_columns:
+            body["cursor_columns"] = self.cursor_columns
+        if self.deletion_condition is not None:
+            body["deletion_condition"] = self.deletion_condition
+        if self.hard_deletion_sync_min_interval_in_seconds is not None:
+            body["hard_deletion_sync_min_interval_in_seconds"] = self.hard_deletion_sync_min_interval_in_seconds
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig:
+        """Deserializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig from a dictionary."""
+        return cls(
+            cursor_columns=d.get("cursor_columns", None),
+            deletion_condition=d.get("deletion_condition", None),
+            hard_deletion_sync_min_interval_in_seconds=d.get("hard_deletion_sync_min_interval_in_seconds", None),
+        )
+
+
 class IngestionSourceType(Enum):
 
     BIGQUERY = "BIGQUERY"
+    CONFLUENCE = "CONFLUENCE"
     DYNAMICS365 = "DYNAMICS365"
     GA4_RAW_DATA = "GA4_RAW_DATA"
     MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
+    META_MARKETING = "META_MARKETING"
     MYSQL = "MYSQL"
     NETSUITE = "NETSUITE"
     ORACLE = "ORACLE"
     POSTGRESQL = "POSTGRESQL"
+    REDSHIFT = "REDSHIFT"
     SALESFORCE = "SALESFORCE"
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
+    SQLDW = "SQLDW"
     SQLSERVER = "SQLSERVER"
     TERADATA = "TERADATA"
     WORKDAY_RAAS = "WORKDAY_RAAS"
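
The substantive additions here are the new IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig model and four new IngestionSourceType values (CONFLUENCE, META_MARKETING, REDSHIFT, SQLDW). A sketch of configuring incremental, query-based ingestion with the new model; the column name, condition, and interval are illustrative only:

# Sketch: incremental query-based ingestion with soft- and hard-delete handling.
from databricks.sdk.service.pipelines import (
    IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig as QueryBasedConnectorConfig,
)

cfg = QueryBasedConnectorConfig(
    cursor_columns=["updated_at"],           # non-decreasing column driving incremental reads
    deletion_condition="is_deleted = true",  # rows matching this are treated as soft-deleted
    hard_deletion_sync_min_interval_in_seconds=86400,  # snapshot-based hard-delete sync at most daily
)
assert QueryBasedConnectorConfig.from_dict(cfg.as_dict()) == cfg
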
@@ -2308,40 +1827,6 @@ class PipelinePermissionsDescription:
         )
 
 
-@dataclass
-class PipelinePermissionsRequest:
-    access_control_list: Optional[List[PipelineAccessControlRequest]] = None
-
-    pipeline_id: Optional[str] = None
-    """The pipeline for which to get or manage permissions."""
-
-    def as_dict(self) -> dict:
-        """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsRequest:
-        """Deserializes the PipelinePermissionsRequest from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest),
-            pipeline_id=d.get("pipeline_id", None),
-        )
-
-
 @dataclass
 class PipelineSpec:
     budget_policy_id: Optional[str] = None
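
With PipelinePermissionsRequest gone, permission management presumably goes through the keyword-argument form of the permissions methods, which take the same two fields. A sketch under that assumption (the pipeline ID below is made up):

# Sketch: managing pipeline permissions without the removed request wrapper.
# Assumes PipelinesAPI.set_permissions() keeps its keyword-argument form.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import PipelineAccessControlRequest, PipelinePermissionLevel

w = WorkspaceClient()
w.pipelines.set_permissions(
    pipeline_id="1234-567890-abcde123",  # illustrative ID
    access_control_list=[
        PipelineAccessControlRequest(
            group_name="data-engineers",
            permission_level=PipelinePermissionLevel.CAN_MANAGE,
        )
    ],
)
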
@@ -3093,76 +2578,6 @@ class StackFrame:
         )
 
 
-@dataclass
-class StartUpdate:
-    cause: Optional[StartUpdateCause] = None
-
-    full_refresh: Optional[bool] = None
-    """If true, this update will reset all tables before running."""
-
-    full_refresh_selection: Optional[List[str]] = None
-    """A list of tables to update with fullRefresh. If both refresh_selection and
-    full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
-    that the states of the table will be reset before the refresh."""
-
-    pipeline_id: Optional[str] = None
-
-    refresh_selection: Optional[List[str]] = None
-    """A list of tables to update without fullRefresh. If both refresh_selection and
-    full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
-    that the states of the table will be reset before the refresh."""
-
-    validate_only: Optional[bool] = None
-    """If true, this update only validates the correctness of pipeline source code but does not
-    materialize or publish any datasets."""
-
-    def as_dict(self) -> dict:
-        """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause.value
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.full_refresh_selection:
-            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.refresh_selection:
-            body["refresh_selection"] = [v for v in self.refresh_selection]
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.full_refresh_selection:
-            body["full_refresh_selection"] = self.full_refresh_selection
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.refresh_selection:
-            body["refresh_selection"] = self.refresh_selection
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> StartUpdate:
-        """Deserializes the StartUpdate from a dictionary."""
-        return cls(
-            cause=_enum(d, "cause", StartUpdateCause),
-            full_refresh=d.get("full_refresh", None),
-            full_refresh_selection=d.get("full_refresh_selection", None),
-            pipeline_id=d.get("pipeline_id", None),
-            refresh_selection=d.get("refresh_selection", None),
-            validate_only=d.get("validate_only", None),
-        )
-
-
 class StartUpdateCause(Enum):
     """What triggered this update."""
 
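StartUpdate was likewise only a request wrapper. A sketch of starting a selective refresh without it, assuming w.pipelines.start_update() keeps its keyword-argument signature in 0.60.0 (only the model removal is shown in this diff):

# Sketch: starting an update without the removed StartUpdate wrapper.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.pipelines.start_update(
    pipeline_id="1234-567890-abcde123",  # illustrative ID
    full_refresh=False,
    refresh_selection=["sales_orders"],  # refresh just this table
    validate_only=False,
)
print(resp.update_id)
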
@@ -3311,6 +2726,10 @@ class TableSpecificConfig:
     primary_keys: Optional[List[str]] = None
     """The primary key of the table used to apply changes."""
 
+    query_based_connector_config: Optional[IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig] = (
+        None
+    )
+
     salesforce_include_formula_fields: Optional[bool] = None
     """If true, formula fields defined in the table are included in the ingestion. This setting is only
     valid for the Salesforce connector"""
@@ -3331,6 +2750,8 @@ class TableSpecificConfig:
             body["include_columns"] = [v for v in self.include_columns]
         if self.primary_keys:
             body["primary_keys"] = [v for v in self.primary_keys]
+        if self.query_based_connector_config:
+            body["query_based_connector_config"] = self.query_based_connector_config.as_dict()
         if self.salesforce_include_formula_fields is not None:
            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
         if self.scd_type is not None:
@@ -3348,6 +2769,8 @@ class TableSpecificConfig:
             body["include_columns"] = self.include_columns
         if self.primary_keys:
             body["primary_keys"] = self.primary_keys
+        if self.query_based_connector_config:
+            body["query_based_connector_config"] = self.query_based_connector_config
         if self.salesforce_include_formula_fields is not None:
             body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
         if self.scd_type is not None:
@@ -3363,6 +2786,11 @@ class TableSpecificConfig:
             exclude_columns=d.get("exclude_columns", None),
             include_columns=d.get("include_columns", None),
             primary_keys=d.get("primary_keys", None),
+            query_based_connector_config=_from_dict(
+                d,
+                "query_based_connector_config",
+                IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
+            ),
             salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
             scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
             sequence_by=d.get("sequence_by", None),
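
Taken together, the last four hunks thread the new query_based_connector_config field through TableSpecificConfig and its three serializers. A sketch of attaching the nested config to a table's ingestion settings (values illustrative; scd_type uses the existing TableSpecificConfigScdType enum):

# Sketch: attaching the new connector config to a table's ingestion settings.
from databricks.sdk.service.pipelines import (
    IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
    TableSpecificConfig,
    TableSpecificConfigScdType,
)

table_cfg = TableSpecificConfig(
    primary_keys=["order_id"],
    scd_type=TableSpecificConfigScdType.SCD_TYPE_2,
    query_based_connector_config=IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig(
        cursor_columns=["updated_at"],
    ),
)
# The nested config is serialized via its own as_dict(), as added in this release.
assert "query_based_connector_config" in table_cfg.as_dict()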