databricks-sdk 0.40.0__py3-none-any.whl → 0.42.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.

Potentially problematic release.


This version of databricks-sdk has been flagged as potentially problematic; consult the package registry's advisory page for details.

@@ -35,6 +35,11 @@ class BaseJob:
35
35
  Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
36
36
  on accessible budget policies of the run_as identity on job creation or modification."""
37
37
 
38
+ has_more: Optional[bool] = None
39
+ """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
40
+ can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
41
+ requests with `expand_tasks=true`."""
42
+
38
43
  job_id: Optional[int] = None
39
44
  """The canonical identifier for this job."""
40
45
 
@@ -49,6 +54,7 @@ class BaseJob:
49
54
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
50
55
  if self.effective_budget_policy_id is not None:
51
56
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
57
+ if self.has_more is not None: body['has_more'] = self.has_more
52
58
  if self.job_id is not None: body['job_id'] = self.job_id
53
59
  if self.settings: body['settings'] = self.settings.as_dict()
54
60
  return body
@@ -60,6 +66,7 @@ class BaseJob:
60
66
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
61
67
  if self.effective_budget_policy_id is not None:
62
68
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
69
+ if self.has_more is not None: body['has_more'] = self.has_more
63
70
  if self.job_id is not None: body['job_id'] = self.job_id
64
71
  if self.settings: body['settings'] = self.settings
65
72
  return body
@@ -70,6 +77,7 @@ class BaseJob:
70
77
  return cls(created_time=d.get('created_time', None),
71
78
  creator_user_name=d.get('creator_user_name', None),
72
79
  effective_budget_policy_id=d.get('effective_budget_policy_id', None),
80
+ has_more=d.get('has_more', None),
73
81
  job_id=d.get('job_id', None),
74
82
  settings=_from_dict(d, 'settings', JobSettings))
75
83
 
@@ -103,6 +111,12 @@ class BaseRun:
103
111
  description: Optional[str] = None
104
112
  """Description of the run"""
105
113
 
114
+ effective_performance_target: Optional[PerformanceTarget] = None
115
+ """effective_performance_target is the actual performance target used by the run during execution.
116
+ effective_performance_target can differ from performance_target depending on if the job was
117
+ eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically
118
+ override the value for the run (ex. RunNow)."""
119
+
106
120
  end_time: Optional[int] = None
107
121
  """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
108
122
  field is set to 0 if the job is still running."""
@@ -124,10 +138,16 @@ class BaseRun:
124
138
  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
125
139
  are used, `git_source` must be defined on the job."""
126
140
 
141
+ has_more: Optional[bool] = None
142
+ """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
143
+ can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
144
+ :method:jobs/listruns requests with `expand_tasks=true`."""
145
+
127
146
  job_clusters: Optional[List[JobCluster]] = None
128
147
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
129
148
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
130
- task settings."""
149
+ task settings. If more than 100 job clusters are available, you can paginate through them using
150
+ :method:jobs/getrun."""
131
151
 
132
152
  job_id: Optional[int] = None
133
153
  """The canonical identifier of the job that contains this run."""
@@ -198,7 +218,9 @@ class BaseRun:
198
218
 
199
219
  tasks: Optional[List[RunTask]] = None
200
220
  """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
201
- `JobsGetOutput` to retrieve the run resutls."""
221
+ `JobsGetOutput` to retrieve the run resutls. If more than 100 tasks are available, you can
222
+ paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
223
+ root to determine if more results are available."""
202
224
 
203
225
  trigger: Optional[TriggerType] = None
204
226
  """The type of trigger that fired this run.
@@ -224,9 +246,12 @@ class BaseRun:
224
246
  if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
225
247
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
226
248
  if self.description is not None: body['description'] = self.description
249
+ if self.effective_performance_target is not None:
250
+ body['effective_performance_target'] = self.effective_performance_target.value
227
251
  if self.end_time is not None: body['end_time'] = self.end_time
228
252
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
229
253
  if self.git_source: body['git_source'] = self.git_source.as_dict()
254
+ if self.has_more is not None: body['has_more'] = self.has_more
230
255
  if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
231
256
  if self.job_id is not None: body['job_id'] = self.job_id
232
257
  if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
@@ -261,9 +286,12 @@ class BaseRun:
261
286
  if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
262
287
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
263
288
  if self.description is not None: body['description'] = self.description
289
+ if self.effective_performance_target is not None:
290
+ body['effective_performance_target'] = self.effective_performance_target
264
291
  if self.end_time is not None: body['end_time'] = self.end_time
265
292
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
266
293
  if self.git_source: body['git_source'] = self.git_source
294
+ if self.has_more is not None: body['has_more'] = self.has_more
267
295
  if self.job_clusters: body['job_clusters'] = self.job_clusters
268
296
  if self.job_id is not None: body['job_id'] = self.job_id
269
297
  if self.job_parameters: body['job_parameters'] = self.job_parameters
@@ -298,9 +326,11 @@ class BaseRun:
298
326
  cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
299
327
  creator_user_name=d.get('creator_user_name', None),
300
328
  description=d.get('description', None),
329
+ effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
301
330
  end_time=d.get('end_time', None),
302
331
  execution_duration=d.get('execution_duration', None),
303
332
  git_source=_from_dict(d, 'git_source', GitSource),
333
+ has_more=d.get('has_more', None),
304
334
  job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
305
335
  job_id=d.get('job_id', None),
306
336
  job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
@@ -424,6 +454,7 @@ class CleanRoomTaskRunLifeCycleState(Enum):
424
454
  PENDING = 'PENDING'
425
455
  QUEUED = 'QUEUED'
426
456
  RUNNING = 'RUNNING'
457
+ RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED'
427
458
  SKIPPED = 'SKIPPED'
428
459
  TERMINATED = 'TERMINATED'
429
460
  TERMINATING = 'TERMINATING'
@@ -440,6 +471,7 @@ class CleanRoomTaskRunResultState(Enum):
440
471
  EXCLUDED = 'EXCLUDED'
441
472
  FAILED = 'FAILED'
442
473
  MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
474
+ RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED'
443
475
  SUCCESS = 'SUCCESS'
444
476
  SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
445
477
  TIMEDOUT = 'TIMEDOUT'
@@ -522,6 +554,42 @@ class CleanRoomsNotebookTask:
522
554
  notebook_name=d.get('notebook_name', None))
523
555
 
524
556
 
557
+ @dataclass
558
+ class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
559
+ clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None
560
+ """The run state of the clean rooms notebook task."""
561
+
562
+ notebook_output: Optional[NotebookOutput] = None
563
+ """The notebook output for the clean room run"""
564
+
565
+ output_schema_info: Optional[OutputSchemaInfo] = None
566
+ """Information on how to access the output schema for the clean room run"""
567
+
568
+ def as_dict(self) -> dict:
569
+ """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body."""
570
+ body = {}
571
+ if self.clean_room_job_run_state:
572
+ body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict()
573
+ if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict()
574
+ if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict()
575
+ return body
576
+
577
+ def as_shallow_dict(self) -> dict:
578
+ """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes."""
579
+ body = {}
580
+ if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state
581
+ if self.notebook_output: body['notebook_output'] = self.notebook_output
582
+ if self.output_schema_info: body['output_schema_info'] = self.output_schema_info
583
+ return body
584
+
585
+ @classmethod
586
+ def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
587
+ """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary."""
588
+ return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState),
589
+ notebook_output=_from_dict(d, 'notebook_output', NotebookOutput),
590
+ output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo))
591
+
592
+
525
593
  @dataclass
526
594
  class ClusterInstance:
527
595
  cluster_id: Optional[str] = None
@@ -754,7 +822,8 @@ class CreateJob:
754
822
  job_clusters: Optional[List[JobCluster]] = None
755
823
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
756
824
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
757
- task settings."""
825
+ task settings. If more than 100 job clusters are available, you can paginate through them using
826
+ :method:jobs/get."""
758
827
 
759
828
  max_concurrent_runs: Optional[int] = None
760
829
  """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -776,6 +845,10 @@ class CreateJob:
776
845
  parameters: Optional[List[JobParameterDefinition]] = None
777
846
  """Job-level parameter definitions"""
778
847
 
848
+ performance_target: Optional[PerformanceTarget] = None
849
+ """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
850
+ should be."""
851
+
779
852
  queue: Optional[QueueSettings] = None
780
853
  """The queue settings of the job."""
781
854
 
@@ -795,7 +868,9 @@ class CreateJob:
795
868
  be added to the job."""
796
869
 
797
870
  tasks: Optional[List[Task]] = None
798
- """A list of task specifications to be executed by this job."""
871
+ """A list of task specifications to be executed by this job. If more than 100 tasks are available,
872
+ you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
873
+ object root to determine if more results are available."""
799
874
 
800
875
  timeout_seconds: Optional[int] = None
801
876
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -828,6 +903,7 @@ class CreateJob:
828
903
  if self.name is not None: body['name'] = self.name
829
904
  if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
830
905
  if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
906
+ if self.performance_target is not None: body['performance_target'] = self.performance_target.value
831
907
  if self.queue: body['queue'] = self.queue.as_dict()
832
908
  if self.run_as: body['run_as'] = self.run_as.as_dict()
833
909
  if self.schedule: body['schedule'] = self.schedule.as_dict()
@@ -857,6 +933,7 @@ class CreateJob:
857
933
  if self.name is not None: body['name'] = self.name
858
934
  if self.notification_settings: body['notification_settings'] = self.notification_settings
859
935
  if self.parameters: body['parameters'] = self.parameters
936
+ if self.performance_target is not None: body['performance_target'] = self.performance_target
860
937
  if self.queue: body['queue'] = self.queue
861
938
  if self.run_as: body['run_as'] = self.run_as
862
939
  if self.schedule: body['schedule'] = self.schedule
@@ -886,6 +963,7 @@ class CreateJob:
886
963
  name=d.get('name', None),
887
964
  notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
888
965
  parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
966
+ performance_target=_enum(d, 'performance_target', PerformanceTarget),
889
967
  queue=_from_dict(d, 'queue', QueueSettings),
890
968
  run_as=_from_dict(d, 'run_as', JobRunAs),
891
969
  schedule=_from_dict(d, 'schedule', CronSchedule),
@@ -1680,9 +1758,17 @@ class Job:
1680
1758
  Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
1681
1759
  on accessible budget policies of the run_as identity on job creation or modification."""
1682
1760
 
1761
+ has_more: Optional[bool] = None
1762
+ """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
1763
+ can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
1764
+ requests with `expand_tasks=true`."""
1765
+
1683
1766
  job_id: Optional[int] = None
1684
1767
  """The canonical identifier for this job."""
1685
1768
 
1769
+ next_page_token: Optional[str] = None
1770
+ """A token that can be used to list the next page of sub-resources."""
1771
+
1686
1772
  run_as_user_name: Optional[str] = None
1687
1773
  """The email of an active workspace user or the application ID of a service principal that the job
1688
1774
  runs as. This value can be changed by setting the `run_as` field when creating or updating a
@@ -1703,7 +1789,9 @@ class Job:
1703
1789
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
1704
1790
  if self.effective_budget_policy_id is not None:
1705
1791
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
1792
+ if self.has_more is not None: body['has_more'] = self.has_more
1706
1793
  if self.job_id is not None: body['job_id'] = self.job_id
1794
+ if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
1707
1795
  if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
1708
1796
  if self.settings: body['settings'] = self.settings.as_dict()
1709
1797
  return body
@@ -1715,7 +1803,9 @@ class Job:
1715
1803
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
1716
1804
  if self.effective_budget_policy_id is not None:
1717
1805
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
1806
+ if self.has_more is not None: body['has_more'] = self.has_more
1718
1807
  if self.job_id is not None: body['job_id'] = self.job_id
1808
+ if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
1719
1809
  if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
1720
1810
  if self.settings: body['settings'] = self.settings
1721
1811
  return body
@@ -1726,7 +1816,9 @@ class Job:
1726
1816
  return cls(created_time=d.get('created_time', None),
1727
1817
  creator_user_name=d.get('creator_user_name', None),
1728
1818
  effective_budget_policy_id=d.get('effective_budget_policy_id', None),
1819
+ has_more=d.get('has_more', None),
1729
1820
  job_id=d.get('job_id', None),
1821
+ next_page_token=d.get('next_page_token', None),
1730
1822
  run_as_user_name=d.get('run_as_user_name', None),
1731
1823
  settings=_from_dict(d, 'settings', JobSettings))
1732
1824
 
@@ -2366,7 +2458,8 @@ class JobSettings:
2366
2458
  job_clusters: Optional[List[JobCluster]] = None
2367
2459
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
2368
2460
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
2369
- task settings."""
2461
+ task settings. If more than 100 job clusters are available, you can paginate through them using
2462
+ :method:jobs/get."""
2370
2463
 
2371
2464
  max_concurrent_runs: Optional[int] = None
2372
2465
  """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -2388,6 +2481,10 @@ class JobSettings:
2388
2481
  parameters: Optional[List[JobParameterDefinition]] = None
2389
2482
  """Job-level parameter definitions"""
2390
2483
 
2484
+ performance_target: Optional[PerformanceTarget] = None
2485
+ """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
2486
+ should be."""
2487
+
2391
2488
  queue: Optional[QueueSettings] = None
2392
2489
  """The queue settings of the job."""
2393
2490
 
@@ -2407,7 +2504,9 @@ class JobSettings:
2407
2504
  be added to the job."""
2408
2505
 
2409
2506
  tasks: Optional[List[Task]] = None
2410
- """A list of task specifications to be executed by this job."""
2507
+ """A list of task specifications to be executed by this job. If more than 100 tasks are available,
2508
+ you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
2509
+ object root to determine if more results are available."""
2411
2510
 
2412
2511
  timeout_seconds: Optional[int] = None
2413
2512
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -2438,6 +2537,7 @@ class JobSettings:
2438
2537
  if self.name is not None: body['name'] = self.name
2439
2538
  if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
2440
2539
  if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
2540
+ if self.performance_target is not None: body['performance_target'] = self.performance_target.value
2441
2541
  if self.queue: body['queue'] = self.queue.as_dict()
2442
2542
  if self.run_as: body['run_as'] = self.run_as.as_dict()
2443
2543
  if self.schedule: body['schedule'] = self.schedule.as_dict()
@@ -2466,6 +2566,7 @@ class JobSettings:
2466
2566
  if self.name is not None: body['name'] = self.name
2467
2567
  if self.notification_settings: body['notification_settings'] = self.notification_settings
2468
2568
  if self.parameters: body['parameters'] = self.parameters
2569
+ if self.performance_target is not None: body['performance_target'] = self.performance_target
2469
2570
  if self.queue: body['queue'] = self.queue
2470
2571
  if self.run_as: body['run_as'] = self.run_as
2471
2572
  if self.schedule: body['schedule'] = self.schedule
@@ -2494,6 +2595,7 @@ class JobSettings:
2494
2595
  name=d.get('name', None),
2495
2596
  notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
2496
2597
  parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
2598
+ performance_target=_enum(d, 'performance_target', PerformanceTarget),
2497
2599
  queue=_from_dict(d, 'queue', QueueSettings),
2498
2600
  run_as=_from_dict(d, 'run_as', JobRunAs),
2499
2601
  schedule=_from_dict(d, 'schedule', CronSchedule),
@@ -2875,12 +2977,57 @@ class NotebookTask:
2875
2977
  warehouse_id=d.get('warehouse_id', None))
2876
2978
 
2877
2979
 
2980
+ @dataclass
2981
+ class OutputSchemaInfo:
2982
+ """Stores the catalog name, schema name, and the output schema expiration time for the clean room
2983
+ run."""
2984
+
2985
+ catalog_name: Optional[str] = None
2986
+
2987
+ expiration_time: Optional[int] = None
2988
+ """The expiration time for the output schema as a Unix timestamp in milliseconds."""
2989
+
2990
+ schema_name: Optional[str] = None
2991
+
2992
+ def as_dict(self) -> dict:
2993
+ """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body."""
2994
+ body = {}
2995
+ if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
2996
+ if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
2997
+ if self.schema_name is not None: body['schema_name'] = self.schema_name
2998
+ return body
2999
+
3000
+ def as_shallow_dict(self) -> dict:
3001
+ """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes."""
3002
+ body = {}
3003
+ if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
3004
+ if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
3005
+ if self.schema_name is not None: body['schema_name'] = self.schema_name
3006
+ return body
3007
+
3008
+ @classmethod
3009
+ def from_dict(cls, d: Dict[str, any]) -> OutputSchemaInfo:
3010
+ """Deserializes the OutputSchemaInfo from a dictionary."""
3011
+ return cls(catalog_name=d.get('catalog_name', None),
3012
+ expiration_time=d.get('expiration_time', None),
3013
+ schema_name=d.get('schema_name', None))
3014
+
3015
+
2878
3016
  class PauseStatus(Enum):
2879
3017
 
2880
3018
  PAUSED = 'PAUSED'
2881
3019
  UNPAUSED = 'UNPAUSED'
2882
3020
 
2883
3021
 
3022
+ class PerformanceTarget(Enum):
3023
+ """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run
3024
+ on serverless compute should be. The performance mode on the job or pipeline should map to a
3025
+ performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget)."""
3026
+
3027
+ COST_OPTIMIZED = 'COST_OPTIMIZED'
3028
+ PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED'
3029
+
3030
+
2884
3031
  @dataclass
2885
3032
  class PeriodicTriggerConfiguration:
2886
3033
  interval: int
@@ -3642,6 +3789,12 @@ class Run:
3642
3789
  description: Optional[str] = None
3643
3790
  """Description of the run"""
3644
3791
 
3792
+ effective_performance_target: Optional[PerformanceTarget] = None
3793
+ """effective_performance_target is the actual performance target used by the run during execution.
3794
+ effective_performance_target can differ from performance_target depending on if the job was
3795
+ eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically
3796
+ override the value for the run (ex. RunNow)."""
3797
+
3645
3798
  end_time: Optional[int] = None
3646
3799
  """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
3647
3800
  field is set to 0 if the job is still running."""
@@ -3663,13 +3816,19 @@ class Run:
3663
3816
  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
3664
3817
  are used, `git_source` must be defined on the job."""
3665
3818
 
3819
+ has_more: Optional[bool] = None
3820
+ """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
3821
+ can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
3822
+ :method:jobs/listruns requests with `expand_tasks=true`."""
3823
+
3666
3824
  iterations: Optional[List[RunTask]] = None
3667
3825
  """Only populated by for-each iterations. The parent for-each task is located in tasks array."""
3668
3826
 
3669
3827
  job_clusters: Optional[List[JobCluster]] = None
3670
3828
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
3671
3829
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
3672
- task settings."""
3830
+ task settings. If more than 100 job clusters are available, you can paginate through them using
3831
+ :method:jobs/getrun."""
3673
3832
 
3674
3833
  job_id: Optional[int] = None
3675
3834
  """The canonical identifier of the job that contains this run."""
@@ -3743,7 +3902,9 @@ class Run:
3743
3902
 
3744
3903
  tasks: Optional[List[RunTask]] = None
3745
3904
  """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
3746
- `JobsGetOutput` to retrieve the run resutls."""
3905
+ `JobsGetOutput` to retrieve the run resutls. If more than 100 tasks are available, you can
3906
+ paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
3907
+ root to determine if more results are available."""
3747
3908
 
3748
3909
  trigger: Optional[TriggerType] = None
3749
3910
  """The type of trigger that fired this run.
@@ -3769,9 +3930,12 @@ class Run:
3769
3930
  if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
3770
3931
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
3771
3932
  if self.description is not None: body['description'] = self.description
3933
+ if self.effective_performance_target is not None:
3934
+ body['effective_performance_target'] = self.effective_performance_target.value
3772
3935
  if self.end_time is not None: body['end_time'] = self.end_time
3773
3936
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
3774
3937
  if self.git_source: body['git_source'] = self.git_source.as_dict()
3938
+ if self.has_more is not None: body['has_more'] = self.has_more
3775
3939
  if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations]
3776
3940
  if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
3777
3941
  if self.job_id is not None: body['job_id'] = self.job_id
@@ -3808,9 +3972,12 @@ class Run:
3808
3972
  if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
3809
3973
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
3810
3974
  if self.description is not None: body['description'] = self.description
3975
+ if self.effective_performance_target is not None:
3976
+ body['effective_performance_target'] = self.effective_performance_target
3811
3977
  if self.end_time is not None: body['end_time'] = self.end_time
3812
3978
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
3813
3979
  if self.git_source: body['git_source'] = self.git_source
3980
+ if self.has_more is not None: body['has_more'] = self.has_more
3814
3981
  if self.iterations: body['iterations'] = self.iterations
3815
3982
  if self.job_clusters: body['job_clusters'] = self.job_clusters
3816
3983
  if self.job_id is not None: body['job_id'] = self.job_id
@@ -3847,9 +4014,11 @@ class Run:
3847
4014
  cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
3848
4015
  creator_user_name=d.get('creator_user_name', None),
3849
4016
  description=d.get('description', None),
4017
+ effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
3850
4018
  end_time=d.get('end_time', None),
3851
4019
  execution_duration=d.get('execution_duration', None),
3852
4020
  git_source=_from_dict(d, 'git_source', GitSource),
4021
+ has_more=d.get('has_more', None),
3853
4022
  iterations=_repeated_dict(d, 'iterations', RunTask),
3854
4023
  job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
3855
4024
  job_id=d.get('job_id', None),
@@ -4232,6 +4401,11 @@ class RunNow:
4232
4401
  """A list of task keys to run inside of the job. If this field is not provided, all tasks in the
4233
4402
  job will be run."""
4234
4403
 
4404
+ performance_target: Optional[PerformanceTarget] = None
4405
+ """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
4406
+ compute should be. For RunNow request, the run will execute with this settings instead of ones
4407
+ defined in job."""
4408
+
4235
4409
  pipeline_params: Optional[PipelineParams] = None
4236
4410
  """Controls whether the pipeline should perform a full refresh"""
4237
4411
 
@@ -4287,6 +4461,7 @@ class RunNow:
4287
4461
  if self.job_parameters: body['job_parameters'] = self.job_parameters
4288
4462
  if self.notebook_params: body['notebook_params'] = self.notebook_params
4289
4463
  if self.only: body['only'] = [v for v in self.only]
4464
+ if self.performance_target is not None: body['performance_target'] = self.performance_target.value
4290
4465
  if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
4291
4466
  if self.python_named_params: body['python_named_params'] = self.python_named_params
4292
4467
  if self.python_params: body['python_params'] = [v for v in self.python_params]
@@ -4305,6 +4480,7 @@ class RunNow:
4305
4480
  if self.job_parameters: body['job_parameters'] = self.job_parameters
4306
4481
  if self.notebook_params: body['notebook_params'] = self.notebook_params
4307
4482
  if self.only: body['only'] = self.only
4483
+ if self.performance_target is not None: body['performance_target'] = self.performance_target
4308
4484
  if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
4309
4485
  if self.python_named_params: body['python_named_params'] = self.python_named_params
4310
4486
  if self.python_params: body['python_params'] = self.python_params
@@ -4323,6 +4499,7 @@ class RunNow:
4323
4499
  job_parameters=d.get('job_parameters', None),
4324
4500
  notebook_params=d.get('notebook_params', None),
4325
4501
  only=d.get('only', None),
4502
+ performance_target=_enum(d, 'performance_target', PerformanceTarget),
4326
4503
  pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
4327
4504
  python_named_params=d.get('python_named_params', None),
4328
4505
  python_params=d.get('python_params', None),
@@ -4365,6 +4542,9 @@ class RunNowResponse:
4365
4542
  class RunOutput:
4366
4543
  """Run output was retrieved successfully."""
4367
4544
 
4545
+ clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None
4546
+ """The output of a clean rooms notebook task, if available"""
4547
+
4368
4548
  dbt_output: Optional[DbtOutput] = None
4369
4549
  """The output of a dbt task, if available."""
4370
4550
 
@@ -4409,6 +4589,8 @@ class RunOutput:
4409
4589
  def as_dict(self) -> dict:
4410
4590
  """Serializes the RunOutput into a dictionary suitable for use as a JSON request body."""
4411
4591
  body = {}
4592
+ if self.clean_rooms_notebook_output:
4593
+ body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict()
4412
4594
  if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict()
4413
4595
  if self.error is not None: body['error'] = self.error
4414
4596
  if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -4424,6 +4606,8 @@ class RunOutput:
4424
4606
  def as_shallow_dict(self) -> dict:
4425
4607
  """Serializes the RunOutput into a shallow dictionary of its immediate attributes."""
4426
4608
  body = {}
4609
+ if self.clean_rooms_notebook_output:
4610
+ body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output
4427
4611
  if self.dbt_output: body['dbt_output'] = self.dbt_output
4428
4612
  if self.error is not None: body['error'] = self.error
4429
4613
  if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -4439,7 +4623,9 @@ class RunOutput:
4439
4623
  @classmethod
4440
4624
  def from_dict(cls, d: Dict[str, any]) -> RunOutput:
4441
4625
  """Deserializes the RunOutput from a dictionary."""
4442
- return cls(dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
4626
+ return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output',
4627
+ CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput),
4628
+ dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
4443
4629
  error=d.get('error', None),
4444
4630
  error_trace=d.get('error_trace', None),
4445
4631
  info=d.get('info', None),
@@ -4729,6 +4915,12 @@ class RunTask:
4729
4915
  description: Optional[str] = None
4730
4916
  """An optional description for this task."""
4731
4917
 
4918
+ effective_performance_target: Optional[PerformanceTarget] = None
4919
+ """effective_performance_target is the actual performance target used by the run during execution.
4920
+ effective_performance_target can differ from performance_target depending on whether the job was
4921
+ eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if an override was
4922
+ provided for the run (e.g., RunNow)."""
4923
+
4732
4924
  email_notifications: Optional[JobEmailNotifications] = None
4733
4925
  """An optional set of email addresses notified when the task run begins or completes. The default
4734
4926
  behavior is to not send any emails."""
@@ -4877,6 +5069,8 @@ class RunTask:
4877
5069
  if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
4878
5070
  if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
4879
5071
  if self.description is not None: body['description'] = self.description
5072
+ if self.effective_performance_target is not None:
5073
+ body['effective_performance_target'] = self.effective_performance_target.value
4880
5074
  if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
4881
5075
  if self.end_time is not None: body['end_time'] = self.end_time
4882
5076
  if self.environment_key is not None: body['environment_key'] = self.environment_key
@@ -4922,6 +5116,8 @@ class RunTask:
4922
5116
  if self.dbt_task: body['dbt_task'] = self.dbt_task
4923
5117
  if self.depends_on: body['depends_on'] = self.depends_on
4924
5118
  if self.description is not None: body['description'] = self.description
5119
+ if self.effective_performance_target is not None:
5120
+ body['effective_performance_target'] = self.effective_performance_target
4925
5121
  if self.email_notifications: body['email_notifications'] = self.email_notifications
4926
5122
  if self.end_time is not None: body['end_time'] = self.end_time
4927
5123
  if self.environment_key is not None: body['environment_key'] = self.environment_key
@@ -4968,6 +5164,7 @@ class RunTask:
4968
5164
  dbt_task=_from_dict(d, 'dbt_task', DbtTask),
4969
5165
  depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
4970
5166
  description=d.get('description', None),
5167
+ effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
4971
5168
  email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
4972
5169
  end_time=d.get('end_time', None),
4973
5170
  environment_key=d.get('environment_key', None),
@@ -5047,12 +5244,16 @@ class SparkJarTask:
5047
5244
 
5048
5245
  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
5049
5246
 
5247
+ run_as_repl: Optional[bool] = None
5248
+ """Deprecated. A value of `false` is no longer supported."""
5249
+
5050
5250
  def as_dict(self) -> dict:
5051
5251
  """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
5052
5252
  body = {}
5053
5253
  if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
5054
5254
  if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
5055
5255
  if self.parameters: body['parameters'] = [v for v in self.parameters]
5256
+ if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
5056
5257
  return body
5057
5258
 
5058
5259
  def as_shallow_dict(self) -> dict:
@@ -5061,6 +5262,7 @@ class SparkJarTask:
5061
5262
  if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
5062
5263
  if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
5063
5264
  if self.parameters: body['parameters'] = self.parameters
5265
+ if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
5064
5266
  return body
5065
5267
 
5066
5268
  @classmethod
@@ -5068,7 +5270,8 @@ class SparkJarTask:
5068
5270
  """Deserializes the SparkJarTask from a dictionary."""
5069
5271
  return cls(jar_uri=d.get('jar_uri', None),
5070
5272
  main_class_name=d.get('main_class_name', None),
5071
- parameters=d.get('parameters', None))
5273
+ parameters=d.get('parameters', None),
5274
+ run_as_repl=d.get('run_as_repl', None))
5072
5275
 
5073
5276
 
5074
5277
  @dataclass
@@ -6489,6 +6692,7 @@ class TerminationCodeCode(Enum):
6489
6692
 
6490
6693
  [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
6491
6694
 
6695
+ BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED'
6492
6696
  CANCELED = 'CANCELED'
6493
6697
  CLOUD_FAILURE = 'CLOUD_FAILURE'
6494
6698
  CLUSTER_ERROR = 'CLUSTER_ERROR'
@@ -7011,6 +7215,7 @@ class JobsAPI:
7011
7215
  name: Optional[str] = None,
7012
7216
  notification_settings: Optional[JobNotificationSettings] = None,
7013
7217
  parameters: Optional[List[JobParameterDefinition]] = None,
7218
+ performance_target: Optional[PerformanceTarget] = None,
7014
7219
  queue: Optional[QueueSettings] = None,
7015
7220
  run_as: Optional[JobRunAs] = None,
7016
7221
  schedule: Optional[CronSchedule] = None,
@@ -7066,6 +7271,7 @@ class JobsAPI:
7066
7271
  :param job_clusters: List[:class:`JobCluster`] (optional)
7067
7272
  A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
7068
7273
  cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
7274
+ If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
7069
7275
  :param max_concurrent_runs: int (optional)
7070
7276
  An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
7071
7277
  able to execute multiple runs of the same job concurrently. This is useful for example if you
@@ -7082,6 +7288,9 @@ class JobsAPI:
7082
7288
  `email_notifications` and `webhook_notifications` for this job.
7083
7289
  :param parameters: List[:class:`JobParameterDefinition`] (optional)
7084
7290
  Job-level parameter definitions
7291
+ :param performance_target: :class:`PerformanceTarget` (optional)
7292
+ PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless should
7293
+ be.
7085
7294
  :param queue: :class:`QueueSettings` (optional)
7086
7295
  The queue settings of the job.
7087
7296
  :param run_as: :class:`JobRunAs` (optional)
@@ -7097,7 +7306,9 @@ class JobsAPI:
7097
7306
  clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
7098
7307
  to the job.
7099
7308
  :param tasks: List[:class:`Task`] (optional)
7100
- A list of task specifications to be executed by this job.
7309
+ A list of task specifications to be executed by this job. If more than 100 tasks are available, you
7310
+ can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
7311
+ to determine if more results are available.
7101
7312
  :param timeout_seconds: int (optional)
7102
7313
  An optional timeout applied to each run of this job. A value of `0` means no timeout.
7103
7314
  :param trigger: :class:`TriggerSettings` (optional)
@@ -7127,6 +7338,7 @@ class JobsAPI:
7127
7338
  if name is not None: body['name'] = name
7128
7339
  if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
7129
7340
  if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters]
7341
+ if performance_target is not None: body['performance_target'] = performance_target.value
7130
7342
  if queue is not None: body['queue'] = queue.as_dict()
7131
7343
  if run_as is not None: body['run_as'] = run_as.as_dict()
7132
7344
  if schedule is not None: body['schedule'] = schedule.as_dict()
@@ -7193,19 +7405,28 @@ class JobsAPI:
7193
7405
  res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
7194
7406
  return ExportRunOutput.from_dict(res)
7195
7407
 
7196
- def get(self, job_id: int) -> Job:
7408
+ def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
7197
7409
  """Get a single job.
7198
7410
 
7199
7411
  Retrieves the details for a single job.
7200
7412
 
7413
+ In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
7414
+ either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
7415
+ value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
7416
+ be empty on later pages.
7417
+
7201
7418
  :param job_id: int
7202
7419
  The canonical identifier of the job to retrieve information about. This field is required.
7420
+ :param page_token: str (optional)
7421
+ Use `next_page_token` returned from the previous GetJob to request the next page of the job's
7422
+ sub-resources.
7203
7423
 
7204
7424
  :returns: :class:`Job`
7205
7425
  """
7206
7426
 
7207
7427
  query = {}
7208
7428
  if job_id is not None: query['job_id'] = job_id
7429
+ if page_token is not None: query['page_token'] = page_token
7209
7430
  headers = {'Accept': 'application/json', }
7210
7431
 
7211
7432
  res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
@@ -7251,7 +7472,12 @@ class JobsAPI:
7251
7472
  page_token: Optional[str] = None) -> Run:
7252
7473
  """Get a single job run.
7253
7474
 
7254
- Retrieve the metadata of a run.
7475
+ Retrieves the metadata of a run.
7476
+
7477
+ In Jobs API 2.2, requests for a single job run support pagination of `tasks` and `job_clusters` when
7478
+ either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
7479
+ value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
7480
+ be empty on later pages.
7255
7481
 
7256
7482
  :param run_id: int
7257
7483
  The canonical identifier of the run for which to retrieve the metadata. This field is required.
@@ -7260,8 +7486,8 @@ class JobsAPI:
7260
7486
  :param include_resolved_values: bool (optional)
7261
7487
  Whether to include resolved parameter values in the response.
7262
7488
  :param page_token: str (optional)
7263
- To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
7264
- the GetJob response.
7489
+ Use `next_page_token` returned from the previous GetRun to request the next page of the run's
7490
+ sub-resources.
7265
7491
 
7266
7492
  :returns: :class:`Run`
7267
7493
  """
@@ -7313,7 +7539,8 @@ class JobsAPI:
7313
7539
  Retrieves a list of jobs.
7314
7540
 
7315
7541
  :param expand_tasks: bool (optional)
7316
- Whether to include task and cluster details in the response.
7542
+ Whether to include task and cluster details in the response. Note that in API 2.2, only the first
7543
+ 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
7317
7544
  :param limit: int (optional)
7318
7545
  The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
7319
7546
  default value is 20.
@@ -7370,7 +7597,8 @@ class JobsAPI:
7370
7597
  If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
7371
7598
  active and completed runs. This field cannot be `true` when active_only is `true`.
7372
7599
  :param expand_tasks: bool (optional)
7373
- Whether to include task and cluster details in the response.
7600
+ Whether to include task and cluster details in the response. Note that in API 2.2, only the first
7601
+ 100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
7374
7602
  :param job_id: int (optional)
7375
7603
  The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
7376
7604
  :param limit: int (optional)
@@ -7609,6 +7837,7 @@ class JobsAPI:
7609
7837
  job_parameters: Optional[Dict[str, str]] = None,
7610
7838
  notebook_params: Optional[Dict[str, str]] = None,
7611
7839
  only: Optional[List[str]] = None,
7840
+ performance_target: Optional[PerformanceTarget] = None,
7612
7841
  pipeline_params: Optional[PipelineParams] = None,
7613
7842
  python_named_params: Optional[Dict[str, str]] = None,
7614
7843
  python_params: Optional[List[str]] = None,
@@ -7668,6 +7897,10 @@ class JobsAPI:
7668
7897
  :param only: List[str] (optional)
7669
7898
  A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
7670
7899
  will be run.
7900
+ :param performance_target: :class:`PerformanceTarget` (optional)
7901
+ PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
7902
+ compute should be. For a RunNow request, the run will execute with these settings instead of the ones
7903
+ defined in the job.
7671
7904
  :param pipeline_params: :class:`PipelineParams` (optional)
7672
7905
  Controls whether the pipeline should perform a full refresh
7673
7906
  :param python_named_params: Dict[str,str] (optional)
@@ -7720,6 +7953,7 @@ class JobsAPI:
7720
7953
  if job_parameters is not None: body['job_parameters'] = job_parameters
7721
7954
  if notebook_params is not None: body['notebook_params'] = notebook_params
7722
7955
  if only is not None: body['only'] = [v for v in only]
7956
+ if performance_target is not None: body['performance_target'] = performance_target.value
7723
7957
  if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
7724
7958
  if python_named_params is not None: body['python_named_params'] = python_named_params
7725
7959
  if python_params is not None: body['python_params'] = [v for v in python_params]
@@ -7742,6 +7976,7 @@ class JobsAPI:
7742
7976
  job_parameters: Optional[Dict[str, str]] = None,
7743
7977
  notebook_params: Optional[Dict[str, str]] = None,
7744
7978
  only: Optional[List[str]] = None,
7979
+ performance_target: Optional[PerformanceTarget] = None,
7745
7980
  pipeline_params: Optional[PipelineParams] = None,
7746
7981
  python_named_params: Optional[Dict[str, str]] = None,
7747
7982
  python_params: Optional[List[str]] = None,
@@ -7756,6 +7991,7 @@ class JobsAPI:
7756
7991
  job_parameters=job_parameters,
7757
7992
  notebook_params=notebook_params,
7758
7993
  only=only,
7994
+ performance_target=performance_target,
7759
7995
  pipeline_params=pipeline_params,
7760
7996
  python_named_params=python_named_params,
7761
7997
  python_params=python_params,