databricks-sdk 0.40.0__py3-none-any.whl → 0.41.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

@@ -106,6 +106,58 @@ class AccessControlResponse:
106
106
  user_name=d.get('user_name', None))
107
107
 
108
108
 
109
+ @dataclass
110
+ class Actor:
111
+ """represents an identity trying to access a resource - user or a service principal group can be a
112
+ principal of a permission set assignment but an actor is always a user or a service principal"""
113
+
114
+ actor_id: Optional[int] = None
115
+
116
+ def as_dict(self) -> dict:
117
+ """Serializes the Actor into a dictionary suitable for use as a JSON request body."""
118
+ body = {}
119
+ if self.actor_id is not None: body['actor_id'] = self.actor_id
120
+ return body
121
+
122
+ def as_shallow_dict(self) -> dict:
123
+ """Serializes the Actor into a shallow dictionary of its immediate attributes."""
124
+ body = {}
125
+ if self.actor_id is not None: body['actor_id'] = self.actor_id
126
+ return body
127
+
128
+ @classmethod
129
+ def from_dict(cls, d: Dict[str, any]) -> Actor:
130
+ """Deserializes the Actor from a dictionary."""
131
+ return cls(actor_id=d.get('actor_id', None))
132
+
133
+
134
+ @dataclass
135
+ class CheckPolicyResponse:
136
+ consistency_token: ConsistencyToken
137
+
138
+ is_permitted: Optional[bool] = None
139
+
140
+ def as_dict(self) -> dict:
141
+ """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body."""
142
+ body = {}
143
+ if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict()
144
+ if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
145
+ return body
146
+
147
+ def as_shallow_dict(self) -> dict:
148
+ """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes."""
149
+ body = {}
150
+ if self.consistency_token: body['consistency_token'] = self.consistency_token
151
+ if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
152
+ return body
153
+
154
+ @classmethod
155
+ def from_dict(cls, d: Dict[str, any]) -> CheckPolicyResponse:
156
+ """Deserializes the CheckPolicyResponse from a dictionary."""
157
+ return cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken),
158
+ is_permitted=d.get('is_permitted', None))
159
+
160
+
109
161
  @dataclass
110
162
  class ComplexValue:
111
163
  display: Optional[str] = None
@@ -148,6 +200,28 @@ class ComplexValue:
148
200
  value=d.get('value', None))
149
201
 
150
202
 
203
+ @dataclass
204
+ class ConsistencyToken:
205
+ value: str
206
+
207
+ def as_dict(self) -> dict:
208
+ """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body."""
209
+ body = {}
210
+ if self.value is not None: body['value'] = self.value
211
+ return body
212
+
213
+ def as_shallow_dict(self) -> dict:
214
+ """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes."""
215
+ body = {}
216
+ if self.value is not None: body['value'] = self.value
217
+ return body
218
+
219
+ @classmethod
220
+ def from_dict(cls, d: Dict[str, any]) -> ConsistencyToken:
221
+ """Deserializes the ConsistencyToken from a dictionary."""
222
+ return cls(value=d.get('value', None))
223
+
224
+
151
225
  @dataclass
152
226
  class DeleteResponse:
153
227
 
@@ -1219,6 +1293,49 @@ class PrincipalOutput:
1219
1293
  user_name=d.get('user_name', None))
1220
1294
 
1221
1295
 
1296
+ class RequestAuthzIdentity(Enum):
1297
+ """Defines the identity to be used for authZ of the request on the server side. See one pager
1298
+ for more information: http://go/acl/service-identity"""
1299
+
1300
+ REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY'
1301
+ REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT'
1302
+
1303
+
1304
+ @dataclass
1305
+ class ResourceInfo:
1306
+ id: str
1307
+ """Id of the current resource."""
1308
+
1309
+ legacy_acl_path: Optional[str] = None
1310
+ """The legacy acl path of the current resource."""
1311
+
1312
+ parent_resource_info: Optional[ResourceInfo] = None
1313
+ """Parent resource info for the current resource. The parent may have another parent."""
1314
+
1315
+ def as_dict(self) -> dict:
1316
+ """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body."""
1317
+ body = {}
1318
+ if self.id is not None: body['id'] = self.id
1319
+ if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
1320
+ if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict()
1321
+ return body
1322
+
1323
+ def as_shallow_dict(self) -> dict:
1324
+ """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes."""
1325
+ body = {}
1326
+ if self.id is not None: body['id'] = self.id
1327
+ if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
1328
+ if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info
1329
+ return body
1330
+
1331
+ @classmethod
1332
+ def from_dict(cls, d: Dict[str, any]) -> ResourceInfo:
1333
+ """Deserializes the ResourceInfo from a dictionary."""
1334
+ return cls(id=d.get('id', None),
1335
+ legacy_acl_path=d.get('legacy_acl_path', None),
1336
+ parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo))
1337
+
1338
+
1222
1339
  @dataclass
1223
1340
  class ResourceMeta:
1224
1341
  resource_type: Optional[str] = None
@@ -1622,6 +1739,47 @@ class WorkspacePermissions:
1622
1739
  return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput))
1623
1740
 
1624
1741
 
1742
+ class AccessControlAPI:
1743
+ """Rule based Access Control for Databricks Resources."""
1744
+
1745
+ def __init__(self, api_client):
1746
+ self._api = api_client
1747
+
1748
+ def check_policy(self,
1749
+ actor: Actor,
1750
+ permission: str,
1751
+ resource: str,
1752
+ consistency_token: ConsistencyToken,
1753
+ authz_identity: RequestAuthzIdentity,
1754
+ *,
1755
+ resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse:
1756
+ """Check access policy to a resource.
1757
+
1758
+ :param actor: :class:`Actor`
1759
+ :param permission: str
1760
+ :param resource: str
1761
+ Ex: (servicePrincipal/use, accounts/<account-id>/servicePrincipals/<sp-id>) Ex:
1762
+ (servicePrincipal.ruleSet/update, accounts/<account-id>/servicePrincipals/<sp-id>/ruleSets/default)
1763
+ :param consistency_token: :class:`ConsistencyToken`
1764
+ :param authz_identity: :class:`RequestAuthzIdentity`
1765
+ :param resource_info: :class:`ResourceInfo` (optional)
1766
+
1767
+ :returns: :class:`CheckPolicyResponse`
1768
+ """
1769
+
1770
+ query = {}
1771
+ if actor is not None: query['actor'] = actor.as_dict()
1772
+ if authz_identity is not None: query['authz_identity'] = authz_identity.value
1773
+ if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict()
1774
+ if permission is not None: query['permission'] = permission
1775
+ if resource is not None: query['resource'] = resource
1776
+ if resource_info is not None: query['resource_info'] = resource_info.as_dict()
1777
+ headers = {'Accept': 'application/json', }
1778
+
1779
+ res = self._api.do('GET', '/api/2.0/access-control/check-policy-v2', query=query, headers=headers)
1780
+ return CheckPolicyResponse.from_dict(res)
1781
+
1782
+
1625
1783
  class AccountAccessControlAPI:
1626
1784
  """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
1627
1785
  grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
@@ -35,6 +35,11 @@ class BaseJob:
35
35
  Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
36
36
  on accessible budget policies of the run_as identity on job creation or modification."""
37
37
 
38
+ has_more: Optional[bool] = None
39
+ """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
40
+ can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
41
+ requests with `expand_tasks=true`."""
42
+
38
43
  job_id: Optional[int] = None
39
44
  """The canonical identifier for this job."""
40
45
 
@@ -49,6 +54,7 @@ class BaseJob:
49
54
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
50
55
  if self.effective_budget_policy_id is not None:
51
56
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
57
+ if self.has_more is not None: body['has_more'] = self.has_more
52
58
  if self.job_id is not None: body['job_id'] = self.job_id
53
59
  if self.settings: body['settings'] = self.settings.as_dict()
54
60
  return body
@@ -60,6 +66,7 @@ class BaseJob:
60
66
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
61
67
  if self.effective_budget_policy_id is not None:
62
68
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
69
+ if self.has_more is not None: body['has_more'] = self.has_more
63
70
  if self.job_id is not None: body['job_id'] = self.job_id
64
71
  if self.settings: body['settings'] = self.settings
65
72
  return body
@@ -70,6 +77,7 @@ class BaseJob:
70
77
  return cls(created_time=d.get('created_time', None),
71
78
  creator_user_name=d.get('creator_user_name', None),
72
79
  effective_budget_policy_id=d.get('effective_budget_policy_id', None),
80
+ has_more=d.get('has_more', None),
73
81
  job_id=d.get('job_id', None),
74
82
  settings=_from_dict(d, 'settings', JobSettings))
75
83
 
@@ -124,10 +132,16 @@ class BaseRun:
124
132
  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
125
133
  are used, `git_source` must be defined on the job."""
126
134
 
135
+ has_more: Optional[bool] = None
136
+ """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
137
+ can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
138
+ :method:jobs/listruns requests with `expand_tasks=true`."""
139
+
127
140
  job_clusters: Optional[List[JobCluster]] = None
128
141
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
129
142
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
130
- task settings."""
143
+ task settings. If more than 100 job clusters are available, you can paginate through them using
144
+ :method:jobs/getrun."""
131
145
 
132
146
  job_id: Optional[int] = None
133
147
  """The canonical identifier of the job that contains this run."""
@@ -198,7 +212,9 @@ class BaseRun:
198
212
 
199
213
  tasks: Optional[List[RunTask]] = None
200
214
  """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
201
- `JobsGetOutput` to retrieve the run results."""
215
+ `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can
216
+ paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
217
+ root to determine if more results are available."""
202
218
 
203
219
  trigger: Optional[TriggerType] = None
204
220
  """The type of trigger that fired this run.
@@ -227,6 +243,7 @@ class BaseRun:
227
243
  if self.end_time is not None: body['end_time'] = self.end_time
228
244
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
229
245
  if self.git_source: body['git_source'] = self.git_source.as_dict()
246
+ if self.has_more is not None: body['has_more'] = self.has_more
230
247
  if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
231
248
  if self.job_id is not None: body['job_id'] = self.job_id
232
249
  if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
@@ -264,6 +281,7 @@ class BaseRun:
264
281
  if self.end_time is not None: body['end_time'] = self.end_time
265
282
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
266
283
  if self.git_source: body['git_source'] = self.git_source
284
+ if self.has_more is not None: body['has_more'] = self.has_more
267
285
  if self.job_clusters: body['job_clusters'] = self.job_clusters
268
286
  if self.job_id is not None: body['job_id'] = self.job_id
269
287
  if self.job_parameters: body['job_parameters'] = self.job_parameters
@@ -301,6 +319,7 @@ class BaseRun:
301
319
  end_time=d.get('end_time', None),
302
320
  execution_duration=d.get('execution_duration', None),
303
321
  git_source=_from_dict(d, 'git_source', GitSource),
322
+ has_more=d.get('has_more', None),
304
323
  job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
305
324
  job_id=d.get('job_id', None),
306
325
  job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
@@ -424,6 +443,7 @@ class CleanRoomTaskRunLifeCycleState(Enum):
424
443
  PENDING = 'PENDING'
425
444
  QUEUED = 'QUEUED'
426
445
  RUNNING = 'RUNNING'
446
+ RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED'
427
447
  SKIPPED = 'SKIPPED'
428
448
  TERMINATED = 'TERMINATED'
429
449
  TERMINATING = 'TERMINATING'
@@ -440,6 +460,7 @@ class CleanRoomTaskRunResultState(Enum):
440
460
  EXCLUDED = 'EXCLUDED'
441
461
  FAILED = 'FAILED'
442
462
  MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
463
+ RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED'
443
464
  SUCCESS = 'SUCCESS'
444
465
  SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
445
466
  TIMEDOUT = 'TIMEDOUT'
@@ -522,6 +543,42 @@ class CleanRoomsNotebookTask:
522
543
  notebook_name=d.get('notebook_name', None))
523
544
 
524
545
 
546
+ @dataclass
547
+ class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
548
+ clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None
549
+ """The run state of the clean rooms notebook task."""
550
+
551
+ notebook_output: Optional[NotebookOutput] = None
552
+ """The notebook output for the clean room run"""
553
+
554
+ output_schema_info: Optional[OutputSchemaInfo] = None
555
+ """Information on how to access the output schema for the clean room run"""
556
+
557
+ def as_dict(self) -> dict:
558
+ """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body."""
559
+ body = {}
560
+ if self.clean_room_job_run_state:
561
+ body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict()
562
+ if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict()
563
+ if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict()
564
+ return body
565
+
566
+ def as_shallow_dict(self) -> dict:
567
+ """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes."""
568
+ body = {}
569
+ if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state
570
+ if self.notebook_output: body['notebook_output'] = self.notebook_output
571
+ if self.output_schema_info: body['output_schema_info'] = self.output_schema_info
572
+ return body
573
+
574
+ @classmethod
575
+ def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
576
+ """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary."""
577
+ return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState),
578
+ notebook_output=_from_dict(d, 'notebook_output', NotebookOutput),
579
+ output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo))
580
+
581
+
525
582
  @dataclass
526
583
  class ClusterInstance:
527
584
  cluster_id: Optional[str] = None
@@ -754,7 +811,8 @@ class CreateJob:
754
811
  job_clusters: Optional[List[JobCluster]] = None
755
812
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
756
813
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
757
- task settings."""
814
+ task settings. If more than 100 job clusters are available, you can paginate through them using
815
+ :method:jobs/get."""
758
816
 
759
817
  max_concurrent_runs: Optional[int] = None
760
818
  """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -795,7 +853,9 @@ class CreateJob:
795
853
  be added to the job."""
796
854
 
797
855
  tasks: Optional[List[Task]] = None
798
- """A list of task specifications to be executed by this job."""
856
+ """A list of task specifications to be executed by this job. If more than 100 tasks are available,
857
+ you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
858
+ object root to determine if more results are available."""
799
859
 
800
860
  timeout_seconds: Optional[int] = None
801
861
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -1680,9 +1740,17 @@ class Job:
1680
1740
  Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
1681
1741
  on accessible budget policies of the run_as identity on job creation or modification."""
1682
1742
 
1743
+ has_more: Optional[bool] = None
1744
+ """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
1745
+ can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
1746
+ requests with `expand_tasks=true`."""
1747
+
1683
1748
  job_id: Optional[int] = None
1684
1749
  """The canonical identifier for this job."""
1685
1750
 
1751
+ next_page_token: Optional[str] = None
1752
+ """A token that can be used to list the next page of sub-resources."""
1753
+
1686
1754
  run_as_user_name: Optional[str] = None
1687
1755
  """The email of an active workspace user or the application ID of a service principal that the job
1688
1756
  runs as. This value can be changed by setting the `run_as` field when creating or updating a
@@ -1703,7 +1771,9 @@ class Job:
1703
1771
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
1704
1772
  if self.effective_budget_policy_id is not None:
1705
1773
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
1774
+ if self.has_more is not None: body['has_more'] = self.has_more
1706
1775
  if self.job_id is not None: body['job_id'] = self.job_id
1776
+ if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
1707
1777
  if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
1708
1778
  if self.settings: body['settings'] = self.settings.as_dict()
1709
1779
  return body
@@ -1715,7 +1785,9 @@ class Job:
1715
1785
  if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
1716
1786
  if self.effective_budget_policy_id is not None:
1717
1787
  body['effective_budget_policy_id'] = self.effective_budget_policy_id
1788
+ if self.has_more is not None: body['has_more'] = self.has_more
1718
1789
  if self.job_id is not None: body['job_id'] = self.job_id
1790
+ if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
1719
1791
  if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
1720
1792
  if self.settings: body['settings'] = self.settings
1721
1793
  return body
@@ -1726,7 +1798,9 @@ class Job:
1726
1798
  return cls(created_time=d.get('created_time', None),
1727
1799
  creator_user_name=d.get('creator_user_name', None),
1728
1800
  effective_budget_policy_id=d.get('effective_budget_policy_id', None),
1801
+ has_more=d.get('has_more', None),
1729
1802
  job_id=d.get('job_id', None),
1803
+ next_page_token=d.get('next_page_token', None),
1730
1804
  run_as_user_name=d.get('run_as_user_name', None),
1731
1805
  settings=_from_dict(d, 'settings', JobSettings))
1732
1806
 
@@ -2366,7 +2440,8 @@ class JobSettings:
2366
2440
  job_clusters: Optional[List[JobCluster]] = None
2367
2441
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
2368
2442
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
2369
- task settings."""
2443
+ task settings. If more than 100 job clusters are available, you can paginate through them using
2444
+ :method:jobs/get."""
2370
2445
 
2371
2446
  max_concurrent_runs: Optional[int] = None
2372
2447
  """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -2407,7 +2482,9 @@ class JobSettings:
2407
2482
  be added to the job."""
2408
2483
 
2409
2484
  tasks: Optional[List[Task]] = None
2410
- """A list of task specifications to be executed by this job."""
2485
+ """A list of task specifications to be executed by this job. If more than 100 tasks are available,
2486
+ you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
2487
+ object root to determine if more results are available."""
2411
2488
 
2412
2489
  timeout_seconds: Optional[int] = None
2413
2490
  """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -2875,6 +2952,42 @@ class NotebookTask:
2875
2952
  warehouse_id=d.get('warehouse_id', None))
2876
2953
 
2877
2954
 
2955
+ @dataclass
2956
+ class OutputSchemaInfo:
2957
+ """Stores the catalog name, schema name, and the output schema expiration time for the clean room
2958
+ run."""
2959
+
2960
+ catalog_name: Optional[str] = None
2961
+
2962
+ expiration_time: Optional[int] = None
2963
+ """The expiration time for the output schema as a Unix timestamp in milliseconds."""
2964
+
2965
+ schema_name: Optional[str] = None
2966
+
2967
+ def as_dict(self) -> dict:
2968
+ """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body."""
2969
+ body = {}
2970
+ if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
2971
+ if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
2972
+ if self.schema_name is not None: body['schema_name'] = self.schema_name
2973
+ return body
2974
+
2975
+ def as_shallow_dict(self) -> dict:
2976
+ """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes."""
2977
+ body = {}
2978
+ if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
2979
+ if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
2980
+ if self.schema_name is not None: body['schema_name'] = self.schema_name
2981
+ return body
2982
+
2983
+ @classmethod
2984
+ def from_dict(cls, d: Dict[str, any]) -> OutputSchemaInfo:
2985
+ """Deserializes the OutputSchemaInfo from a dictionary."""
2986
+ return cls(catalog_name=d.get('catalog_name', None),
2987
+ expiration_time=d.get('expiration_time', None),
2988
+ schema_name=d.get('schema_name', None))
2989
+
2990
+
2878
2991
  class PauseStatus(Enum):
2879
2992
 
2880
2993
  PAUSED = 'PAUSED'
@@ -3663,13 +3776,19 @@ class Run:
3663
3776
  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
3664
3777
  are used, `git_source` must be defined on the job."""
3665
3778
 
3779
+ has_more: Optional[bool] = None
3780
+ """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
3781
+ can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
3782
+ :method:jobs/listruns requests with `expand_tasks=true`."""
3783
+
3666
3784
  iterations: Optional[List[RunTask]] = None
3667
3785
  """Only populated by for-each iterations. The parent for-each task is located in tasks array."""
3668
3786
 
3669
3787
  job_clusters: Optional[List[JobCluster]] = None
3670
3788
  """A list of job cluster specifications that can be shared and reused by tasks of this job.
3671
3789
  Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
3672
- task settings."""
3790
+ task settings. If more than 100 job clusters are available, you can paginate through them using
3791
+ :method:jobs/getrun."""
3673
3792
 
3674
3793
  job_id: Optional[int] = None
3675
3794
  """The canonical identifier of the job that contains this run."""
@@ -3743,7 +3862,9 @@ class Run:
3743
3862
 
3744
3863
  tasks: Optional[List[RunTask]] = None
3745
3864
  """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
3746
- `JobsGetOutput` to retrieve the run results."""
3865
+ `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can
3866
+ paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
3867
+ root to determine if more results are available."""
3747
3868
 
3748
3869
  trigger: Optional[TriggerType] = None
3749
3870
  """The type of trigger that fired this run.
@@ -3772,6 +3893,7 @@ class Run:
3772
3893
  if self.end_time is not None: body['end_time'] = self.end_time
3773
3894
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
3774
3895
  if self.git_source: body['git_source'] = self.git_source.as_dict()
3896
+ if self.has_more is not None: body['has_more'] = self.has_more
3775
3897
  if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations]
3776
3898
  if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
3777
3899
  if self.job_id is not None: body['job_id'] = self.job_id
@@ -3811,6 +3933,7 @@ class Run:
3811
3933
  if self.end_time is not None: body['end_time'] = self.end_time
3812
3934
  if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
3813
3935
  if self.git_source: body['git_source'] = self.git_source
3936
+ if self.has_more is not None: body['has_more'] = self.has_more
3814
3937
  if self.iterations: body['iterations'] = self.iterations
3815
3938
  if self.job_clusters: body['job_clusters'] = self.job_clusters
3816
3939
  if self.job_id is not None: body['job_id'] = self.job_id
@@ -3850,6 +3973,7 @@ class Run:
3850
3973
  end_time=d.get('end_time', None),
3851
3974
  execution_duration=d.get('execution_duration', None),
3852
3975
  git_source=_from_dict(d, 'git_source', GitSource),
3976
+ has_more=d.get('has_more', None),
3853
3977
  iterations=_repeated_dict(d, 'iterations', RunTask),
3854
3978
  job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
3855
3979
  job_id=d.get('job_id', None),
@@ -4365,6 +4489,9 @@ class RunNowResponse:
4365
4489
  class RunOutput:
4366
4490
  """Run output was retrieved successfully."""
4367
4491
 
4492
+ clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None
4493
+ """The output of a clean rooms notebook task, if available"""
4494
+
4368
4495
  dbt_output: Optional[DbtOutput] = None
4369
4496
  """The output of a dbt task, if available."""
4370
4497
 
@@ -4409,6 +4536,8 @@ class RunOutput:
4409
4536
  def as_dict(self) -> dict:
4410
4537
  """Serializes the RunOutput into a dictionary suitable for use as a JSON request body."""
4411
4538
  body = {}
4539
+ if self.clean_rooms_notebook_output:
4540
+ body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict()
4412
4541
  if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict()
4413
4542
  if self.error is not None: body['error'] = self.error
4414
4543
  if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -4424,6 +4553,8 @@ class RunOutput:
4424
4553
  def as_shallow_dict(self) -> dict:
4425
4554
  """Serializes the RunOutput into a shallow dictionary of its immediate attributes."""
4426
4555
  body = {}
4556
+ if self.clean_rooms_notebook_output:
4557
+ body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output
4427
4558
  if self.dbt_output: body['dbt_output'] = self.dbt_output
4428
4559
  if self.error is not None: body['error'] = self.error
4429
4560
  if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -4439,7 +4570,9 @@ class RunOutput:
4439
4570
  @classmethod
4440
4571
  def from_dict(cls, d: Dict[str, any]) -> RunOutput:
4441
4572
  """Deserializes the RunOutput from a dictionary."""
4442
- return cls(dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
4573
+ return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output',
4574
+ CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput),
4575
+ dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
4443
4576
  error=d.get('error', None),
4444
4577
  error_trace=d.get('error_trace', None),
4445
4578
  info=d.get('info', None),
@@ -7066,6 +7199,7 @@ class JobsAPI:
7066
7199
  :param job_clusters: List[:class:`JobCluster`] (optional)
7067
7200
  A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
7068
7201
  cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
7202
+ If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
7069
7203
  :param max_concurrent_runs: int (optional)
7070
7204
  An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
7071
7205
  able to execute multiple runs of the same job concurrently. This is useful for example if you
@@ -7097,7 +7231,9 @@ class JobsAPI:
7097
7231
  clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
7098
7232
  to the job.
7099
7233
  :param tasks: List[:class:`Task`] (optional)
7100
- A list of task specifications to be executed by this job.
7234
+ A list of task specifications to be executed by this job. If more than 100 tasks are available, you
7235
+ can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
7236
+ to determine if more results are available.
7101
7237
  :param timeout_seconds: int (optional)
7102
7238
  An optional timeout applied to each run of this job. A value of `0` means no timeout.
7103
7239
  :param trigger: :class:`TriggerSettings` (optional)
@@ -7193,19 +7329,28 @@ class JobsAPI:
7193
7329
  res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
7194
7330
  return ExportRunOutput.from_dict(res)
7195
7331
 
7196
- def get(self, job_id: int) -> Job:
7332
+ def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
7197
7333
  """Get a single job.
7198
7334
 
7199
7335
  Retrieves the details for a single job.
7200
7336
 
7337
+ In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
7338
+ either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
7339
+ value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
7340
+ be empty on later pages.
7341
+
7201
7342
  :param job_id: int
7202
7343
  The canonical identifier of the job to retrieve information about. This field is required.
7344
+ :param page_token: str (optional)
7345
+ Use `next_page_token` returned from the previous GetJob to request the next page of the job's
7346
+ sub-resources.
7203
7347
 
7204
7348
  :returns: :class:`Job`
7205
7349
  """
7206
7350
 
7207
7351
  query = {}
7208
7352
  if job_id is not None: query['job_id'] = job_id
7353
+ if page_token is not None: query['page_token'] = page_token
7209
7354
  headers = {'Accept': 'application/json', }
7210
7355
 
7211
7356
  res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
@@ -7251,7 +7396,12 @@ class JobsAPI:
7251
7396
  page_token: Optional[str] = None) -> Run:
7252
7397
  """Get a single job run.
7253
7398
 
7254
- Retrieve the metadata of a run.
7399
+ Retrieves the metadata of a run.
7400
+
7401
+ In Jobs API 2.2, requests for a single job run support pagination of `tasks` and `job_clusters` when
7402
+ either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
7403
+ value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
7404
+ be empty on later pages.
7255
7405
 
7256
7406
  :param run_id: int
7257
7407
  The canonical identifier of the run for which to retrieve the metadata. This field is required.
@@ -7260,8 +7410,8 @@ class JobsAPI:
7260
7410
  :param include_resolved_values: bool (optional)
7261
7411
  Whether to include resolved parameter values in the response.
7262
7412
  :param page_token: str (optional)
7263
- To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
7264
- the GetJob response.
7413
+ Use `next_page_token` returned from the previous GetRun to request the next page of the run's
7414
+ sub-resources.
7265
7415
 
7266
7416
  :returns: :class:`Run`
7267
7417
  """
@@ -7313,7 +7463,8 @@ class JobsAPI:
7313
7463
  Retrieves a list of jobs.
7314
7464
 
7315
7465
  :param expand_tasks: bool (optional)
7316
- Whether to include task and cluster details in the response.
7466
+ Whether to include task and cluster details in the response. Note that in API 2.2, only the first
7467
+ 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
7317
7468
  :param limit: int (optional)
7318
7469
  The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
7319
7470
  default value is 20.
@@ -7370,7 +7521,8 @@ class JobsAPI:
7370
7521
  If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
7371
7522
  active and completed runs. This field cannot be `true` when active_only is `true`.
7372
7523
  :param expand_tasks: bool (optional)
7373
- Whether to include task and cluster details in the response.
7524
+ Whether to include task and cluster details in the response. Note that in API 2.2, only the first
7525
+ 100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
7374
7526
  :param job_id: int (optional)
7375
7527
  The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
7376
7528
  :param limit: int (optional)