databricks-sdk 0.30.0__py3-none-any.whl → 0.31.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


databricks/sdk/service/jobs.py CHANGED

@@ -58,8 +58,8 @@ class BaseJob:
 class BaseRun:
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
-    has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
-    (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the
+    has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
+    (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""

@@ -115,6 +115,11 @@ class BaseRun:
     job_parameters: Optional[List[JobParameter]] = None
     """Job-level parameters used in the run"""

+    job_run_id: Optional[int] = None
+    """ID of the job run that this run belongs to. For legacy and single-task job runs the field is
+    populated with the job run ID. For task runs, the field is populated with the ID of the job run
+    that the task run belongs to."""
+
     number_in_job: Optional[int] = None
     """A unique identifier for this job run. This is set to the same value as `run_id`."""

@@ -201,6 +206,7 @@ class BaseRun:
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
         if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id

@@ -236,6 +242,7 @@ class BaseRun:
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
                    job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
+                   job_run_id=d.get('job_run_id', None),
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
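The new `job_run_id` field rides through the generated `as_dict`/`from_dict` pair like every other
optional. A minimal round-trip sketch (the IDs are invented for illustration):

    from databricks.sdk.service.jobs import BaseRun

    # Hypothetical task-run payload: run_id identifies the task run itself,
    # job_run_id points at the parent job run, per the docstring above.
    task_run = BaseRun.from_dict({'job_id': 7, 'run_id': 1001, 'job_run_id': 900, 'attempt_number': 0})
    assert task_run.run_id != task_run.job_run_id

    body = task_run.as_dict()
    assert body['job_run_id'] == 900    # round-trips through the new field
    assert 'number_in_job' not in body  # unset optionals stay out of the body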
@@ -827,6 +834,96 @@ class DeleteRunResponse:
         return cls()
 
 
+@dataclass
+class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
+    """Represents a change to the job cluster's settings that would be required for the job clusters to
+    become compliant with their policies."""
+
+    field: Optional[str] = None
+    """The field where this change would be made, prepended with the job cluster key."""
+
+    new_value: Optional[str] = None
+    """The new value of this field after enforcing policy compliance (either a number, a boolean, or a
+    string) converted to a string. This is intended to be read by a human. The typed new value of
+    this field can be retrieved by reading the settings field in the API response."""
+
+    previous_value: Optional[str] = None
+    """The previous value of this field before enforcing policy compliance (either a number, a boolean,
+    or a string) converted to a string. This is intended to be read by a human. The type of the
+    field can be retrieved by reading the settings field in the API response."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
+        """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary."""
+        return cls(field=d.get('field', None),
+                   new_value=d.get('new_value', None),
+                   previous_value=d.get('previous_value', None))
+
+
+@dataclass
+class EnforcePolicyComplianceRequest:
+    job_id: int
+    """The ID of the job you want to enforce policy compliance on."""
+
+    validate_only: Optional[bool] = None
+    """If set, previews changes made to the job to comply with its policy, but does not update the job."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest:
+        """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
+        return cls(job_id=d.get('job_id', None), validate_only=d.get('validate_only', None))
+
+
+@dataclass
+class EnforcePolicyComplianceResponse:
+    has_changes: Optional[bool] = None
+    """Whether any changes have been made to the job cluster settings for the job to become compliant
+    with its policies."""
+
+    job_cluster_changes: Optional[List[EnforcePolicyComplianceForJobResponseJobClusterSettingsChange]] = None
+    """A list of job cluster changes that have been made to the job’s cluster settings in order for
+    all job clusters to become compliant with their policies."""
+
+    settings: Optional[JobSettings] = None
+    """Updated job settings after policy enforcement. Policy enforcement only applies to job clusters
+    that are created when running the job (which are specified in new_cluster) and does not apply to
+    existing all-purpose clusters. Updated job settings are derived by applying policy default
+    values to the existing job clusters in order to satisfy policy requirements."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.job_cluster_changes:
+            body['job_cluster_changes'] = [v.as_dict() for v in self.job_cluster_changes]
+        if self.settings: body['settings'] = self.settings.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse:
+        """Deserializes the EnforcePolicyComplianceResponse from a dictionary."""
+        return cls(has_changes=d.get('has_changes', None),
+                   job_cluster_changes=_repeated_dict(
+                       d, 'job_cluster_changes',
+                       EnforcePolicyComplianceForJobResponseJobClusterSettingsChange),
+                   settings=_from_dict(d, 'settings', JobSettings))
+
+
 @dataclass
 class ExportRunOutput:
     """Run was exported successfully."""
@@ -914,7 +1011,8 @@ class ForEachTask:
     """Configuration for the task that will be run for each element in the array"""
 
     concurrency: Optional[int] = None
-    """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
+    """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to
+    be able to execute multiple runs of the task concurrently."""
 
     def as_dict(self) -> dict:
         """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
@@ -1024,6 +1122,32 @@ class GetJobPermissionLevelsResponse:
         return cls(permission_levels=_repeated_dict(d, 'permission_levels', JobPermissionsDescription))
 
 
+@dataclass
+class GetPolicyComplianceResponse:
+    is_compliant: Optional[bool] = None
+    """Whether the job is compliant with its policies or not. Jobs could be out of compliance if a
+    policy they are using was updated after the job was last edited and some of its job clusters no
+    longer comply with their updated policies."""
+
+    violations: Optional[Dict[str, str]] = None
+    """An object containing key-value mappings representing the first 200 policy validation errors. The
+    keys indicate the path where the policy validation error is occurring. An identifier for the job
+    cluster is prepended to the path. The values indicate an error message describing the policy
+    validation error."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetPolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse:
+        """Deserializes the GetPolicyComplianceResponse from a dictionary."""
+        return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None))
+
+
 class GitProvider(Enum):
 
     AWS_CODE_COMMIT = 'awsCodeCommit'
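Keys in the `violations` map are setting paths prefixed with a job cluster identifier; values are
human-readable messages. A small sketch with an invented payload shows how a caller might surface
them:

    from databricks.sdk.service.jobs import GetPolicyComplianceResponse

    # Invented response body; real paths depend on the job's cluster keys.
    resp = GetPolicyComplianceResponse.from_dict({
        'is_compliant': False,
        'violations': {'my_cluster.spark_version': 'Value must match the policy fixed value.'},
    })
    if not resp.is_compliant:
        for path, message in (resp.violations or {}).items():
            print(f'{path}: {message}')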
@@ -1260,6 +1384,36 @@ class JobCluster:
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
 
 
+@dataclass
+class JobCompliance:
+    job_id: int
+    """Canonical unique identifier for a job."""
+
+    is_compliant: Optional[bool] = None
+    """Whether this job is in compliance with the latest version of its policy."""
+
+    violations: Optional[Dict[str, str]] = None
+    """An object containing key-value mappings representing the first 200 policy validation errors. The
+    keys indicate the path where the policy validation error is occurring. An identifier for the job
+    cluster is prepended to the path. The values indicate an error message describing the policy
+    validation error."""
+
+    def as_dict(self) -> dict:
+        """Serializes the JobCompliance into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.violations: body['violations'] = self.violations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> JobCompliance:
+        """Deserializes the JobCompliance from a dictionary."""
+        return cls(is_compliant=d.get('is_compliant', None),
+                   job_id=d.get('job_id', None),
+                   violations=d.get('violations', None))
+
+
 @dataclass
 class JobDeployment:
     kind: JobDeploymentKind

@@ -1874,6 +2028,35 @@ class JobsHealthRules:
         return cls(rules=_repeated_dict(d, 'rules', JobsHealthRule))
 
 
+@dataclass
+class ListJobComplianceForPolicyResponse:
+    jobs: Optional[List[JobCompliance]] = None
+    """A list of jobs and their policy compliance statuses."""
+
+    next_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the next page of results. If this field
+    is not in the response, it means no further results for the request."""
+
+    prev_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the previous page of results. If this
+    field is not in the response, it means no further results for the request."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListJobComplianceForPolicyResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse:
+        """Deserializes the ListJobComplianceForPolicyResponse from a dictionary."""
+        return cls(jobs=_repeated_dict(d, 'jobs', JobCompliance),
+                   next_page_token=d.get('next_page_token', None),
+                   prev_page_token=d.get('prev_page_token', None))
+
+
 @dataclass
 class ListJobsResponse:
     """List of jobs was retrieved successfully."""

@@ -2568,8 +2751,8 @@ class Run:
 
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
-    has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
-    (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the
+    has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
+    (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""

@@ -2628,6 +2811,11 @@ class Run:
     job_parameters: Optional[List[JobParameter]] = None
     """Job-level parameters used in the run"""

+    job_run_id: Optional[int] = None
+    """ID of the job run that this run belongs to. For legacy and single-task job runs the field is
+    populated with the job run ID. For task runs, the field is populated with the ID of the job run
+    that the task run belongs to."""
+
     next_page_token: Optional[str] = None
     """A token that can be used to list the next page of sub-resources."""

@@ -2721,6 +2909,7 @@ class Run:
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
         if self.original_attempt_run_id is not None:

@@ -2759,6 +2948,7 @@ class Run:
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
                    job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
+                   job_run_id=d.get('job_run_id', None),
                    next_page_token=d.get('next_page_token', None),
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),

@@ -2832,7 +3022,8 @@ class RunForEachTask:
     """Configuration for the task that will be run for each element in the array"""
 
     concurrency: Optional[int] = None
-    """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
+    """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to
+    be able to execute multiple runs of the task concurrently."""
 
     stats: Optional[ForEachStats] = None
     """Read only field. Populated for GetRun and ListRuns RPC calls and stores the execution stats of

@@ -3429,8 +3620,8 @@ class RunTask:
 
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
-    has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
-    (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the
+    has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
+    (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""

@@ -6127,3 +6318,102 @@ class JobsAPI:
 
         res = self._api.do('PATCH', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
         return JobPermissions.from_dict(res)
+
+
+class PolicyComplianceForJobsAPI:
+    """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.
+    This API currently only supports compliance controls for cluster policies.
+
+    A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
+    policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
+    edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
+    policies.
+
+    The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
+    compliance API allows you to update a job so that it becomes compliant with all of its policies."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def enforce_compliance(self,
+                           job_id: int,
+                           *,
+                           validate_only: Optional[bool] = None) -> EnforcePolicyComplianceResponse:
+        """Enforce job policy compliance.
+
+        Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
+        are compliant with the current versions of their respective cluster policies. All-purpose clusters
+        used in the job will not be updated.
+
+        :param job_id: int
+          The ID of the job you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews changes made to the job to comply with its policy, but does not update the job.
+
+        :returns: :class:`EnforcePolicyComplianceResponse`
+        """
+        body = {}
+        if job_id is not None: body['job_id'] = job_id
+        if validate_only is not None: body['validate_only'] = validate_only
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/policies/jobs/enforce-compliance', body=body, headers=headers)
+        return EnforcePolicyComplianceResponse.from_dict(res)
+
+    def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse:
+        """Get job policy compliance.
+
+        Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
+        they use was updated after the job was last edited and some of its job clusters no longer comply with
+        their updated policies.
+
+        :param job_id: int
+          The ID of the job whose compliance status you are requesting.
+
+        :returns: :class:`GetPolicyComplianceResponse`
+        """
+
+        query = {}
+        if job_id is not None: query['job_id'] = job_id
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/policies/jobs/get-compliance', query=query, headers=headers)
+        return GetPolicyComplianceResponse.from_dict(res)
+
+    def list_compliance(self,
+                        policy_id: str,
+                        *,
+                        page_size: Optional[int] = None,
+                        page_token: Optional[str] = None) -> Iterator[JobCompliance]:
+        """List job policy compliance.
+
+        Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
+        compliance if a cluster policy they use was updated after the job was last edited and its job clusters
+        no longer comply with the updated policy.
+
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+
+        :returns: Iterator over :class:`JobCompliance`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/policies/jobs/list-compliance', query=query, headers=headers)
+            if 'jobs' in json:
+                for v in json['jobs']:
+                    yield JobCompliance.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
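End to end, the new service hangs off the workspace client. A minimal usage sketch, assuming it is
exposed as `WorkspaceClient.policy_compliance_for_jobs` (the naming convention the SDK uses for
generated services) and using made-up job and policy IDs:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Check one job, then preview enforcement without applying it.
    status = w.policy_compliance_for_jobs.get_compliance(job_id=123)
    if not status.is_compliant:
        preview = w.policy_compliance_for_jobs.enforce_compliance(job_id=123, validate_only=True)
        for change in preview.job_cluster_changes or []:
            print(change.field, change.previous_value, '->', change.new_value)

    # Compliance for every job that uses a given cluster policy; the iterator
    # follows next_page_token for you, as in the while-loop above.
    for job in w.policy_compliance_for_jobs.list_compliance(policy_id='ABC123DEF4567890'):
        print(job.job_id, job.is_compliant)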
databricks/sdk/service/sql.py CHANGED

@@ -600,68 +600,6 @@ class ColumnInfoTypeName(Enum):
     USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
 
 
-@dataclass
-class ContextFilter:
-    dbsql_alert_id: Optional[str] = None
-    """Databricks SQL Alert id"""
-
-    dbsql_dashboard_id: Optional[str] = None
-    """Databricks SQL Dashboard id"""
-
-    dbsql_query_id: Optional[str] = None
-    """Databricks SQL Query id"""
-
-    dbsql_session_id: Optional[str] = None
-    """Databricks SQL Query session id"""
-
-    job_id: Optional[str] = None
-    """Databricks Workflows id"""
-
-    job_run_id: Optional[str] = None
-    """Databricks Workflows task run id"""
-
-    lakeview_dashboard_id: Optional[str] = None
-    """Databricks Lakeview Dashboard id"""
-
-    notebook_cell_run_id: Optional[str] = None
-    """Databricks Notebook runnableCommandId"""
-
-    notebook_id: Optional[str] = None
-    """Databricks Notebook id"""
-
-    statement_ids: Optional[List[str]] = None
-    """Databricks Query History statement ids."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ContextFilter into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.dbsql_alert_id is not None: body['dbsql_alert_id'] = self.dbsql_alert_id
-        if self.dbsql_dashboard_id is not None: body['dbsql_dashboard_id'] = self.dbsql_dashboard_id
-        if self.dbsql_query_id is not None: body['dbsql_query_id'] = self.dbsql_query_id
-        if self.dbsql_session_id is not None: body['dbsql_session_id'] = self.dbsql_session_id
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.lakeview_dashboard_id is not None: body['lakeview_dashboard_id'] = self.lakeview_dashboard_id
-        if self.notebook_cell_run_id is not None: body['notebook_cell_run_id'] = self.notebook_cell_run_id
-        if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
-        if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ContextFilter:
-        """Deserializes the ContextFilter from a dictionary."""
-        return cls(dbsql_alert_id=d.get('dbsql_alert_id', None),
-                   dbsql_dashboard_id=d.get('dbsql_dashboard_id', None),
-                   dbsql_query_id=d.get('dbsql_query_id', None),
-                   dbsql_session_id=d.get('dbsql_session_id', None),
-                   job_id=d.get('job_id', None),
-                   job_run_id=d.get('job_run_id', None),
-                   lakeview_dashboard_id=d.get('lakeview_dashboard_id', None),
-                   notebook_cell_run_id=d.get('notebook_cell_run_id', None),
-                   notebook_id=d.get('notebook_id', None),
-                   statement_ids=d.get('statement_ids', None))
-
-
 @dataclass
 class CreateAlert:
     name: str

@@ -3434,12 +3372,12 @@ class QueryEditContent:
 
 @dataclass
 class QueryFilter:
-    context_filter: Optional[ContextFilter] = None
-    """Filter by one or more property describing where the query was generated"""
-
     query_start_time_range: Optional[TimeRange] = None
     """A range filter for query submitted time. The time range must be <= 30 days."""
 
+    statement_ids: Optional[List[str]] = None
+    """A list of statement IDs."""
+
     statuses: Optional[List[QueryStatus]] = None
 
     user_ids: Optional[List[int]] = None

@@ -3451,8 +3389,8 @@ class QueryFilter:
     def as_dict(self) -> dict:
         """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.context_filter: body['context_filter'] = self.context_filter.as_dict()
         if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict()
+        if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
         if self.statuses: body['statuses'] = [v.value for v in self.statuses]
         if self.user_ids: body['user_ids'] = [v for v in self.user_ids]
         if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]

@@ -3461,8 +3399,8 @@ class QueryFilter:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
         """Deserializes the QueryFilter from a dictionary."""
-        return cls(context_filter=_from_dict(d, 'context_filter', ContextFilter),
-                   query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange),
+        return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange),
+                   statement_ids=d.get('statement_ids', None),
                    statuses=_repeated_enum(d, 'statuses', QueryStatus),
                    user_ids=d.get('user_ids', None),
                    warehouse_ids=d.get('warehouse_ids', None))

@@ -3944,12 +3882,6 @@ class QuerySource:
 
     notebook_id: Optional[str] = None
 
-    pipeline_id: Optional[str] = None
-    """Id associated with a DLT pipeline"""
-
-    pipeline_update_id: Optional[str] = None
-    """Id associated with a DLT update"""
-
     query_tags: Optional[str] = None
     """String provided by a customer that'll help them identify the query"""
 

@@ -3984,8 +3916,6 @@ class QuerySource:
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_managed_by is not None: body['job_managed_by'] = self.job_managed_by.value
         if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.pipeline_update_id is not None: body['pipeline_update_id'] = self.pipeline_update_id
         if self.query_tags is not None: body['query_tags'] = self.query_tags
         if self.run_id is not None: body['run_id'] = self.run_id
         if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id

@@ -4012,8 +3942,6 @@ class QuerySource:
                    job_id=d.get('job_id', None),
                    job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager),
                    notebook_id=d.get('notebook_id', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   pipeline_update_id=d.get('pipeline_update_id', None),
                    query_tags=d.get('query_tags', None),
                    run_id=d.get('run_id', None),
                    runnable_command_id=d.get('runnable_command_id', None),

@@ -6558,8 +6486,8 @@ class QueriesLegacyAPI:
 
 
 class QueryHistoryAPI:
-    """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless
-    compute, and DLT."""
+    """A service responsible for storing and retrieving the list of queries run against SQL endpoints and
+    serverless compute."""
 
     def __init__(self, api_client):
         self._api = api_client

@@ -6567,11 +6495,12 @@ class QueryHistoryAPI:
     def list(self,
              *,
              filter_by: Optional[QueryFilter] = None,
+             include_metrics: Optional[bool] = None,
              max_results: Optional[int] = None,
             page_token: Optional[str] = None) -> ListQueriesResponse:
         """List Queries.
 
-        List the history of queries through SQL warehouses, serverless compute, and DLT.
+        List the history of queries through SQL warehouses, and serverless compute.
 
         You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
         returned first (up to max_results in request). The pagination token returned in response can be used

@@ -6579,6 +6508,9 @@ class QueryHistoryAPI:
 
         :param filter_by: :class:`QueryFilter` (optional)
           A filter to limit query history results. This field is optional.
+        :param include_metrics: bool (optional)
+          Whether to include the query metrics with each query. Only use this for a small subset of queries
+          (max_results). Defaults to false.
         :param max_results: int (optional)
           Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
         :param page_token: str (optional)

@@ -6591,6 +6523,7 @@ class QueryHistoryAPI:
 
         query = {}
         if filter_by is not None: query['filter_by'] = filter_by.as_dict()
+        if include_metrics is not None: query['include_metrics'] = include_metrics
         if max_results is not None: query['max_results'] = max_results
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
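Callers that previously filtered through `ContextFilter` can now pass statement IDs directly on
`QueryFilter`, and opt into metrics per request. A sketch with an invented statement ID, assuming
`ListQueriesResponse` keeps exposing the page of results as `res`:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sql import QueryFilter

    w = WorkspaceClient()

    # statement_ids replaces the removed ContextFilter.statement_ids;
    # include_metrics is only sensible for a small result set.
    resp = w.query_history.list(
        filter_by=QueryFilter(statement_ids=['01ef-0000-0000-fake-statement-id']),
        include_metrics=True,
        max_results=10)
    for q in resp.res or []:
        print(q.query_text)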
@@ -21,7 +21,14 @@ _extra = []
 
 # Precompiled regex patterns
 alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')
-semver_pattern = re.compile(r'^v?(\d+\.)?(\d+\.)?(\*|\d+)$')
+
+# official https://semver.org/ recommendation: https://regex101.com/r/Ly7O1x/
+# with addition of "x" wildcards for minor/patch versions. Also, patch version may be omitted.
+semver_pattern = re.compile(r"^"
+                            r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
+                            r"(?:-(?P<pre_release>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
+                            r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
+                            r"(?:\+(?P<build>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
 
 
 def _match_alphanum(value):
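The old pattern accepted a leading `v` and a bare `*` wildcard; the replacement follows the
semver.org grammar, adds `x` wildcards for the minor and patch segments, and lets the patch segment
be omitted entirely. A quick self-contained check against the same pattern:

    import re

    semver_pattern = re.compile(r"^"
                                r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
                                r"(?:-(?P<pre_release>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
                                r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
                                r"(?:\+(?P<build>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")

    for version in ('1.2.3', '1.2', '1.x', '0.31.1-beta.1+build.5'):
        assert semver_pattern.match(version), version
    for version in ('v1.2.3', '*', '01.2.3'):  # matched by the old pattern, rejected now
        assert not semver_pattern.match(version), version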
databricks/sdk/version.py CHANGED

@@ -1 +1 @@
-__version__ = '0.30.0'
+__version__ = '0.31.1'

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.30.0
+Version: 0.31.1
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin