databricks-sdk 0.17.0__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of databricks-sdk might be problematic.

Files changed (36)
  1. databricks/sdk/__init__.py +41 -5
  2. databricks/sdk/azure.py +17 -7
  3. databricks/sdk/clock.py +49 -0
  4. databricks/sdk/config.py +459 -0
  5. databricks/sdk/core.py +7 -1026
  6. databricks/sdk/credentials_provider.py +628 -0
  7. databricks/sdk/environments.py +72 -0
  8. databricks/sdk/errors/__init__.py +1 -1
  9. databricks/sdk/errors/mapper.py +5 -5
  10. databricks/sdk/mixins/workspace.py +3 -3
  11. databricks/sdk/oauth.py +2 -1
  12. databricks/sdk/retries.py +9 -5
  13. databricks/sdk/service/_internal.py +1 -1
  14. databricks/sdk/service/catalog.py +946 -82
  15. databricks/sdk/service/compute.py +106 -41
  16. databricks/sdk/service/files.py +145 -31
  17. databricks/sdk/service/iam.py +44 -40
  18. databricks/sdk/service/jobs.py +199 -20
  19. databricks/sdk/service/ml.py +33 -42
  20. databricks/sdk/service/oauth2.py +3 -4
  21. databricks/sdk/service/pipelines.py +51 -31
  22. databricks/sdk/service/serving.py +1 -2
  23. databricks/sdk/service/settings.py +377 -72
  24. databricks/sdk/service/sharing.py +3 -4
  25. databricks/sdk/service/sql.py +27 -19
  26. databricks/sdk/service/vectorsearch.py +13 -17
  27. databricks/sdk/service/workspace.py +20 -11
  28. databricks/sdk/version.py +1 -1
  29. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/METADATA +4 -4
  30. databricks_sdk-0.19.0.dist-info/RECORD +53 -0
  31. databricks_sdk-0.17.0.dist-info/RECORD +0 -49
  32. /databricks/sdk/errors/{mapping.py → platform.py} +0 -0
  33. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/LICENSE +0 -0
  34. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/NOTICE +0 -0
  35. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/WHEEL +0 -0
  36. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/top_level.txt +0 -0
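The headline change in this release is structural: core.py shrinks by roughly a thousand lines, with configuration, credential providers, environments, and clock utilities split into the new config.py, credentials_provider.py, environments.py, and clock.py modules, and errors/mapping.py renamed to errors/platform.py. A minimal sketch of what that split means for imports, assuming the names shown here are exported from the new modules (verify against your installed version; the host and token values are hypothetical):

from databricks.sdk.config import Config                             # configuration moved out of core.py
from databricks.sdk.credentials_provider import CredentialsProvider  # new credentials module
from databricks.sdk.errors.platform import NotFound                  # errors/mapping.py renamed to platform.py

cfg = Config(host='https://example.cloud.databricks.com', token='dapi-...')  # hypothetical host/token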
@@ -23,8 +23,7 @@ class AccessControlRequest:
     """Permission level"""
 
     service_principal_name: Optional[str] = None
-    """Application ID of an active service principal. Setting this field requires the
-    `servicePrincipal/user` role."""
+    """application ID of a service principal"""
 
     user_name: Optional[str] = None
     """name of the user"""
@@ -456,8 +455,7 @@ class PasswordAccessControlRequest:
     """Permission level"""
 
     service_principal_name: Optional[str] = None
-    """Application ID of an active service principal. Setting this field requires the
-    `servicePrincipal/user` role."""
+    """application ID of a service principal"""
 
     user_name: Optional[str] = None
     """name of the user"""
@@ -1467,14 +1465,15 @@ class AccountGroupsAPI:
                                 f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield Group.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield Group.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
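The reordering above yields the current page before testing the stop condition rather than after it, while preserving the seen-set de-duplication; the identical hunk repeats for the account-level service principal and user APIs and the workspace-level SCIM APIs below. A standalone sketch of the resulting loop, with a stubbed fetch_page standing in for self._api.do:

from typing import Iterator

def iterate_scim_pages(fetch_page) -> Iterator[dict]:
    """Minimal sketch of the SDK's SCIM pagination loop (stubbed transport)."""
    query = {'startIndex': 1, 'count': 100}
    seen = set()
    while True:
        json = fetch_page(query)  # stands in for self._api.do('GET', ..., query=query)
        if 'Resources' in json:
            for v in json['Resources']:
                i = v['id']
                if i in seen:  # de-duplicate across overlapping pages
                    continue
                seen.add(i)
                yield v
        if 'Resources' not in json or not json['Resources']:
            return
        query['startIndex'] += len(json['Resources'])

# Example stub: two pages with one duplicate id across pages, then an empty page.
pages = [{'Resources': [{'id': '1'}, {'id': '2'}]}, {'Resources': [{'id': '2'}, {'id': '3'}]}, {}]
ids = [r['id'] for r in iterate_scim_pages(lambda q: pages.pop(0))]
assert ids == ['1', '2', '3']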
@@ -1707,14 +1706,15 @@ class AccountServicePrincipalsAPI:
                                 f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield ServicePrincipal.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield ServicePrincipal.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2007,14 +2007,15 @@ class AccountUsersAPI:
                                 f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield User.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield User.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2269,14 +2270,15 @@ class GroupsAPI:
         if "count" not in query: query['count'] = 100
         while True:
             json = self._api.do('GET', '/api/2.0/preview/scim/v2/Groups', query=query, headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield Group.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield Group.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2648,14 +2650,15 @@ class ServicePrincipalsAPI:
                                 '/api/2.0/preview/scim/v2/ServicePrincipals',
                                 query=query,
                                 headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield ServicePrincipal.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield ServicePrincipal.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -2957,14 +2960,15 @@ class UsersAPI:
         if "count" not in query: query['count'] = 100
         while True:
             json = self._api.do('GET', '/api/2.0/preview/scim/v2/Users', query=query, headers=headers)
+            if 'Resources' in json:
+                for v in json['Resources']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield User.from_dict(v)
             if 'Resources' not in json or not json['Resources']:
                 return
-            for v in json['Resources']:
-                i = v['id']
-                if i in seen:
-                    continue
-                seen.add(i)
-                yield User.from_dict(v)
             query['startIndex'] += len(json['Resources'])
 
     def patch(self,
@@ -676,13 +676,22 @@ class DbtTask:
     specified. If no warehouse_id is specified and this folder is unset, the root directory is used."""
 
     project_directory: Optional[str] = None
-    """Optional (relative) path to the project directory, if no value is provided, the root of the git
-    repository is used."""
+    """Path to the project directory. Optional for Git sourced tasks, in which case if no value is
+    provided, the root of the Git repository is used."""
 
     schema: Optional[str] = None
     """Optional schema to write to. This parameter is only used when a warehouse_id is also provided.
     If not provided, the `default` schema is used."""
 
+    source: Optional[Source] = None
+    """Optional location type of the project directory. When set to `WORKSPACE`, the project will be
+    retrieved from the local <Databricks> workspace. When set to `GIT`, the project will be
+    retrieved from a Git repository defined in `git_source`. If the value is empty, the task will
+    use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.
+
+    * `WORKSPACE`: Project is located in <Databricks> workspace. * `GIT`: Project is located in
+    cloud Git provider."""
+
     warehouse_id: Optional[str] = None
     """ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the
     profile and connection details to dbt. It can be overridden on a per-command basis by using the
@@ -696,6 +705,7 @@ class DbtTask:
         if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
         if self.project_directory is not None: body['project_directory'] = self.project_directory
         if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source.value
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
@@ -707,6 +717,7 @@ class DbtTask:
                    profiles_directory=d.get('profiles_directory', None),
                    project_directory=d.get('project_directory', None),
                    schema=d.get('schema', None),
+                   source=_enum(d, 'source', Source),
                    warehouse_id=d.get('warehouse_id', None))
 
 
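Taken together, the three DbtTask hunks let a dbt project be resolved from the workspace instead of only from `git_source`. A hedged usage sketch, assuming the `Source` enum is exported from databricks.sdk.service.jobs (the project path is invented):

from databricks.sdk.service.jobs import DbtTask, Source

# Run a dbt project checked into the workspace rather than a Git repo.
dbt = DbtTask(
    commands=['dbt deps', 'dbt run'],
    project_directory='/Workspace/Users/someone@example.com/my_dbt_project',  # hypothetical path
    source=Source.WORKSPACE,
)
assert dbt.as_dict()['source'] == 'WORKSPACE'  # the enum is serialized via .value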
@@ -771,8 +782,8 @@ class FileArrivalTriggerConfiguration:
     time the trigger fired. The minimum allowed value is 60 seconds"""
 
     url: Optional[str] = None
-    """URL to be monitored for file arrivals. The path must point to the root or a subpath of the
-    external location."""
+    """The storage location to monitor for file arrivals. The value must point to the root or a subpath
+    of an external location URL or the root or subpath of a Unity Catalog volume."""
 
     wait_after_last_change_seconds: Optional[int] = None
    """If set, the trigger starts a run only after no file activity has occurred for the specified
@@ -797,6 +808,117 @@ class FileArrivalTriggerConfiguration:
                    wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
 
 
+@dataclass
+class ForEachStats:
+    error_message_stats: Optional[ForEachTaskErrorMessageStats] = None
+    """Sample of 3 most common error messages occurred during the iteration."""
+
+    task_run_stats: Optional[ForEachTaskTaskRunStats] = None
+    """Describes stats of the iteration. Only latest retries are considered."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ForEachStats into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.error_message_stats: body['error_message_stats'] = self.error_message_stats.as_dict()
+        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ForEachStats:
+        """Deserializes the ForEachStats from a dictionary."""
+        return cls(error_message_stats=_from_dict(d, 'error_message_stats', ForEachTaskErrorMessageStats),
+                   task_run_stats=_from_dict(d, 'task_run_stats', ForEachTaskTaskRunStats))
+
+
+@dataclass
+class ForEachTask:
+    inputs: str
+    """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
+
+    task: Task
+
+    concurrency: Optional[int] = None
+    """Controls the number of active iterations task runs. Default is 100 (maximal value)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.concurrency is not None: body['concurrency'] = self.concurrency
+        if self.inputs is not None: body['inputs'] = self.inputs
+        if self.task: body['task'] = self.task.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ForEachTask:
+        """Deserializes the ForEachTask from a dictionary."""
+        return cls(concurrency=d.get('concurrency', None),
+                   inputs=d.get('inputs', None),
+                   task=_from_dict(d, 'task', Task))
+
+
+@dataclass
+class ForEachTaskErrorMessageStats:
+    count: Optional[str] = None
+    """Describes the count of such error message encountered during the iterations."""
+
+    error_message: Optional[str] = None
+    """Describes the error message occured during the iterations."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ForEachTaskErrorMessageStats into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.error_message is not None: body['error_message'] = self.error_message
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats:
+        """Deserializes the ForEachTaskErrorMessageStats from a dictionary."""
+        return cls(count=d.get('count', None), error_message=d.get('error_message', None))
+
+
+@dataclass
+class ForEachTaskTaskRunStats:
+    active_iterations: Optional[int] = None
+    """Describes the iteration runs having an active lifecycle state or an active run sub state."""
+
+    completed_iterations: Optional[int] = None
+    """Describes the number of failed and succeeded iteration runs."""
+
+    failed_iterations: Optional[int] = None
+    """Describes the number of failed iteration runs."""
+
+    scheduled_iterations: Optional[int] = None
+    """Describes the number of iteration runs that have been scheduled."""
+
+    succeeded_iterations: Optional[int] = None
+    """Describes the number of succeeded iteration runs."""
+
+    total_iterations: Optional[int] = None
+    """Describes the length of the list of items to iterate over."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ForEachTaskTaskRunStats into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.active_iterations is not None: body['active_iterations'] = self.active_iterations
+        if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations
+        if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations
+        if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations
+        if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations
+        if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ForEachTaskTaskRunStats:
+        """Deserializes the ForEachTaskTaskRunStats from a dictionary."""
+        return cls(active_iterations=d.get('active_iterations', None),
+                   completed_iterations=d.get('completed_iterations', None),
+                   failed_iterations=d.get('failed_iterations', None),
+                   scheduled_iterations=d.get('scheduled_iterations', None),
+                   succeeded_iterations=d.get('succeeded_iterations', None),
+                   total_iterations=d.get('total_iterations', None))
+
+
 class Format(Enum):
 
     MULTI_TASK = 'MULTI_TASK'
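These new dataclasses back the for-each (task iteration) feature: a `ForEachTask` wraps a nested `Task` and fans it out over `inputs`, with the `ForEachStats` family describing the iteration runs on the read side. A hedged construction sketch (the task key and notebook path are invented):

from databricks.sdk.service.jobs import ForEachTask, NotebookTask, Task

# Fan a notebook task out over three inputs, at most two iterations running at once.
loop = ForEachTask(
    inputs='["a", "b", "c"]',  # a JSON string, or a reference to an array parameter
    concurrency=2,
    task=Task(
        task_key='process_one_input',  # hypothetical key
        notebook_task=NotebookTask(notebook_path='/Workspace/process'),  # hypothetical path
    ),
)
body = loop.as_dict()  # the nested Task is serialized via task.as_dict()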
@@ -973,8 +1095,7 @@ class JobAccessControlRequest:
     """Permission level"""
 
     service_principal_name: Optional[str] = None
-    """Application ID of an active service principal. Setting this field requires the
-    `servicePrincipal/user` role."""
+    """application ID of a service principal"""
 
     user_name: Optional[str] = None
     """name of the user"""
@@ -2532,6 +2653,36 @@ class RunConditionTaskOp(Enum):
     NOT_EQUAL = 'NOT_EQUAL'
 
 
+@dataclass
+class RunForEachTask:
+    concurrency: Optional[int] = None
+    """Controls the number of active iterations task runs. Default is 100 (maximal value)."""
+
+    inputs: Optional[str] = None
+    """Array for task to iterate on. This can be a JSON string or a reference to an array parameter."""
+
+    stats: Optional[ForEachStats] = None
+
+    task: Optional[Task] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the RunForEachTask into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.concurrency is not None: body['concurrency'] = self.concurrency
+        if self.inputs is not None: body['inputs'] = self.inputs
+        if self.stats: body['stats'] = self.stats.as_dict()
+        if self.task: body['task'] = self.task.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RunForEachTask:
+        """Deserializes the RunForEachTask from a dictionary."""
+        return cls(concurrency=d.get('concurrency', None),
+                   inputs=d.get('inputs', None),
+                   stats=_from_dict(d, 'stats', ForEachStats),
+                   task=_from_dict(d, 'task', Task))
+
+
 class RunIf(Enum):
     """An optional value indicating the condition that determines whether the task should be run once
     its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`.
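`RunForEachTask` is the read-side mirror of `ForEachTask`: when a run is fetched, it additionally carries optional iteration `stats`. A hedged sketch of inspecting them, using only field names from the hunks above:

from databricks.sdk.service.jobs import RunForEachTask

run_for_each = RunForEachTask.from_dict({
    'inputs': '["a", "b", "c"]',
    'stats': {'task_run_stats': {'completed_iterations': 2, 'failed_iterations': 1, 'total_iterations': 3}},
})
stats = run_for_each.stats.task_run_stats
print(f'{stats.completed_iterations}/{stats.total_iterations} iterations done, {stats.failed_iterations} failed')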
@@ -3057,6 +3208,9 @@ class RunTask:
     When running jobs on an existing cluster, you may need to manually restart the cluster if it
     stops responding. We suggest running jobs on new clusters for greater reliability."""
 
+    for_each_task: Optional[RunForEachTask] = None
+    """If for_each_task, indicates that this task must execute the nested task within it."""
+
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
     Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -3160,6 +3314,7 @@ class RunTask:
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.git_source: body['git_source'] = self.git_source.as_dict()
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
@@ -3194,6 +3349,7 @@ class RunTask:
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    existing_cluster_id=d.get('existing_cluster_id', None),
+                   for_each_task=_from_dict(d, 'for_each_task', RunForEachTask),
                    git_source=_from_dict(d, 'git_source', GitSource),
                    libraries=_repeated_dict(d, 'libraries', compute.Library),
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec),
@@ -3663,18 +3819,29 @@ class SqlTaskDashboard:
 @dataclass
 class SqlTaskFile:
     path: str
-    """Relative path of the SQL file in the remote Git repository."""
+    """Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for
+    workspace paths."""
+
+    source: Optional[Source] = None
+    """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved
+    from the local <Databricks> workspace. When set to `GIT`, the SQL file will be retrieved from a
+    Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if
+    `git_source` is defined and `WORKSPACE` otherwise.
+
+    * `WORKSPACE`: SQL file is located in <Databricks> workspace. * `GIT`: SQL file is located in
+    cloud Git provider."""
 
     def as_dict(self) -> dict:
         """Serializes the SqlTaskFile into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.path is not None: body['path'] = self.path
+        if self.source is not None: body['source'] = self.source.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskFile:
         """Deserializes the SqlTaskFile from a dictionary."""
-        return cls(path=d.get('path', None))
+        return cls(path=d.get('path', None), source=_enum(d, 'source', Source))
 
 
 @dataclass
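`SqlTaskFile` gains the same `WORKSPACE`/`GIT` switch as `DbtTask`, with the path rule from the new docstring: relative for Git sources, absolute for workspace paths. A hedged sketch (both paths are invented):

from databricks.sdk.service.jobs import Source, SqlTaskFile

# Workspace-sourced SQL file: absolute path, explicit source.
ws_file = SqlTaskFile(path='/Workspace/Queries/daily_report.sql', source=Source.WORKSPACE)  # hypothetical path

# Git-sourced SQL file: relative path; source may be omitted when git_source is defined.
git_file = SqlTaskFile(path='queries/daily_report.sql', source=Source.GIT)  # hypothetical path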
@@ -3844,8 +4011,13 @@ class SubmitTask:
 
     existing_cluster_id: Optional[str] = None
     """If existing_cluster_id, the ID of an existing cluster that is used for all runs of this task.
-    When running tasks on an existing cluster, you may need to manually restart the cluster if it
-    stops responding. We suggest running jobs on new clusters for greater reliability."""
+    Only all-purpose clusters are supported. When running tasks on an existing cluster, you may need
+    to manually restart the cluster if it stops responding. We suggest running jobs on new clusters
+    for greater reliability."""
+
+    for_each_task: Optional[ForEachTask] = None
+    """If for_each_task, indicates that this must execute the nested task within it for the inputs
+    provided."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -3920,6 +4092,7 @@ class SubmitTask:
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.health: body['health'] = self.health.as_dict()
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
@@ -3945,6 +4118,7 @@ class SubmitTask:
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    existing_cluster_id=d.get('existing_cluster_id', None),
+                   for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
                    health=_from_dict(d, 'health', JobsHealthRules),
                    libraries=_repeated_dict(d, 'libraries', compute.Library),
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec),
@@ -3998,8 +4172,13 @@ class Task:
 
     existing_cluster_id: Optional[str] = None
     """If existing_cluster_id, the ID of an existing cluster that is used for all runs of this task.
-    When running tasks on an existing cluster, you may need to manually restart the cluster if it
-    stops responding. We suggest running jobs on new clusters for greater reliability."""
+    Only all-purpose clusters are supported. When running tasks on an existing cluster, you may need
+    to manually restart the cluster if it stops responding. We suggest running jobs on new clusters
+    for greater reliability."""
+
+    for_each_task: Optional[ForEachTask] = None
+    """If for_each_task, indicates that this must execute the nested task within it for the inputs
+    provided."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4097,6 +4276,7 @@ class Task:
         if self.description is not None: body['description'] = self.description
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
         if self.health: body['health'] = self.health.as_dict()
         if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
@@ -4130,6 +4310,7 @@ class Task:
                    description=d.get('description', None),
                    email_notifications=_from_dict(d, 'email_notifications', TaskEmailNotifications),
                    existing_cluster_id=d.get('existing_cluster_id', None),
+                   for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
                    health=_from_dict(d, 'health', JobsHealthRules),
                    job_cluster_key=d.get('job_cluster_key', None),
                    libraries=_repeated_dict(d, 'libraries', compute.Library),
@@ -4944,10 +5125,9 @@ class JobsAPI:
 
         while True:
             json = self._api.do('GET', '/api/2.1/jobs/list', query=query, headers=headers)
-            if 'jobs' not in json or not json['jobs']:
-                return
-            for v in json['jobs']:
-                yield BaseJob.from_dict(v)
+            if 'jobs' in json:
+                for v in json['jobs']:
+                    yield BaseJob.from_dict(v)
             if 'next_page_token' not in json or not json['next_page_token']:
                 return
             query['page_token'] = json['next_page_token']
@@ -5016,10 +5196,9 @@ class JobsAPI:
 
         while True:
             json = self._api.do('GET', '/api/2.1/jobs/runs/list', query=query, headers=headers)
-            if 'runs' not in json or not json['runs']:
-                return
-            for v in json['runs']:
-                yield BaseRun.from_dict(v)
+            if 'runs' in json:
+                for v in json['runs']:
+                    yield BaseRun.from_dict(v)
             if 'next_page_token' not in json or not json['next_page_token']:
                 return
             query['page_token'] = json['next_page_token']
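Both listing loops now tolerate a response page that omits the `jobs`/`runs` array but still carries a `next_page_token`; previously such a page would have ended iteration early. Nothing changes for callers: both methods remain ordinary generators that page through the API internally. A hedged usage sketch (workspace host and credentials are assumed to come from the environment):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # resolves host/credentials from the environment

# The generator pages through /api/2.1/jobs/list transparently; token handling is internal.
for job in w.jobs.list(limit=25):
    print(job.job_id, job.settings.name if job.settings else None)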