dtlpy 1.90.39__py3-none-any.whl → 1.92.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. dtlpy/__init__.py +5 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/lock_open.png +0 -0
  4. dtlpy/entities/__init__.py +1 -1
  5. dtlpy/entities/analytic.py +118 -98
  6. dtlpy/entities/annotation.py +22 -31
  7. dtlpy/entities/annotation_collection.py +19 -21
  8. dtlpy/entities/app.py +13 -3
  9. dtlpy/entities/assignment.py +6 -0
  10. dtlpy/entities/base_entity.py +0 -23
  11. dtlpy/entities/command.py +3 -2
  12. dtlpy/entities/dataset.py +53 -3
  13. dtlpy/entities/dpk.py +15 -0
  14. dtlpy/entities/execution.py +13 -1
  15. dtlpy/entities/feature_set.py +3 -0
  16. dtlpy/entities/filters.py +87 -8
  17. dtlpy/entities/integration.py +1 -1
  18. dtlpy/entities/item.py +41 -1
  19. dtlpy/entities/node.py +49 -3
  20. dtlpy/entities/ontology.py +62 -5
  21. dtlpy/entities/package_function.py +2 -0
  22. dtlpy/entities/package_module.py +13 -0
  23. dtlpy/entities/pipeline.py +20 -1
  24. dtlpy/entities/pipeline_execution.py +37 -6
  25. dtlpy/entities/prompt_item.py +240 -27
  26. dtlpy/entities/recipe.py +37 -0
  27. dtlpy/entities/service.py +33 -4
  28. dtlpy/ml/base_model_adapter.py +166 -18
  29. dtlpy/new_instance.py +80 -9
  30. dtlpy/repositories/apps.py +68 -22
  31. dtlpy/repositories/assignments.py +1 -1
  32. dtlpy/repositories/commands.py +10 -2
  33. dtlpy/repositories/datasets.py +143 -13
  34. dtlpy/repositories/dpks.py +34 -1
  35. dtlpy/repositories/executions.py +27 -30
  36. dtlpy/repositories/feature_sets.py +23 -3
  37. dtlpy/repositories/features.py +4 -1
  38. dtlpy/repositories/models.py +1 -1
  39. dtlpy/repositories/packages.py +6 -3
  40. dtlpy/repositories/pipeline_executions.py +58 -5
  41. dtlpy/repositories/services.py +28 -7
  42. dtlpy/repositories/tasks.py +8 -2
  43. dtlpy/repositories/uploader.py +5 -2
  44. dtlpy/services/api_client.py +74 -12
  45. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/METADATA +2 -2
  46. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/RECORD +54 -57
  47. tests/features/environment.py +67 -1
  48. dtlpy/callbacks/__init__.py +0 -16
  49. dtlpy/callbacks/piper_progress_reporter.py +0 -29
  50. dtlpy/callbacks/progress_viewer.py +0 -54
  51. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp +0 -0
  52. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.bat +0 -0
  53. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.py +0 -0
  54. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/LICENSE +0 -0
  55. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/WHEEL +0 -0
  56. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/entry_points.txt +0 -0
  57. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/top_level.txt +0 -0
dtlpy/repositories/datasets.py

@@ -4,10 +4,12 @@ Datasets Repository
 
 import os
 import sys
+import time
 import copy
 import tqdm
 import logging
-from urllib.parse import urlencode
+import json
+from typing import Union
 
 from .. import entities, repositories, miscellaneous, exceptions, services, PlatformException, _api_reference
 from ..services.api_client import ApiClient
@@ -93,7 +95,11 @@ class Datasets:
         filters._user_query = 'false'
         if not folder_path.startswith('/'):
             folder_path = '/' + folder_path
-        filters.add(field='dir', values=folder_path + '*')
+        filters.add(field='dir', values=folder_path, method=entities.FiltersMethod.OR)
+        if not folder_path.endswith('*'):
+            if not folder_path.endswith('/'):
+                folder_path += '/'
+            filters.add(field='dir', values=folder_path + '*', method=entities.FiltersMethod.OR)
         return filters
 
     def _get_binaries_dataset(self):
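Note: the folder filter now ORs an exact match on the folder with a wildcard match on its children, so the folder entry itself is included in results. A minimal sketch of the equivalent filter built by hand (field names come from the hunk above; the path is a placeholder):

    import dtlpy as dl

    filters = dl.Filters()  # items resource by default
    # match the folder entry itself...
    filters.add(field='dir', values='/train', method=dl.FiltersMethod.OR)
    # ...or anything underneath it
    filters.add(field='dir', values='/train/*', method=dl.FiltersMethod.OR)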
@@ -111,6 +117,70 @@ class Datasets:
         dataset = datasets[0]
         return dataset
 
+    def _resolve_dataset_id(self, dataset, dataset_name, dataset_id):
+        if dataset is None and dataset_name is None and dataset_id is None:
+            raise ValueError('Must provide dataset, dataset name or dataset id')
+        if dataset_id is None:
+            if dataset is None:
+                dataset = self.get(dataset_name=dataset_name)
+            dataset_id = dataset.id
+        return dataset_id
+
+    @staticmethod
+    def _build_payload(filters, include_feature_vectors, include_annotations, export_type, annotation_filters,
+                       feature_vector_filters):
+        valid_list = [e.value for e in entities.ExportType]
+        valid_types = ', '.join(valid_list)
+        if export_type not in ['json', 'zip']:
+            raise ValueError('export_type must be one of the following: {}'.format(valid_types))
+        payload = {'exportType': export_type}
+        if filters is None:
+            filters = entities.Filters()
+
+        if isinstance(filters, entities.Filters):
+            payload['itemsQuery'] = {'filter': filters.prepare()['filter'], 'join': filters.prepare().get("join", {})}
+        elif isinstance(filters, dict):
+            payload['itemsQuery'] = filters
+        else:
+            raise exceptions.BadRequest(message='filters must be of type dict or Filters', status_code=500)
+
+        payload['itemsVectorQuery'] = {}
+        if include_feature_vectors:
+            payload['includeItemVectors'] = True
+            payload['itemsVectorQuery']['select'] = {"datasetId": 1, 'featureSetId': 1, 'value': 1}
+
+        if feature_vector_filters is not None:
+            payload['itemsVectorQuery']['filter'] = feature_vector_filters.prepare()['filter']
+
+        payload['annotations'] = {"include": include_annotations, "convertSemantic": False}
+
+        if annotation_filters is not None:
+            payload['annotationsQuery'] = annotation_filters.prepare()['filter']
+            payload['annotations']['filter'] = True
+
+        return payload
+
+    def _download_exported_item(self, item_id, export_type, local_path=None):
+        export_item = repositories.Items(client_api=self._client_api).get(item_id=item_id)
+        export_item_path = export_item.download(local_path=local_path)
+
+        if export_type == entities.ExportType.ZIP:
+            # unzipping annotations to directory
+            if isinstance(export_item_path, list) or not os.path.isfile(export_item_path):
+                raise exceptions.PlatformException(
+                    error='404',
+                    message='error downloading annotation zip file. see above for more information. item id: {!r}'.format(
+                        export_item.id))
+            try:
+                miscellaneous.Zipping.unzip_directory(zip_filename=export_item_path,
+                                                      to_directory=local_path)
+            except Exception as e:
+                logger.warning("Failed to extract zip file error: {}".format(e))
+            finally:
+                # cleanup
+                if isinstance(export_item_path, str) and os.path.isfile(export_item_path):
+                    os.remove(export_item_path)
+
     @property
     def platform_url(self):
         return self._client_api._get_resource_url("projects/{}/datasets".format(self.project.id))
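For illustration, the request body that _build_payload assembles for POST /datasets/{id}/export when both annotations and feature vectors are requested might look like this (a sketch assembled from the code above; the filter content is a placeholder, not a real DQL query):

    payload = {
        'exportType': 'json',
        'itemsQuery': {'filter': {'$and': [{'dir': '/train'}]}, 'join': {}},
        'includeItemVectors': True,
        'itemsVectorQuery': {'select': {'datasetId': 1, 'featureSetId': 1, 'value': 1}},
        'annotations': {'include': True, 'convertSemantic': False},
    }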
@@ -139,7 +209,7 @@ class Datasets:
         if dataset is not None:
             dataset.open_in_web()
         elif dataset_id is not None:
-            self._client_api._open_in_web(url=self.platform_url + '/' + str(dataset_id))
+            self._client_api._open_in_web(url=f'{self.platform_url}/{dataset_id}/items')
         else:
             self._client_api._open_in_web(url=self.platform_url)
 
@@ -425,12 +495,7 @@ class Datasets:
             directory_tree = dataset.directory_tree
             directory_tree = project.datasets.directory_tree(dataset='dataset_entity')
         """
-        if dataset is None and dataset_name is None and dataset_id is None:
-            raise exceptions.PlatformException('400', 'Must provide dataset, dataset name or dataset id')
-        if dataset_id is None:
-            if dataset is None:
-                dataset = self.get(dataset_name=dataset_name)
-            dataset_id = dataset.id
+        dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
 
         url_path = '/datasets/{}/directoryTree'.format(dataset_id)
 
@@ -519,6 +584,72 @@ class Datasets:
                                  .format(response))
         return self.get(dataset_id=command.spec['returnedModelId'])
 
+    @_api_reference.add(path='/datasets/{id}/export', method='post')
+    def export(self,
+               dataset: entities.Dataset = None,
+               dataset_name: str = None,
+               dataset_id: str = None,
+               local_path: str = None,
+               filters: Union[dict, entities.Filters] = None,
+               annotation_filters: entities.Filters = None,
+               feature_vector_filters: entities.Filters = None,
+               include_feature_vectors: bool = False,
+               include_annotations: bool = False,
+               export_type: entities.ExportType = entities.ExportType.JSON,
+               timeout: int = 0):
+        """
+        Export dataset items and annotations.
+
+        **Prerequisites**: You must be an *owner* or *developer* to use this method.
+
+        You must provide at least ONE of the following params: dataset, dataset_name, dataset_id.
+
+        :param dtlpy.entities.dataset.Dataset dataset: Dataset object
+        :param str dataset_name: The name of the dataset
+        :param str dataset_id: The ID of the dataset
+        :param str local_path: Local path to save the exported dataset
+        :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
+        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for export
+        :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity to filter feature vectors for export
+        :param bool include_feature_vectors: Include item feature vectors in the export
+        :param bool include_annotations: Include item annotations in the export
+        :param entities.ExportType export_type: Type of export ('json' or 'zip')
+        :param int timeout: Maximum time in seconds to wait for the export to complete
+        :return: Exported item
+        :rtype: dtlpy.entities.item.Item
+
+        **Example**:
+
+        .. code-block:: python
+
+            export_item = project.datasets.export(dataset_id='dataset_id',
+                                                  filters=filters,
+                                                  include_feature_vectors=True,
+                                                  include_annotations=True,
+                                                  export_type=dl.ExportType.JSON)
+        """
+        dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
+        payload = self._build_payload(filters, include_feature_vectors, include_annotations, export_type,
+                                      annotation_filters, feature_vector_filters)
+
+        success, response = self._client_api.gen_request(req_type='post', path=f'/datasets/{dataset_id}/export',
+                                                         json_req=payload)
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        command = entities.Command.from_json(_json=response.json(),
+                                             client_api=self._client_api)
+
+        time.sleep(2)  # as the command have wrong progress in the beginning
+        command = command.wait(timeout=timeout)
+        if 'outputItemId' not in command.spec:
+            raise exceptions.PlatformException(
+                error='400',
+                message="outputItemId key is missing in command response: {}".format(response))
+        item_id = command.spec['outputItemId']
+        self._download_exported_item(item_id=item_id, export_type=export_type, local_path=local_path)
+        return local_path
+
     @_api_reference.add(path='/datasets/merge', method='post')
     def merge(self,
               merge_name: str,
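A hedged usage sketch for the zip path (project/dataset names and the local path are placeholders). Note a discrepancy in the source worth knowing: the docstring advertises an Item return, but the code returns the local_path it was given:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')
    local_path = project.datasets.export(dataset_name='my-dataset',
                                         export_type=dl.ExportType.ZIP,
                                         local_path='/tmp/my-dataset-export')
    # the zip is downloaded, unpacked into local_path, and then cleaned up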
@@ -769,7 +900,7 @@ class Datasets:
         Download dataset's annotations by filters.
 
         You may filter the dataset both for items and for annotations and download annotations.
-
+
         Optional -- download annotations as: mask, instance, image mask of the item.
 
         **Prerequisites**: You must be in the role of an *owner* or *developer*.
@@ -917,9 +1048,8 @@ class Datasets:
                            ):
         """
         Upload annotations to dataset.
-
-        Example for remote_root_path: If the item filepath is a/b/item and
-        remote_root_path is /a the start folder will be b instead of a
+
+        Example for remote_root_path: If the item filepath is "/a/b/item" and remote_root_path is "/a" - the start folder will be b instead of a
 
         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
 
dtlpy/repositories/dpks.py

@@ -196,7 +196,8 @@ class Dpks:
         dpk = dpk_v.items[0]
         return dpk
 
-    def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False, manifest_filepath='dataloop.json') -> entities.Dpk:
+    def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False,
+                manifest_filepath='dataloop.json') -> entities.Dpk:
         """
         Upload a dpk entity to the dataloop platform.
 
@@ -290,6 +291,9 @@ class Dpks:
         elif filters.resource != entities.FiltersResource.DPK:
             raise ValueError('Filters resource must to be FiltersResource.DPK. Got: {!r}'.format(filters.resource))
 
+        if self._project is not None:
+            filters.add(field='context.project', values=self._project.id)
+
         paged = entities.PagedEntities(items_repository=self,
                                        filters=filters,
                                        page_offset=filters.page,
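In practice, listing through a project-bound repository is now scoped to that project automatically. A usage sketch, assuming the Project entity exposes a dpks repository (the project name is a placeholder):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')
    # the list filters now implicitly include context.project == project.id
    paged = project.dpks.list()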
@@ -309,6 +313,35 @@ class Dpks:
             raise exceptions.PlatformException(response)
         return response.json()
 
+    def get_revisions(self, dpk_id: str, version: str):
+        """
+        Get the revision of a specific dpk.
+
+        :param str dpk_id: the id of the dpk.
+        :param str version: the version of the dpk.
+        :return the entity of the dpk
+        :rtype entities.Dpk
+
+        ** Example **
+        ..coed-block:: python
+            dpk = dl.dpks.get_revisions(dpk_id='id', version='1.0.0')
+        """
+        if dpk_id is None or version is None:
+            raise ValueError('You must provide both dpk_id and version')
+        url = '/app-registry/{}/revisions/{}'.format(dpk_id, version)
+
+        # request
+        success, response = self._client_api.gen_request(req_type='get',
+                                                         path=url)
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        dpk = entities.Dpk.from_json(_json=response.json(),
+                                     client_api=self._client_api,
+                                     project=self._project,
+                                     is_fetched=False)
+        return dpk
+
     def get(self, dpk_name: str = None, dpk_version: str = None, dpk_id: str = None) -> entities.Dpk:
         """
         Get a specific dpk from the platform.
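Note: the new docstring's example directive is misspelled in the released source ("..coed-block:: python" rather than ".. code-block:: python"). The intended usage, as a sketch with placeholder values:

    dpk = dl.dpks.get_revisions(dpk_id='dpk-id', version='1.0.0')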
dtlpy/repositories/executions.py

@@ -7,6 +7,7 @@ from .. import exceptions, entities, repositories, miscellaneous, _api_reference
 from ..services.api_client import ApiClient
 
 logger = logging.getLogger(name='dtlpy')
+MAX_SLEEP_TIME = 30
 
 
 class Executions:
@@ -562,15 +563,19 @@ class Executions:
         return execution
 
     def wait(self,
-             execution_id: str,
-             timeout: int = None):
+             execution_id: str = None,
+             execution: entities.Execution = None,
+             timeout: int = None,
+             backoff_factor=1):
         """
         Get Service execution object.
 
         **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
 
         :param str execution_id: execution id
+        :param str execution: dl.Execution, optional. must input one of execution or execution_id
         :param int timeout: seconds to wait until TimeoutError is raised. if <=0 - wait until done - by default wait take the service timeout
+        :param float backoff_factor: A backoff factor to apply between attempts after the second try
         :return: Service execution object
         :rtype: dtlpy.entities.execution.Execution
 
@@ -580,40 +585,32 @@ class Executions:
 
             service.executions.wait(execution_id='execution_id')
         """
-        url_path = "/executions/{}".format(execution_id)
+        if execution is None:
+            if execution_id is None:
+                raise ValueError('Must input at least one: [execution, execution_id]')
+            else:
+                execution = self.get(execution_id=execution_id)
         elapsed = 0
         start = int(time.time())
-        if timeout is not None and timeout <= 0:
+        if timeout is None or timeout <= 0:
             timeout = np.inf
 
-        i = 1
-        while True:
-            success, response = self._client_api.gen_request(req_type="get",
-                                                             path=url_path,
-                                                             log_error=False)
-            if not success:
-                raise exceptions.PlatformException(response)
-            # return entity
-            execution = entities.Execution.from_json(client_api=self._client_api,
-                                                     _json=response.json(),
-                                                     project=self._project,
-                                                     service=self._service)
-            if timeout is None:
-                timeout = execution.service.execution_timeout + 60
-            if execution.latest_status['status'] in ['failed', 'success', 'terminated', 'aborted', 'canceled',
-                                                     'system-failure']:
+        num_tries = 1
+        while elapsed < timeout:
+            execution = self.get(execution_id=execution.id)
+            if not execution.in_progress():
                 break
-            elapsed = int(time.time()) - start
-            i += 1
-            if i > 18 or elapsed > timeout:
-                break
-            sleep_time = np.minimum(timeout - elapsed, 2 ** i)
+            elapsed = time.time() - start
+            if elapsed >= timeout:
+                raise TimeoutError(
+                    f"execution wait() got timeout. id: {execution.id!r}, status: {execution.latest_status}")
+            sleep_time = np.min([timeout - elapsed, backoff_factor * (2 ** num_tries), MAX_SLEEP_TIME])
+            num_tries += 1
+            logger.debug("Execution {!r} is running for {:.2f}[s] and now Going to sleep {:.2f}[s]".format(execution.id,
+                                                                                                           elapsed,
+                                                                                                           sleep_time))
             time.sleep(sleep_time)
-        if execution is None:
-            raise ValueError('Nothing to wait for')
-        if elapsed >= timeout:
-            raise TimeoutError("execution wait() got timeout. id: {!r}, status: {}".format(
-                execution.id, execution.latest_status))
+
         return execution
 
     @_api_reference.add(path='/executions/{id}/terminate', method='post')
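The rewritten loop polls with capped exponential backoff instead of a fixed 18-try cap: with the defaults above, each sleep is min(backoff_factor * 2**n, MAX_SLEEP_TIME) seconds, i.e. 2, 4, 8, 16, 30, 30, ... until the execution leaves the in-progress state or the timeout elapses. A usage sketch (the execution id is a placeholder):

    execution = service.executions.wait(execution_id='execution-id',
                                        timeout=600,
                                        backoff_factor=1)
    print(execution.latest_status)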
dtlpy/repositories/feature_sets.py

@@ -11,11 +11,16 @@ class FeatureSets:
     """
     URL = '/features/sets'
 
-    def __init__(self, client_api: ApiClient,
+    def __init__(self,
+                 client_api: ApiClient,
                  project_id: str = None,
-                 project: entities.Project = None):
+                 project: entities.Project = None,
+                 model_id: str = None,
+                 model: entities.Model = None):
         self._project = project
         self._project_id = project_id
+        self._model = model
+        self._model_id = model_id
         self._client_api = client_api
 
     ############
@@ -34,11 +39,23 @@ class FeatureSets:
         if self._project is None:
             raise exceptions.PlatformException(
                 error='2001',
-                message='Cannot perform action WITHOUT Project entity in Datasets repository.'
+                message='Cannot perform action WITHOUT Project entity in FeatureSets repository.'
                         ' Please checkout or set a project')
         assert isinstance(self._project, entities.Project)
         return self._project
 
+    @property
+    def model(self) -> entities.Model:
+        if self._model is None and self._model_id is not None:
+            # get from id
+            self._model = repositories.Models(client_api=self._client_api).get(model_id=self._model_id)
+        if self._model is None:
+            raise exceptions.PlatformException(
+                error='2001',
+                message='Cannot perform action WITHOUT Model entity in FeatureSets repository.')
+        assert isinstance(self._model, entities.Model)
+        return self._model
+
     ###########
     # methods #
     ###########
@@ -132,6 +149,7 @@ class FeatureSets:
                set_type: str,
                entity_type: entities.FeatureEntityType,
                project_id: str = None,
+               model_id: set = None,
                org_id: str = None):
        """
        Create a new Feature Set
@@ -141,6 +159,7 @@ class FeatureSets:
        :param str set_type: string of the feature type: 2d, 3d, modelFC, TSNE,PCA,FFT
        :param entity_type: the entity that feature vector is linked to. Use the enum dl.FeatureEntityType
        :param str project_id: the ID of the project where feature set will be created
+       :param str model_id: the ID of the model that creates the vectors
        :param str org_id: the ID of the org where feature set will be created
        :return: Feature Set object
        """
@@ -154,6 +173,7 @@ class FeatureSets:
                   'size': size,
                   'type': set_type,
                   'project': project_id,
+                  'modelId': model_id,
                   'entityType': entity_type}
        if org_id is not None:
            payload['org'] = org_id
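A usage sketch for the new model binding (names, size and ids are placeholders; note the released signature annotates model_id as 'set', though the docstring and payload make clear a string id is intended):

    feature_set = project.feature_sets.create(name='my-embeddings',
                                              size=512,
                                              set_type='modelFC',
                                              entity_type=dl.FeatureEntityType.ITEM,
                                              model_id='model-id')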
dtlpy/repositories/features.py

@@ -89,6 +89,9 @@ class Features:
         if filters is None:
             filters = entities.Filters(resource=entities.FiltersResource.FEATURE)
             filters._user_query = 'false'
+        # default sorting
+        if filters.sort == dict():
+            filters.sort_by(field='id')
         # assert type filters
         if not isinstance(filters, entities.Filters):
             raise exceptions.PlatformException(error='400',
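The id sort is applied only when filters.sort is empty, so an explicit sort still wins. A sketch (the field and the direction enum are assumptions based on the Filters API used above):

    filters = dl.Filters(resource=dl.FiltersResource.FEATURE)
    filters.sort_by(field='createdAt', value=dl.FiltersOrderByDirection.DESCENDING)
    # listing with these filters keeps the caller's sort; no default id sort is added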
@@ -149,7 +152,7 @@ class Features:
        :param immutable value: actual vector - immutable (list of floats [1,2,3])
        :param str project_id: the id of the project where feature will be created
        :param str feature_set_id: ref to a featureSet this vector is a part of
-       :param entity: the entity the featureVector is linked to (item.id, annotation.id etc)
+       :param entity: the entity the featureVector is linked to (item, annotation, etc)
        :param str version: version of the featureSet generator
        :param str parent_id: optional: parent FeatureSet id - used when FeatureVector is a subFeature
        :param str org_id: the id of the org where featureVector will be created
dtlpy/repositories/models.py

@@ -668,7 +668,7 @@ class Models:
        :param item_ids: a list of item id to run the prediction.
        :return:
        """
-        if len(model.metadata['system']['deploy']['services']) == 0:
+        if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
            # no services for model
            raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
        payload = {'input': {'itemIds': item_ids},
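The chained .get() calls make the guard tolerant of models that were never deployed: a missing 'deploy' or 'services' key now yields an empty list (and the intended ValueError) instead of a KeyError. The equivalent standalone check, for illustration:

    services = model.metadata['system'].get('deploy', {}).get('services', [])
    if len(services) == 0:
        raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")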
dtlpy/repositories/packages.py

@@ -664,9 +664,12 @@ class Packages:
         :return: Package object
         :rtype: dtlpy.entities.package.Package
         """
-        # if is dtlpy entity convert to dict
-        if modules and isinstance(modules[0], entities.PackageModule):
-            modules = [module.to_json() for module in modules]
+        if modules is not None:
+            if not isinstance(modules, list):
+                modules = [modules]
+
+            if isinstance(modules[0], entities.PackageModule):
+                modules = [module.to_json() for module in modules]
 
         if slots and isinstance(slots[0], entities.PackageSlot):
             slots = [slot.to_json() for slot in slots]
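With the normalization above, a single module no longer has to be wrapped in a list by the caller. A hedged sketch, assuming this hunk sits in the push path and that the module construction shown is valid (names and paths are placeholders):

    module = dl.PackageModule(name='default_module', entry_point='main.py')
    package = project.packages.push(package_name='my-package',
                                    src_path='/path/to/code',
                                    modules=module)  # a bare module is now accepted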
dtlpy/repositories/pipeline_executions.py

@@ -1,8 +1,12 @@
 import logging
+import time
+import numpy as np
+
 from .. import entities, repositories, exceptions, miscellaneous, services, _api_reference
 from ..services.api_client import ApiClient
 
 logger = logging.getLogger(name='dtlpy')
+MAX_SLEEP_TIME = 30
 
 
 class PipelineExecutions:
@@ -81,7 +85,7 @@ class PipelineExecutions:
         if pipeline_id is None and self._pipeline is None:
             raise exceptions.PlatformException('400', 'Must provide param pipeline_id')
         elif pipeline_id is None:
-            pipeline_id = self.pipeline.id
+            pipeline_id = self._pipeline.id
 
         success, response = self._client_api.gen_request(
             req_type="get",
@@ -93,13 +97,13 @@ class PipelineExecutions:
         if not success:
             raise exceptions.PlatformException(response)
 
-        pipeline = entities.PipelineExecution.from_json(
+        pipeline_execution = entities.PipelineExecution.from_json(
             client_api=self._client_api,
             _json=response.json(),
             pipeline=self._pipeline
         )
 
-        return pipeline
+        return pipeline_execution
 
     def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.PipelineExecution]:
         pool = self._client_api.thread_pools(pool_name='entity.create')
@@ -175,7 +179,7 @@ class PipelineExecutions:
         # filters.add(field='projectId', values=self.project.id)
 
         if self._pipeline is not None:
-            filters.add(field='pipelineId', values=self.pipeline.id)
+            filters.add(field='pipelineId', values=self._pipeline.id)
 
         paged = entities.PagedEntities(
             items_repository=self,
@@ -344,7 +348,7 @@ class PipelineExecutions:
         if pipeline_id is None and self._pipeline is None:
             raise exceptions.PlatformException('400', 'Must provide param pipeline_id')
         elif pipeline_id is None:
-            pipeline_id = self.pipeline.id
+            pipeline_id = self._pipeline.id
 
         if filters is None:
             filters = entities.Filters(resource=entities.FiltersResource.PIPELINE_EXECUTION)
@@ -379,3 +383,52 @@ class PipelineExecutions:
                 message="cycleOptions key is missing in command response: {!r}"
                         .format(response))
         return True
+
+    def wait(self,
+             pipeline_execution_id: str = None,
+             pipeline_execution: entities.PipelineExecution = None,
+             timeout: int = None,
+             backoff_factor=1):
+        """
+        Get Service execution object.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
+
+        :param str pipeline_execution_id: pipeline execution id
+        :param str pipeline_execution: dl.PipelineExecution, optional. must input one of pipeline execution or pipeline_execution_id
+        :param int timeout: seconds to wait until TimeoutError is raised. if <=0 - wait until done - by default wait take the service timeout
+        :param float backoff_factor: A backoff factor to apply between attempts after the second try
+        :return: Service execution object
+        :rtype: dtlpy.entities.pipeline_execution.PipelineExecution
+
+        **Example**:
+
+        .. code-block:: python
+
+            pipeline.pipeline_executions.wait(pipeline_execution_id='pipeline_execution_id')
+        """
+        if pipeline_execution is None:
+            if pipeline_execution_id is None:
+                raise ValueError('Must input at least one: [pipeline_execution, pipeline_execution_id]')
+        else:
+            pipeline_execution_id = pipeline_execution.id
+        elapsed = 0
+        start = time.time()
+        if timeout is None or timeout <= 0:
+            timeout = np.inf
+
+        num_tries = 1
+        while elapsed < timeout:
+            pipeline_execution = self.get(pipeline_execution_id=pipeline_execution_id)
+            if not pipeline_execution.in_progress():
+                break
+            elapsed = time.time() - start
+            if elapsed >= timeout:
+                raise TimeoutError(
+                    f"Pipeline execution wait() function timed out. id: {pipeline_execution.id!r}, status: {pipeline_execution.status}.")
+            sleep_time = np.min([timeout - elapsed, backoff_factor * (2 ** num_tries), MAX_SLEEP_TIME])
+            num_tries += 1
+            logger.debug(
+                f"Pipeline execution {pipeline_execution.id!r} has been running for {elapsed:.2f}[s]. Sleeping for {sleep_time:.2f}[s]")
+            time.sleep(sleep_time)
+        return pipeline_execution
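A usage sketch mirroring Executions.wait (ids are placeholders; pipeline.execute() returning a cycle entity is an assumption, while the pipeline.pipeline_executions.wait call is taken from the docstring above):

    pipeline = dl.pipelines.get(pipeline_id='pipeline-id')
    cycle = pipeline.execute(execution_input={'item': 'item-id'})
    cycle = pipeline.pipeline_executions.wait(pipeline_execution=cycle, timeout=900)
    print(cycle.status)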