dtlpy 1.88.15__py3-none-any.whl → 1.90.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. dtlpy/__init__.py +3 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/lock_open.png +0 -0
  4. dtlpy/dlp/command_executor.py +23 -10
  5. dtlpy/dlp/parser.py +2 -2
  6. dtlpy/entities/__init__.py +2 -2
  7. dtlpy/entities/app.py +56 -4
  8. dtlpy/entities/dataset.py +7 -1
  9. dtlpy/entities/dpk.py +29 -34
  10. dtlpy/entities/filters.py +7 -1
  11. dtlpy/entities/integration.py +8 -3
  12. dtlpy/entities/model.py +30 -2
  13. dtlpy/entities/package_function.py +1 -0
  14. dtlpy/entities/package_module.py +1 -0
  15. dtlpy/entities/pipeline.py +1 -1
  16. dtlpy/entities/service.py +17 -3
  17. dtlpy/examples/upload_items_with_modalities.py +1 -1
  18. dtlpy/ml/base_feature_extractor_adapter.py +28 -0
  19. dtlpy/ml/base_model_adapter.py +37 -10
  20. dtlpy/repositories/__init__.py +1 -0
  21. dtlpy/repositories/apps.py +82 -4
  22. dtlpy/repositories/dpks.py +37 -9
  23. dtlpy/repositories/executions.py +10 -6
  24. dtlpy/repositories/features.py +8 -2
  25. dtlpy/repositories/integrations.py +1 -0
  26. dtlpy/repositories/models.py +57 -10
  27. dtlpy/repositories/packages.py +5 -2
  28. dtlpy/repositories/pipeline_executions.py +8 -5
  29. dtlpy/repositories/recipes.py +3 -2
  30. dtlpy/repositories/schema.py +120 -0
  31. dtlpy/repositories/services.py +5 -2
  32. dtlpy/services/api_client.py +9 -0
  33. dtlpy/services/logins.py +49 -18
  34. dtlpy/utilities/converter.py +37 -20
  35. dtlpy/utilities/dataset_generators/dataset_generator.py +2 -1
  36. {dtlpy-1.88.15.dist-info → dtlpy-1.90.37.dist-info}/METADATA +2 -2
  37. {dtlpy-1.88.15.dist-info → dtlpy-1.90.37.dist-info}/RECORD +45 -43
  38. tests/features/environment.py +5 -1
  39. {dtlpy-1.88.15.data → dtlpy-1.90.37.data}/scripts/dlp +0 -0
  40. {dtlpy-1.88.15.data → dtlpy-1.90.37.data}/scripts/dlp.bat +0 -0
  41. {dtlpy-1.88.15.data → dtlpy-1.90.37.data}/scripts/dlp.py +0 -0
  42. {dtlpy-1.88.15.dist-info → dtlpy-1.90.37.dist-info}/LICENSE +0 -0
  43. {dtlpy-1.88.15.dist-info → dtlpy-1.90.37.dist-info}/WHEEL +0 -0
  44. {dtlpy-1.88.15.dist-info → dtlpy-1.90.37.dist-info}/entry_points.txt +0 -0
  45. {dtlpy-1.88.15.dist-info → dtlpy-1.90.37.dist-info}/top_level.txt +0 -0
dtlpy/ml/base_feature_extractor_adapter.py (new file)
@@ -0,0 +1,28 @@
+from abc import ABC
+
+from .base_model_adapter import BaseModelAdapter
+from .. import entities
+
+
+class BaseFeatureExtractorAdapter(BaseModelAdapter, ABC):
+    def __init__(self, model_entity: entities.Model = None):
+        super().__init__(model_entity)
+
+    def extract_features(self, batch: list, **kwargs):
+        """ Runs inference with the model, but does not predict. Instead, extracts features for the input batch.
+
+        Virtual method - need to implement
+
+        :param batch: `list` a list containing a batch of items whose features will be extracted
+        """
+        raise NotImplementedError("Please implement 'extract_features' method in {}".format(self.__class__.__name__))
+
+    def extract_dataset_features(self, dataset: entities.Dataset, **kwargs):
+        """ Runs inference to extract features for all items in a dataset.
+
+        Virtual method - need to implement
+
+        :param dataset: `entities.Dataset` dataset entity whose items will have their features extracted
+        """
+        raise NotImplementedError("Please implement 'extract_dataset_features' method in "
+                                  "{}".format(self.__class__.__name__))
dtlpy/ml/base_model_adapter.py
@@ -157,6 +157,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         """
         import dtlpymetrics
         compare_types = model.output_type
+        if not filters:
+            filters = entities.Filters()
         if filters is not None and isinstance(filters, dict):
             filters = entities.Filters(custom_filter=filters)
         model = dtlpymetrics.scoring.create_model_score(model=model,
@@ -252,9 +254,22 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         else:
             self.logger.debug("Downloading subset {!r} of {}".format(subset,
                                                                      self.model_entity.dataset.name))
+
+        if self.configuration.get("include_model_annotations", False):
+            annotation_filters = None
+        else:
+            annotation_filters = entities.Filters(
+                field="metadata.system.model.name",
+                values=False,
+                operator=entities.FiltersOperations.EXISTS,
+                resource=entities.FiltersResource.ANNOTATION
+            )
+
         ret_list = dataset.items.download(filters=filters,
                                           local_path=data_subset_base_path,
-                                          annotation_options=annotation_options)
+                                          annotation_options=annotation_options,
+                                          annotation_filters=annotation_filters
+                                          )

         self.convert_from_dtlpy(data_path=data_path, **kwargs)
         return root_path, data_path, output_path
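The filter added above is worth spelling out: unless the adapter's configuration sets "include_model_annotations" to True, annotations whose metadata carries a model name (i.e. model-generated annotations) are excluded from the download. A standalone sketch of the same filter:

import dtlpy as dl

# keep only human-made annotations: match annotations where the
# metadata.system.model.name field does NOT exist (values=False + EXISTS)
annotation_filters = dl.Filters(
    field='metadata.system.model.name',
    values=False,
    operator=dl.FiltersOperations.EXISTS,
    resource=dl.FiltersResource.ANNOTATION,
)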
@@ -395,10 +410,13 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         self.logger.debug("Predicting dataset (name:{}, id:{}, using batch size {}".format(dataset.name,
                                                                                            dataset.id,
                                                                                            batch_size))
+        if not filters:
+            filters = entities.Filters()
         if filters is not None and isinstance(filters, dict):
             filters = entities.Filters(custom_filter=filters)
         pages = dataset.items.list(filters=filters, page_size=batch_size)
-        self.predict_items(items=list(pages.all()),
+        items = [item for item in pages.all() if item.type == 'file']
+        self.predict_items(items=items,
                            with_upload=with_upload,
                            cleanup=cleanup,
                            batch_size=batch_size,
@@ -424,6 +442,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         output_path = None
         try:
             logger.info("Received {s} for training".format(s=model.id))
+            model = model.wait_for_model_ready()
+            if model.status == 'failed':
+                raise ValueError("Model is in failed state, cannot train.")

             ##############
             # Set status #
@@ -523,6 +544,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         # Predicting #
         ##############
         logger.info(f"Calling prediction, dataset: {dataset.name!r} ({model.id!r}), filters: {filters}")
+        if not filters:
+            filters = entities.Filters()
         self.predict_dataset(dataset=dataset,
                              filters=filters,
                              with_upload=True)
@@ -614,17 +637,21 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         """
         for prediction in predictions:
             if prediction.type == entities.AnnotationType.SEGMENTATION:
+                color = None
                 try:
-                    color = self.model_entity.dataset._get_ontology().color_map.get(prediction.label)
+                    color = item.dataset._get_ontology().color_map.get(prediction.label, None)
                 except (exceptions.BadRequest, exceptions.NotFound):
-                    color = None
-                    logger.warning("Can't get annotation color from item's dataset, using model's dataset.")
+                    ...
+                if color is None:
+                    if self.model_entity._dataset is not None:
+                        try:
+                            color = self.model_entity.dataset._get_ontology().color_map.get(prediction.label,
+                                                                                            (255, 255, 255))
+                        except (exceptions.BadRequest, exceptions.NotFound):
+                            ...
                 if color is None:
-                    try:
-                        color = self.model_entity.dataset._get_ontology().color_map.get(prediction.label)
-                    except (exceptions.BadRequest, exceptions.NotFound):
-                        logger.warning("Can't get annotation color from model's dataset, using default.")
-                        color = prediction.color
+                    logger.warning("Can't get annotation color from model's dataset, using default.")
+                    color = prediction.color
                 prediction.color = color

             prediction.item_id = item.id
dtlpy/repositories/__init__.py
@@ -50,3 +50,4 @@ from .apps import Apps
 from .dpks import Dpks
 from .messages import Messages
 from .compositions import Compositions
+from .schema import Schema
dtlpy/repositories/apps.py
@@ -183,7 +183,13 @@ class Apps:
             return success
         raise exceptions.PlatformException(response)

-    def install(self, dpk: entities.Dpk, app_name: str = None, organization_id: str = None) -> entities.App:
+    def install(self,
+                dpk: entities.Dpk,
+                app_name: str = None,
+                organization_id: str = None,
+                custom_installation: dict = None,
+                scope: entities.AppScope = None
+                ) -> entities.App:
         """
         Install the specified app in the project.

@@ -191,6 +197,9 @@ class Apps:
         :param entities.Dpk dpk: the dpk entity
         :param str app_name: installed app name. default is the dpk name
         :param str organization_id: the organization on which to apply the filter.
+        :param dict custom_installation: a partial-installation specification.
+        :param entities.AppScope scope: the scope of the app. default is project.
+
         :return the installed app.
         :rtype entities.App

@@ -203,13 +212,15 @@ class Apps:

         if app_name is None:
             app_name = dpk.display_name
+        if isinstance(scope, entities.AppScope):
+            scope = scope.value
         app = entities.App.from_json(_json={'name': app_name,
                                             'projectId': self.project.id,
                                             'orgId': organization_id,
                                             'dpkName': dpk.name,
-                                            "dpkConfig": dpk.to_json(),
+                                            "customInstallation": custom_installation,
                                             'dpkVersion': dpk.version,
-                                            'scope': dpk.scope
+                                            'scope': scope
                                             },
                                            client_api=self._client_api,
                                            project=self.project)
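A usage sketch of the extended signature. The project and dpk names are hypothetical, and it is assumed here that AppScope is exported at the package level (with a PROJECT member) like other entities; the enum is converted to its string value internally, as the hunk above shows.

import dtlpy as dl

project = dl.projects.get(project_name='my-project')   # hypothetical name
dpk = project.dpks.get(dpk_name='my-dpk')              # hypothetical name
app = project.apps.install(dpk=dpk,
                           app_name='my-app',
                           scope=dl.AppScope.PROJECT)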
@@ -242,7 +253,8 @@ class Apps:
                 error='400',
                 message='You must provide an identifier in inputs')
         if app_name is not None:
-            app_id = self.__get_by_name(app_name)
+            app = self.__get_by_name(app_name)
+            app_id = app.id

         success, response = self._client_api.gen_request(req_type='delete', path='/apps/{}'.format(app_id))
         if not success:
@@ -250,3 +262,69 @@ class Apps:

         logger.debug(f"App deleted successfully (id: {app_id}, name: {app_name})")
         return success
+
+    def resume(self, app: entities.App = None, app_id: str = None) -> bool:
+        """
+        Activate an app entity.
+
+        Note: You are required to provide either app or app_id.
+
+        :param entities.App app: the app entity
+        :param str app_id: optional - the id of the app.
+        :return whether the app was activated successfully.
+        :rtype bool
+
+        **Example**
+        .. code-block:: python
+            succeed = dl.apps.resume(app)
+        """
+        if app_id is not None and app is None:
+            app = self.get(app_id=app_id)
+        if app is None:
+            raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
+
+        if app and app.status == entities.CompositionStatus.INSTALLED:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Application is already active'
+            )
+
+        success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/activate'.format(app.id))
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        logger.debug(f"App resumed successfully (id: {app.id}, name: {app.name})")
+        return success
+
+    def pause(self, app: entities.App = None, app_id: str = None) -> bool:
+        """
+        Pause an app entity.
+
+        Note: You are required to provide either app or app_id.
+
+        :param entities.App app: the app entity
+        :param str app_id: optional - the id of the app.
+        :return whether the app was paused successfully.
+        :rtype bool
+
+        **Example**
+        .. code-block:: python
+            succeed = dl.apps.pause(app)
+        """
+        if app_id is not None and app is None:
+            app = self.get(app_id=app_id)
+        if app is None:
+            raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
+
+        if app and app.status == entities.CompositionStatus.UNINSTALLED:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Application is already inactive'
+            )
+
+        success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/deactivate'.format(app.id))
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        logger.debug(f"App paused successfully (id: {app.id}, name: {app.name})")
+        return success
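Taken together, the two new methods toggle an installed app on and off; a short sketch (the project and app names are hypothetical, and it is assumed apps.get accepts app_name as it does app_id):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # hypothetical name
app = project.apps.get(app_name='my-app')             # hypothetical name
project.apps.pause(app=app)     # POST /apps/{id}/deactivate
project.apps.resume(app=app)    # POST /apps/{id}/activate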
dtlpy/repositories/dpks.py
@@ -2,6 +2,7 @@ import json
 import logging
 import os
 from typing import List, Optional
+from pathlib import Path

 from .. import exceptions, entities, services, miscellaneous, assets
 from ..services.api_client import ApiClient
@@ -25,27 +26,33 @@ class Dpks:
         self._project = project

     def init(self, directory: str = None, name: str = None, description: str = None,
-             categories: List[str] = None, icon: str = None, scope: str = None):
+             attributes: dict = None, icon: str = None, scope: str = None):
         """
         Initialize a dpk project with the specified parameters.

         :param str directory: the directory where to initialize the project
         :param str name: the name of the dpk.
         :param str description: the description of the dpk.
-        :param str categories: the categories of the dpk.
+        :param dict attributes: the attributes of the dpk.
         :param str icon: the icon of the dpk.
         :param str scope: the scope of the dpk.

         ** Example **
         .. code-block:: python
-            dl.dpks.init(name='Hello World', description='A description of the dpk', categories=['starter', 'advanced'],
+            dl.dpks.init(name='Hello World', description='A description of the dpk', attributes={
+                             "Provider": "Dataloop",
+                             "License": "",
+                             "Category": "Model",
+                             "Computer Vision": "Object Detection",
+                             "Media Type": "Image"
+                         },
                          icon='path_to_icon', scope='organization')
         """
         if directory is None:
             directory = os.getcwd()
         dpk = entities.Dpk.from_json(_json={'name': miscellaneous.JsonUtils.get_if_absent(name),
                                             'description': miscellaneous.JsonUtils.get_if_absent(description),
-                                            'categories': miscellaneous.JsonUtils.get_if_absent(categories),
+                                            'attributes': miscellaneous.JsonUtils.get_if_absent(attributes),
                                             'icon': miscellaneous.JsonUtils.get_if_absent(icon),
                                             'scope': miscellaneous.JsonUtils.get_if_absent(scope, 'organization'),
                                             'components': dict()
189
196
  dpk = dpk_v.items[0]
190
197
  return dpk
191
198
 
192
- def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False) -> entities.Dpk:
199
+ def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False, manifest_filepath='dataloop.json') -> entities.Dpk:
193
200
  """
194
201
  Upload a dpk entity to the dataloop platform.
195
202
 
196
- :param entities.Dpk dpk: the dpk to publish
203
+ :param entities.Dpk dpk: Optional. The DPK entity to publish. If None, a new DPK is created
204
+ from the manifest file.
205
+ :param bool ignore_max_file_size: Optional. If True, the maximum file size check is ignored
206
+ during the packaging of the codebase.
207
+ :param str manifest_filepath: Optional. Path to the manifest file. Can be absolute or relative.
208
+ Defaults to 'dataloop.json'
209
+
197
210
  :return the published dpk
198
211
  :rtype dl.entities.Dpk
199
212
 
@@ -202,16 +215,31 @@ class Dpks:
         .. code-block:: python
             published_dpk = dl.dpks.publish()
         """
+        manifest_path = Path(manifest_filepath).resolve()

         if dpk is None:
-            if not os.path.exists(os.path.abspath('dataloop.json')):
-                raise ValueError('dataloop.json file must be exists in order to publish a dpk')
-            with open('dataloop.json', 'r') as f:
+            if not manifest_path.exists():
+                raise FileNotFoundError(f'{manifest_filepath} file must exist in order to publish a dpk')
+            with open(manifest_filepath, 'r') as f:
                 json_file = json.load(f)
             dpk = entities.Dpk.from_json(_json=json_file,
                                          client_api=self._client_api,
                                          project=self.project)

+        if not dpk.context:
+            dpk.context = {}
+        if 'project' not in dpk.context:
+            if not self.project:
+                raise exceptions.PlatformException('400', 'project id must be provided in the context')
+            dpk.context['project'] = self.project.id
+        if 'org' not in dpk.context and dpk.scope == 'organization':
+            if not self.project:
+                raise exceptions.PlatformException('400', 'org id must be provided in the context')
+            dpk.context['org'] = self.project.org['id']
+
+        if self.project and self.project.id != dpk.context['project']:
+            logger.warning("The provided project id differs from the dpk's project id")
+
         if dpk.codebase is None:
             dpk.codebase = self.project.codebases.pack(directory=os.getcwd(),
                                                        name=dpk.display_name,
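With the new manifest_filepath parameter, a dpk can now be published from a manifest outside the current working directory; a short sketch (the project name and path are hypothetical):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # hypothetical name
# reads the manifest, fills in the project/org context, packs the codebase, publishes
dpk = project.dpks.publish(manifest_filepath='my_app/dataloop.json')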
dtlpy/repositories/executions.py
@@ -285,7 +285,7 @@ class Executions:
         :param str service_id: service id to execute on
         :param filters: Filters entity for a filtering before execute
         :param str function_name: function name to run
-        :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities
+        :param List[FunctionIO] or dict execution_inputs: input dictionary or list of FunctionIO entities representing the function's extra inputs
         :param bool wait: wait until create task finish
         :return: execution object
         :rtype: dtlpy.entities.execution.Execution
@@ -307,15 +307,18 @@ class Executions:
         if filters is None:
             raise exceptions.PlatformException('400', 'Please provide filter')

+        if execution_inputs is None:
+            execution_inputs = dict()
+
         if isinstance(execution_inputs, dict):
-            customer_inputs = execution_inputs
+            extra_inputs = execution_inputs
         else:
             if not isinstance(execution_inputs, list):
                 execution_inputs = [execution_inputs]
             if len(execution_inputs) > 0 and isinstance(execution_inputs[0], entities.FunctionIO):
-                customer_inputs = dict()
+                extra_inputs = dict()
                 for single_input in execution_inputs:
-                    customer_inputs.update(single_input.to_json(resource='execution'))
+                    extra_inputs.update(single_input.to_json(resource='execution'))
             else:
                 raise exceptions.PlatformException('400', 'Unknown input type')

@@ -323,7 +326,7 @@ class Executions:
         payload = dict()
         payload['batch'] = dict()
         payload['batch']['query'] = filters.prepare()
-        payload['batch']['args'] = customer_inputs
+        payload['batch']['args'] = extra_inputs

         if function_name is not None:
             payload['functionName'] = function_name
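As the renamed variable suggests, the extra inputs may arrive either as a plain dict or as FunctionIO entities; both forms end up as the same 'args' mapping. A sketch of the equivalence (field name and value hypothetical):

import dtlpy as dl

# these two forms produce the same batch 'args' payload
as_dict = {'mode': 'fast'}
as_entity = dl.FunctionIO(type=dl.PackageInputType.STRING, name='mode', value='fast')
print(as_entity.to_json(resource='execution'))  # -> {'mode': 'fast'}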
@@ -597,7 +600,8 @@ class Executions:
                                           service=self._service)
             if timeout is None:
                 timeout = execution.service.execution_timeout + 60
-            if execution.latest_status['status'] in ['failed', 'success', 'terminated', 'aborted', 'canceled', 'system-failure']:
+            if execution.latest_status['status'] in ['failed', 'success', 'terminated', 'aborted', 'canceled',
+                                                     'system-failure']:
                 break
             elapsed = int(time.time()) - start
             i += 1
dtlpy/repositories/features.py
@@ -18,6 +18,8 @@ class Features:
                  item: entities.Item = None,
                  annotation: entities.Annotation = None,
                  feature_set: entities.FeatureSet = None):
+        if project is not None and project_id is None:
+            project_id = project.id
         self._project = project
         self._project_id = project_id
         self._item = item
34
36
 
35
37
  @property
36
38
  def project(self) -> entities.Project:
39
+ if self._project is None and self._project_id is None and self._item is not None:
40
+ self._project = self._item.project
41
+ self._project_id = self._project.id
37
42
  if self._project is None and self._project_id is not None:
38
43
  # get from id
39
44
  self._project = repositories.Projects(client_api=self._client_api).get(project_id=self._project_id)
@@ -96,8 +101,9 @@ class Features:
96
101
  filters.add(field='featureSetId', values=self._feature_set.id)
97
102
  if self._item is not None:
98
103
  filters.add(field='entityId', values=self._item.id)
99
- if self._project_id is not None:
100
- filters.context = {"projects": [self._project_id]}
104
+ if self._project_id is None:
105
+ self._project_id = self.project.id
106
+ filters.context = {"projects": [self._project_id]}
101
107
 
102
108
  paged = entities.PagedEntities(items_repository=self,
103
109
  filters=filters,
dtlpy/repositories/integrations.py
@@ -111,6 +111,7 @@ class Integrations:
         aws-sts - {key: "", secret: "", roleArns: ""}
         aws-cross - {}
         gcp-cross - {}
+        gcp-workload-identity-federation - {"secret": "", "content": "{}", "clientId": ""}

         **Prerequisites**: You must be an *owner* in the organization.

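A usage sketch for the newly documented integration type. The organization name and option values are hypothetical, and the create call assumes the signature of this repository's create method (integrations_type, name, options):

import dtlpy as dl

org = dl.organizations.get(organization_name='my-org')  # hypothetical name
integration = org.integrations.create(
    integrations_type='gcp-workload-identity-federation',
    name='my-gcp-wif',
    options={'secret': '<secret>', 'content': '{}', 'clientId': '<client-id>'},
)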
dtlpy/repositories/models.py
@@ -1,14 +1,16 @@
-import json
-
+import time
 from typing import List
 import logging
-import os

 from .. import entities, repositories, exceptions, miscellaneous
 from ..services.api_client import ApiClient

 logger = logging.getLogger(name='dtlpy')

+MIN_INTERVAL = 1
+BACKOFF_FACTOR = 1.2
+MAX_INTERVAL = 12
+

 class Models:
     """
@@ -197,7 +199,7 @@ class Models:
     def _set_model_filter(self,
                           metadata: dict,
                           train_filter: entities.Filters = None,
-                          validation_filter: entities.Filters = None, ):
+                          validation_filter: entities.Filters = None):
         if metadata is None:
             metadata = {}
         if 'system' not in metadata:
@@ -283,6 +285,7 @@ class Models:
                output_type=None,
                train_filter: entities.Filters = None,
                validation_filter: entities.Filters = None,
+               app: entities.App = None
                ) -> entities.Model:
         """
         Create a Model entity
@@ -304,6 +307,7 @@ class Models:
         :param str output_type: dl.AnnotationType - the type of annotations the model produces (class, box, segment, text, etc.)
         :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
         :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
+        :param dtlpy.entities.App app: App entity to connect the model to
         :return: Model Entity

         **Example**:
@@ -340,10 +344,12 @@ class Models:
                 raise exceptions.PlatformException('Please provide project_id')
             project_id = self._project.id
         else:
-            if project_id != self._project_id and not package.is_global:
-                logger.warning(
-                    "Note! you are specified project_id {!r} which is different from repository context: {!r}".format(
-                        project_id, self._project_id))
+            if project_id != self._project_id:
+                if (isinstance(package, entities.Package) and not package.is_global) or \
+                        (isinstance(package, entities.Dpk) and package.scope != 'public'):
+                    logger.warning(
+                        "Note! The specified project_id {!r} differs from the repository context: {!r}".format(
+                            project_id, self._project_id))

         if model_artifacts is None:
             model_artifacts = []
@@ -365,6 +371,29 @@ class Models:
                    'outputType': output_type,
                    }

+        if app is not None:
+            if not isinstance(package, entities.Dpk):
+                raise ValueError('package must be a Dpk entity')
+            if app.dpk_name != package.name or app.dpk_version != package.version:
+                raise ValueError('App and package must be the same')
+            component_name = None
+            compute_config = None
+            for model in package.components.models:
+                if model['name'] == model_name:
+                    component_name = model['name']
+                    compute_config = model.get('computeConfigs', None)
+                    break
+            if component_name is None:
+                raise ValueError('Model name not found in package')
+            payload['app'] = {
+                "id": app.id,
+                "componentName": component_name,
+                "dpkName": package.name,
+                "dpkVersion": package.version
+            }
+            if compute_config is not None:
+                payload['app']['computeConfig'] = compute_config
+
         if configuration is not None:
             payload['configuration'] = configuration
@@ -378,7 +407,8 @@ class Models:
             payload['status'] = status

         if train_filter or validation_filter:
-            metadata = self._set_model_filter(metadata={}, train_filter=train_filter,
+            metadata = self._set_model_filter(metadata={},
+                                              train_filter=train_filter,
                                               validation_filter=validation_filter)
             payload['metadata'] = metadata

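How the new app parameter ties together: the package argument must be the Dpk the app was installed from, and model_name must match a model component declared in that dpk. A sketch (names and ids hypothetical; parameter names beyond those visible in these hunks are assumptions):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')       # hypothetical name
dpk = project.dpks.get(dpk_name='my-model-dpk')            # hypothetical name
app = project.apps.install(dpk=dpk)
model = project.models.create(model_name='my-model',       # must match a model component in the dpk
                              package=dpk,
                              dataset_id='my-dataset-id',  # hypothetical id
                              app=app)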
@@ -411,6 +441,7 @@ class Models:
               tags: list = None,
               train_filter: entities.Filters = None,
               validation_filter: entities.Filters = None,
+              wait=True,
               ) -> entities.Model:
         """
         Clones and creates a new model out of an existing one
416
447
  Clones and creates a new model out of existing one
@@ -427,6 +458,7 @@ class Models:
427
458
  :param list tags: `list` of `str` - label of the model
428
459
  :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
429
460
  :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
461
+ :param bool wait: `bool` wait for model to be ready
430
462
  :return: dl.Model which is a clone version of the existing model
431
463
  """
432
464
  from_json = {"name": model_name,
@@ -478,9 +510,24 @@ class Models:
                                              client_api=self._client_api,
                                              project=self._project,
                                              package=from_model._package)
-
+        if wait:
+            new_model = self.wait_for_model_ready(model=new_model)
         return new_model

+    def wait_for_model_ready(self, model: entities.Model):
+        """
+        Wait for a model to finish cloning and become ready.
+
+        :param model: Model entity
+        """
+        sleep_time = MIN_INTERVAL
+        while model.status == entities.ModelStatus.CLONING:
+            model = self.get(model_id=model.id)
+            time.sleep(sleep_time)
+            sleep_time = min(sleep_time * BACKOFF_FACTOR, MAX_INTERVAL)
+        time.sleep(sleep_time)
+        return model
+
     @property
     def platform_url(self):
         return self._client_api._get_resource_url("projects/{}/models".format(self.project.id))
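A sketch of the new polling behavior (names hypothetical). With the default wait=True, clone blocks until the model leaves the CLONING state; with wait=False the caller can poll later:

import dtlpy as dl

project = dl.projects.get(project_name='my-project')        # hypothetical name
base = project.models.get(model_name='base-model')          # hypothetical name
clone = project.models.clone(from_model=base,
                             model_name='my-clone',
                             wait=False)                     # return immediately
clone = project.models.wait_for_model_ready(model=clone)    # poll with backoff until ready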
dtlpy/repositories/packages.py
@@ -155,7 +155,8 @@ class Packages:
             package_name: str = None,
             package_id: str = None,
             checkout: bool = False,
-            fetch=None) -> entities.Package:
+            fetch=None,
+            log_error=True) -> entities.Package:
         """
         Get Package object to use in your code.

@@ -165,6 +166,7 @@ class Packages:
         :param str package_name: package name
         :param bool checkout: set the package as a default package object (cookies)
         :param fetch: optional - fetch entity from platform, default taken from cookie
+        :param bool log_error: whether to log an error if the package is not found
         :return: Package object
         :rtype: dtlpy.entities.package.Package

@@ -187,7 +189,8 @@ class Packages:
         if package_id is not None:
             success, response = self._client_api.gen_request(
                 req_type="get",
-                path="/packages/{}".format(package_id),
+                path="/packages/{}".format(package_id),
+                log_error=log_error)
             if not success:
                 raise exceptions.PlatformException(response)
             package = entities.Package.from_json(client_api=self._client_api,
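The new flag is useful when probing for a package that may not exist, so a miss does not pollute the logs; a sketch (the id is hypothetical):

import dtlpy as dl

try:
    package = dl.packages.get(package_id='maybe-missing-id', log_error=False)
except dl.exceptions.NotFound:
    package = None  # absent package handled quietly, no error log emitted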
dtlpy/repositories/pipeline_executions.py
@@ -257,7 +257,7 @@ class PipelineExecutions:

         :param pipeline_id: pipeline id
         :param filters: Filters entity for a filtering before execute
-        :param execution_inputs: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
+        :param execution_inputs: list of dl.FunctionIO or dict of pipeline inputs representing the extra inputs - example {'item': 'item_id'}
         :param bool wait: wait until create task finish
         :return: entities.PipelineExecution object
         :rtype: dtlpy.entities.pipeline_execution.PipelineExecution
@@ -277,22 +277,25 @@ class PipelineExecutions:

         if filters is None:
             raise exceptions.PlatformException('400', 'Please provide filter')
-        customer_input = dict()
+        extra_input = dict()
+
+        if execution_inputs is None:
+            execution_inputs = {}

         if isinstance(execution_inputs, dict):
-            customer_input = execution_inputs
+            extra_input = execution_inputs
         else:
             if not isinstance(execution_inputs, list):
                 execution_inputs = [execution_inputs]
             if len(execution_inputs) > 0 and isinstance(execution_inputs[0], entities.FunctionIO):
                 for single_input in execution_inputs:
-                    customer_input.update(single_input.to_json(resource='execution'))
+                    extra_input.update(single_input.to_json(resource='execution'))
             else:
                 raise exceptions.PlatformException('400', 'Unknown input type')
         payload = dict()
         payload['batch'] = dict()
         payload['batch']['query'] = filters.prepare()
-        payload['batch']['args'] = customer_input
+        payload['batch']['args'] = extra_input

         success, response = self._client_api.gen_request(
             path='/pipelines/{}/execute'.format(pipeline_id),
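For reference, the request body assembled here ends up shaped as follows; a sketch with field names taken from this hunk and hypothetical values:

import dtlpy as dl

filters = dl.Filters(field='dir', values='/incoming')   # hypothetical item scope
payload = {
    'batch': {
        'query': filters.prepare(),       # the item query the pipeline runs over
        'args': {'item': 'my-item-id'},   # extra pipeline inputs, from a dict or FunctionIO entities
    },
}
# the repository then POSTs this payload to /pipelines/{pipeline_id}/execute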