dtlpy 1.90.37__py3-none-any.whl → 1.91.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. dtlpy/__version__.py +1 -1
  2. dtlpy/assets/lock_open.png +0 -0
  3. dtlpy/entities/analytic.py +118 -98
  4. dtlpy/entities/annotation.py +22 -31
  5. dtlpy/entities/annotation_collection.py +19 -21
  6. dtlpy/entities/app.py +13 -3
  7. dtlpy/entities/assignment.py +6 -0
  8. dtlpy/entities/base_entity.py +0 -24
  9. dtlpy/entities/dataset.py +1 -1
  10. dtlpy/entities/dpk.py +15 -0
  11. dtlpy/entities/execution.py +13 -1
  12. dtlpy/entities/filters.py +85 -6
  13. dtlpy/entities/integration.py +1 -1
  14. dtlpy/entities/item.py +26 -0
  15. dtlpy/entities/node.py +38 -2
  16. dtlpy/entities/ontology.py +61 -0
  17. dtlpy/entities/package_function.py +2 -0
  18. dtlpy/entities/package_module.py +13 -0
  19. dtlpy/entities/pipeline_execution.py +14 -6
  20. dtlpy/entities/prompt_item.py +10 -0
  21. dtlpy/entities/recipe.py +37 -0
  22. dtlpy/entities/service.py +31 -2
  23. dtlpy/ml/base_model_adapter.py +92 -2
  24. dtlpy/repositories/apps.py +12 -12
  25. dtlpy/repositories/assignments.py +1 -1
  26. dtlpy/repositories/datasets.py +1 -1
  27. dtlpy/repositories/dpks.py +29 -0
  28. dtlpy/repositories/executions.py +27 -30
  29. dtlpy/repositories/features.py +4 -1
  30. dtlpy/repositories/packages.py +6 -3
  31. dtlpy/repositories/pipeline_executions.py +5 -5
  32. dtlpy/repositories/services.py +28 -7
  33. dtlpy/repositories/tasks.py +8 -2
  34. dtlpy/repositories/uploader.py +2 -2
  35. dtlpy/services/api_client.py +15 -9
  36. {dtlpy-1.90.37.dist-info → dtlpy-1.91.37.dist-info}/METADATA +2 -2
  37. {dtlpy-1.90.37.dist-info → dtlpy-1.91.37.dist-info}/RECORD +50 -45
  38. tests/assets/__init__.py +0 -0
  39. tests/assets/models_flow/__init__.py +0 -0
  40. tests/assets/models_flow/failedmain.py +52 -0
  41. tests/assets/models_flow/main.py +51 -0
  42. tests/assets/models_flow/main_model.py +54 -0
  43. tests/features/environment.py +38 -1
  44. {dtlpy-1.90.37.data → dtlpy-1.91.37.data}/scripts/dlp +0 -0
  45. {dtlpy-1.90.37.data → dtlpy-1.91.37.data}/scripts/dlp.bat +0 -0
  46. {dtlpy-1.90.37.data → dtlpy-1.91.37.data}/scripts/dlp.py +0 -0
  47. {dtlpy-1.90.37.dist-info → dtlpy-1.91.37.dist-info}/LICENSE +0 -0
  48. {dtlpy-1.90.37.dist-info → dtlpy-1.91.37.dist-info}/WHEEL +0 -0
  49. {dtlpy-1.90.37.dist-info → dtlpy-1.91.37.dist-info}/entry_points.txt +0 -0
  50. {dtlpy-1.90.37.dist-info → dtlpy-1.91.37.dist-info}/top_level.txt +0 -0
@@ -145,6 +145,16 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         """
         raise NotImplementedError("Please implement 'predict' method in {}".format(self.__class__.__name__))
 
+    def extract_features(self, batch, **kwargs):
+        """ Extract model features on a batch of images
+
+        Virtual method - must be implemented by the adapter
+
+        :param batch: `np.ndarray`
+        :return: `list[list]` one feature vector per image / item in the batch
+        """
+        raise NotImplementedError("Please implement 'extract_features' method in {}".format(self.__class__.__name__))
+
     def evaluate(self, model: entities.Model, dataset: entities.Dataset, filters: entities.Filters) -> entities.Model:
         """
         This function evaluates the model prediction on a dataset (with GT annotations).
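The new `extract_features` hook is the embedding counterpart of `predict`: an adapter overrides it to turn a prepared batch into one vector per item. A minimal sketch of an implementing adapter — the class name, the 256-dimension size, and the random stand-in model are illustrative, not part of this release:

    import numpy as np
    import dtlpy as dl

    class MyAdapter(dl.BaseModelAdapter):
        def load(self, local_path, **kwargs):
            # stand-in for loading real weights from local_path
            self.model = lambda batch: np.random.rand(len(batch), 256)

        def extract_features(self, batch, **kwargs):
            # one embedding (a plain list of floats) per item in the batch
            return [vector.tolist() for vector in self.model(batch)]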
@@ -263,7 +273,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                     values=False,
                     operator=entities.FiltersOperations.EXISTS,
                     resource=entities.FiltersResource.ANNOTATION
-                   )
+                    )
 
         ret_list = dataset.items.download(filters=filters,
                                           local_path=data_subset_base_path,
@@ -352,7 +362,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         pool = ThreadPoolExecutor(max_workers=16)
 
         annotations = list()
-        for i_batch in tqdm.tqdm(range(0, len(items), batch_size), desc='predicting', unit='bt', leave=None, file=sys.stdout):
+        for i_batch in tqdm.tqdm(range(0, len(items), batch_size), desc='predicting', unit='bt', leave=None,
+                                 file=sys.stdout):
             batch_items = items[i_batch: i_batch + batch_size]
             batch = list(pool.map(self.prepare_item_func, batch_items))
             batch_collections = self.predict(batch, **kwargs)
@@ -385,6 +396,72 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         pool.shutdown()
         return items, annotations
 
+    @entities.Package.decorators.function(display_name='Extract Feature',
+                                          inputs={'items': 'Item[]'},
+                                          outputs={'items': 'Item[]', 'features': '[]'})
+    def extract_item_features(self, items: list, upload_features=True, batch_size=None, **kwargs):
+        """
+        Extract features from an input list of items (or a single item) and return the items and the feature vectors.
+
+        :param items: `List[dl.Item]` list of items to predict
+        :param upload_features: `bool` upload the features onto the given items
+        :param batch_size: `int` size of the batch to run in a single inference
+
+        :return: `List[dl.Item]`, `List[List[vector]]`
+        """
+        if batch_size is None:
+            batch_size = self.configuration.get('batch_size', 4)
+        input_type = self.model_entity.input_type
+        self.logger.debug(
+            "Predicting {} items, using batch size {}. input type: {}".format(len(items), batch_size, input_type))
+        pool = ThreadPoolExecutor(max_workers=16)
+
+        vectors = list()
+        feature_set_name = self.configuration.get('featureSetName', self.model_entity.name)
+        try:
+            feature_set = self.model_entity.project.feature_sets.get(feature_set_name)
+            logger.info(f'Feature Set found! name: {feature_set_name}')
+        except exceptions.NotFound:
+            logger.info('Feature Set not found. creating...')
+            feature_set = self.model_entity.project.feature_sets.create(name=feature_set_name,
+                                                                        entity_type=entities.FeatureEntityType.ITEM,
+                                                                        project_id=self.model_entity.project_id,
+                                                                        set_type=self.model_entity.name,
+                                                                        size=self.configuration.get('embeddings_size', 256))
+            if 'featureSetName' not in self.model_entity.configuration:
+                self.model_entity.configuration['featureSetName'] = feature_set_name
+                self.model_entity.update()
+            logger.info(f'Feature Set created! name: {feature_set.name}, id: {feature_set.id}')
+
+        feature_set_id = feature_set.id
+        project_id = self.model_entity.project_id
+
+        for i_batch in tqdm.tqdm(range(0, len(items), batch_size),
+                                 desc='predicting',
+                                 unit='bt',
+                                 leave=None,
+                                 file=sys.stdout):
+            batch_items = items[i_batch: i_batch + batch_size]
+            batch = list(pool.map(self.prepare_item_func, batch_items))
+            batch_vectors = self.extract_features(batch, **kwargs)
+            batch_features = list()
+            if upload_features is True:
+                self.logger.debug(
+                    "Uploading items' feature vectors for model {!r}.".format(self.model_entity.name))
+                try:
+                    batch_features = list(pool.map(partial(self._upload_model_features,
+                                                           feature_set_id,
+                                                           project_id),
+                                                   batch_items,
+                                                   batch_vectors))
+                except Exception:
+                    self.logger.exception("Failed to upload feature vectors to items.")
+
+            vectors.extend(batch_features)
+        pool.shutdown()
+        return items, vectors
+
     @entities.Package.decorators.function(display_name='Predict Dataset with DQL',
                                           inputs={'dataset': 'Dataset',
                                                   'filters': 'Json'})
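`extract_item_features` ties the pieces together: it finds or creates the feature set named by `configuration['featureSetName']` (defaulting to the model name), calls `extract_features` per batch, and uploads one vector per item. A usage sketch, assuming the `MyAdapter` subclass above and placeholder project/model names:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')   # placeholder name
    model = project.models.get(model_name='my-model')      # placeholder name
    adapter = MyAdapter(model_entity=model)
    items = list(model.dataset.items.list().all())
    items, vectors = adapter.extract_item_features(items=items, upload_features=True)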
@@ -571,6 +648,19 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
     # =============
     # INNER METHODS
     # =============
+
+    @staticmethod
+    def _upload_model_features(feature_set_id, project_id, item: entities.Item, vector):
+        try:
+            feature = item.features.create(value=vector,
+                                           project_id=project_id,
+                                           feature_set_id=feature_set_id,
+                                           entity=item)
+            return feature
+        except Exception as e:
+            logger.error(f'Failed to upload feature vector of length {len(vector)} to item {item.id}, Error: {e}')
+            return []
+
     def _upload_model_annotations(self, item: entities.Item, predictions, clean_annotations):
         """
         Utility function that uploads predictions to the dlp platform based on the package.output_type
@@ -280,15 +280,15 @@ class Apps:
         """
         if app_id is not None and app is None:
             app = self.get(app_id=app_id)
+
+        if app and app.status == entities.CompositionStatus.INSTALLED:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Application is already active'
+            )
         if app is None:
             raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
 
-        if app and app.status == entities.CompositionStatus.INSTALLED:
-            raise exceptions.PlatformException(
-                error='400',
-                message='Application is already active'
-            )
-
         success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/activate'.format(app.id))
         if not success:
             raise exceptions.PlatformException(response)
@@ -313,15 +313,15 @@ class Apps:
         """
         if app_id is not None and app is None:
             app = self.get(app_id=app_id)
+
+        if app and app.status == entities.CompositionStatus.UNINSTALLED:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Application is already inactive'
+            )
         if app is None:
             raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
 
-        if app and app.status == entities.CompositionStatus.UNINSTALLED:
-            raise exceptions.PlatformException(
-                error='400',
-                message='Application is already inactive'
-            )
-
         success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/deactivate'.format(app.id))
         if not success:
             raise exceptions.PlatformException(response)
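Both guards now short-circuit before any request is sent. A defensive sketch around the activate flow — the hunk headers only show `class Apps:`, so the `activate` method name, like the project and app names, is an assumption here:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')   # placeholder name
    app = project.apps.get(app_name='my-app')              # placeholder name
    try:
        project.apps.activate(app=app)                     # assumed method name for this flow
    except dl.exceptions.PlatformException as err:
        print(err)  # e.g. 'Application is already active'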
@@ -117,7 +117,7 @@ class Assignments:
         elif self._project is not None:
             project_ids = [self._project.id]
         else:
-            raise ('400', 'Must provide project')
+            raise exceptions.PlatformException(error='400', message='Must provide project')
 
         project_ids = ','.join(project_ids)
         query.append('projects={}'.format(project_ids))
@@ -139,7 +139,7 @@ class Datasets:
         if dataset is not None:
             dataset.open_in_web()
         elif dataset_id is not None:
-            self._client_api._open_in_web(url=self.platform_url + '/' + str(dataset_id))
+            self._client_api._open_in_web(url=f'{self.platform_url}/{dataset_id}/items')
         else:
             self._client_api._open_in_web(url=self.platform_url)
 
@@ -309,6 +309,35 @@ class Dpks:
             raise exceptions.PlatformException(response)
         return response.json()
 
+    def get_revisions(self, dpk_id: str, version: str):
+        """
+        Get the revision of a specific dpk.
+
+        :param str dpk_id: the id of the dpk.
+        :param str version: the version of the dpk.
+        :return: the entity of the dpk
+        :rtype: entities.Dpk
+
+        **Example**
+
+        .. code-block:: python
+
+            dpk = dl.dpks.get_revisions(dpk_id='id', version='1.0.0')
+        """
+        if dpk_id is None or version is None:
+            raise ValueError('You must provide both dpk_id and version')
+        url = '/app-registry/{}/revisions/{}'.format(dpk_id, version)
+
+        # request
+        success, response = self._client_api.gen_request(req_type='get',
+                                                         path=url)
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        dpk = entities.Dpk.from_json(_json=response.json(),
+                                     client_api=self._client_api,
+                                     project=self._project,
+                                     is_fetched=False)
+        return dpk
+
     def get(self, dpk_name: str = None, dpk_version: str = None, dpk_id: str = None) -> entities.Dpk:
         """
         Get a specific dpk from the platform.
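`get_revisions` resolves one published revision of a dpk by id and semantic version. A usage sketch with placeholder identifiers:

    import dtlpy as dl

    dpk = dl.dpks.get(dpk_name='my-dpk')                              # placeholder name
    revision = dl.dpks.get_revisions(dpk_id=dpk.id, version='1.0.0')  # placeholder version
    print(revision.name, revision.version)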
@@ -7,6 +7,7 @@ from .. import exceptions, entities, repositories, miscellaneous, _api_reference
 from ..services.api_client import ApiClient
 
 logger = logging.getLogger(name='dtlpy')
+MAX_SLEEP_TIME = 30
 
 
 class Executions:
@@ -562,15 +563,19 @@ class Executions:
         return execution
 
     def wait(self,
-             execution_id: str,
-             timeout: int = None):
+             execution_id: str = None,
+             execution: entities.Execution = None,
+             timeout: int = None,
+             backoff_factor=1):
         """
         Get Service execution object.
 
         **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
 
         :param str execution_id: execution id
+        :param execution: `dl.Execution` entity, optional. Must input one of execution or execution_id
         :param int timeout: seconds to wait until TimeoutError is raised. if None or <=0 - wait until done
+        :param float backoff_factor: a backoff factor to apply between attempts after the second try
         :return: Service execution object
         :rtype: dtlpy.entities.execution.Execution
 
@@ -580,40 +585,32 @@ class Executions:
 
             service.executions.wait(execution_id='execution_id')
         """
-        url_path = "/executions/{}".format(execution_id)
+        if execution is None:
+            if execution_id is None:
+                raise ValueError('Must input at least one: [execution, execution_id]')
+            else:
+                execution = self.get(execution_id=execution_id)
         elapsed = 0
         start = int(time.time())
-        if timeout is not None and timeout <= 0:
+        if timeout is None or timeout <= 0:
             timeout = np.inf
 
-        i = 1
-        while True:
-            success, response = self._client_api.gen_request(req_type="get",
-                                                             path=url_path,
-                                                             log_error=False)
-            if not success:
-                raise exceptions.PlatformException(response)
-            # return entity
-            execution = entities.Execution.from_json(client_api=self._client_api,
-                                                     _json=response.json(),
-                                                     project=self._project,
-                                                     service=self._service)
-            if timeout is None:
-                timeout = execution.service.execution_timeout + 60
-            if execution.latest_status['status'] in ['failed', 'success', 'terminated', 'aborted', 'canceled',
-                                                     'system-failure']:
+        num_tries = 1
+        while elapsed < timeout:
+            execution = self.get(execution_id=execution.id)
+            if not execution.in_progress():
                 break
-            elapsed = int(time.time()) - start
-            i += 1
-            if i > 18 or elapsed > timeout:
-                break
-            sleep_time = np.minimum(timeout - elapsed, 2 ** i)
+            elapsed = time.time() - start
+            if elapsed >= timeout:
+                raise TimeoutError(
+                    f"execution wait() got timeout. id: {execution.id!r}, status: {execution.latest_status}")
+            sleep_time = np.min([timeout - elapsed, backoff_factor * (2 ** num_tries), MAX_SLEEP_TIME])
+            num_tries += 1
+            logger.debug("Execution {!r} is running for {:.2f}[s] and now going to sleep {:.2f}[s]".format(execution.id,
+                                                                                                           elapsed,
+                                                                                                           sleep_time))
             time.sleep(sleep_time)
-        if execution is None:
-            raise ValueError('Nothing to wait for')
-        if elapsed >= timeout:
-            raise TimeoutError("execution wait() got timeout. id: {!r}, status: {}".format(
-                execution.id, execution.latest_status))
+
         return execution
 
     @_api_reference.add(path='/executions/{id}/terminate', method='post')
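The rewritten loop refetches the execution through `self.get` and sleeps `min(timeout - elapsed, backoff_factor * 2**n, MAX_SLEEP_TIME)` seconds between polls, so waits back off exponentially but never exceed 30 seconds. A usage sketch with placeholder ids and function name:

    import dtlpy as dl

    service = dl.services.get(service_id='service-id')   # placeholder id
    execution = service.execute(function_name='run')     # placeholder function name
    # wait up to 10 minutes, sleeping ~1s, 2s, 4s, ... capped at 30s between polls
    execution = service.executions.wait(execution=execution, timeout=600, backoff_factor=0.5)
    print(execution.latest_status)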
@@ -89,6 +89,9 @@ class Features:
         if filters is None:
             filters = entities.Filters(resource=entities.FiltersResource.FEATURE)
             filters._user_query = 'false'
+        # default sorting
+        if filters.sort == dict():
+            filters.sort_by(field='id')
         # assert type filters
         if not isinstance(filters, entities.Filters):
             raise exceptions.PlatformException(error='400',
@@ -149,7 +152,7 @@ class Features:
         :param immutable value: actual vector - immutable (list of floats [1,2,3])
         :param str project_id: the id of the project where feature will be created
         :param str feature_set_id: ref to a featureSet this vector is a part of
-        :param entity: the entity the featureVector is linked to (item.id, annotation.id etc)
+        :param entity: the entity the featureVector is linked to (item, annotation, etc)
         :param str version: version of the featureSet generator
         :param str parent_id: optional: parent FeatureSet id - used when FeatureVector is a subFeature
         :param str org_id: the id of the org where featureVector will be created
@@ -664,9 +664,12 @@ class Packages:
         :return: Package object
         :rtype: dtlpy.entities.package.Package
         """
-        # if is dtlpy entity convert to dict
-        if modules and isinstance(modules[0], entities.PackageModule):
-            modules = [module.to_json() for module in modules]
+        if modules is not None:
+            if not isinstance(modules, list):
+                modules = [modules]
+
+            if isinstance(modules[0], entities.PackageModule):
+                modules = [module.to_json() for module in modules]
 
         if slots and isinstance(slots[0], entities.PackageSlot):
             slots = [slot.to_json() for slot in slots]
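The list-wrapping guard means a single `PackageModule` can now be passed directly. A sketch, assuming this hunk belongs to the push flow and using placeholder names and paths:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')         # placeholder name
    module = dl.PackageModule(name='default_module',
                              entry_point='main.py',
                              functions=[dl.PackageFunction(name='run')])
    package = project.packages.push(package_name='my-package',   # placeholder name
                                    src_path='/path/to/code',    # placeholder path
                                    modules=module)              # no [module] wrapping needed since 1.91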
@@ -81,7 +81,7 @@ class PipelineExecutions:
         if pipeline_id is None and self._pipeline is None:
             raise exceptions.PlatformException('400', 'Must provide param pipeline_id')
         elif pipeline_id is None:
-            pipeline_id = self.pipeline.id
+            pipeline_id = self._pipeline.id
 
         success, response = self._client_api.gen_request(
             req_type="get",
@@ -93,13 +93,13 @@ class PipelineExecutions:
         if not success:
             raise exceptions.PlatformException(response)
 
-        pipeline = entities.PipelineExecution.from_json(
+        pipeline_execution = entities.PipelineExecution.from_json(
             client_api=self._client_api,
             _json=response.json(),
             pipeline=self._pipeline
         )
 
-        return pipeline
+        return pipeline_execution
 
     def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.PipelineExecution]:
         pool = self._client_api.thread_pools(pool_name='entity.create')
@@ -175,7 +175,7 @@ class PipelineExecutions:
         # filters.add(field='projectId', values=self.project.id)
 
         if self._pipeline is not None:
-            filters.add(field='pipelineId', values=self.pipeline.id)
+            filters.add(field='pipelineId', values=self._pipeline.id)
 
         paged = entities.PagedEntities(
             items_repository=self,
@@ -344,7 +344,7 @@ class PipelineExecutions:
         if pipeline_id is None and self._pipeline is None:
             raise exceptions.PlatformException('400', 'Must provide param pipeline_id')
         elif pipeline_id is None:
-            pipeline_id = self.pipeline.id
+            pipeline_id = self._pipeline.id
 
         if filters is None:
             filters = entities.Filters(resource=entities.FiltersResource.PIPELINE_EXECUTION)
@@ -25,7 +25,7 @@ class Services:
     def __init__(self,
                  client_api: ApiClient,
                  project: entities.Project = None,
-                 package: entities.Package = None,
+                 package: Union[entities.Package, entities.Dpk] = None,
                  project_id=None,
                  model_id=None,
                  model: entities.Model = None):
@@ -52,12 +52,12 @@ class Services:
             raise exceptions.PlatformException(
                 error='2001',
                 message='Cannot perform action WITHOUT package entity in services repository. Please set a package')
-        assert isinstance(self._package, entities.Package)
+        assert isinstance(self._package, (entities.Package, entities.Dpk))
         return self._package
 
     @package.setter
-    def package(self, package: entities.Package):
-        if not isinstance(package, entities.Package):
+    def package(self, package: Union[entities.Package, entities.Dpk]):
+        if not isinstance(package, (entities.Package, entities.Dpk)):
             raise ValueError('Must input a valid package entity')
         self._package = package
 
@@ -605,6 +605,7 @@ class Services:
                 on_reset: str = None,
                 max_attempts: int = None,
                 secrets=None,
+                integrations=None,
                 **kwargs
                 ) -> entities.Service:
         """
@@ -631,6 +632,7 @@ class Services:
         :param int max_attempts: Maximum execution retries in-case of a service reset
         :param bool force: optional - terminate old replicas immediately
         :param list secrets: list of the integrations ids
+        :param list integrations: list of the integrations
         :param kwargs:
         :return: Service object
         :rtype: dtlpy.entities.service.Service
@@ -691,6 +693,11 @@ class Services:
                 secrets = [secrets]
             payload['secrets'] = secrets
 
+        if integrations is not None:
+            if not isinstance(integrations, list):
+                integrations = [integrations]
+            payload['integrations'] = integrations
+
         if runtime is not None:
             if isinstance(runtime, entities.KubernetesRuntime):
                 runtime = runtime.to_json()
@@ -748,7 +755,7 @@ class Services:
         )
 
     @_api_reference.add(path='/services/{id}', method='delete')
-    def delete(self, service_name: str = None, service_id: str = None):
+    def delete(self, service_name: str = None, service_id: str = None, force=False):
         """
         Delete Service object
 
@@ -756,6 +763,7 @@ class Services:
 
         You must provide at least ONE of the following params: service_id, service_name.
 
+        :param bool force: optional - if true, the delete request is sent with ?force=true
         :param str service_name: by name
         :param str service_id: by id
         :return: True
@@ -774,10 +782,14 @@ class Services:
         else:
             service_id = self.get(service_name=service_name).id
 
+        path = "/services/{}".format(service_id)
+        if force:
+            path = '{}?force=true'.format(path)
+
         # request
         success, response = self._client_api.gen_request(
             req_type="delete",
-            path="/services/{}".format(service_id)
+            path=path
         )
         if not success:
             raise exceptions.PlatformException(response)
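A one-liner using the new flag, with placeholder names:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')  # placeholder name
    # force=True appends ?force=true to DELETE /services/{id}
    project.services.delete(service_name='my-service', force=True)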
@@ -1168,6 +1180,7 @@ class Services:
                on_reset: str = None,
                force: bool = False,
                secrets: list = None,
+               integrations: list = None,
                **kwargs) -> entities.Service:
         """
         Deploy service.
@@ -1196,6 +1209,7 @@ class Services:
         :param str on_reset: what happens on reset
         :param bool force: optional - if true, terminate old replicas immediately
         :param list secrets: list of the integrations ids
+        :param list integrations: list of the integrations
         :param kwargs: list of additional arguments
         :return: Service object
         :rtype: dtlpy.entities.service.Service
@@ -1218,6 +1232,8 @@ class Services:
                 )
             )
         """
+        if package is not None and isinstance(package, entities.Dpk):
+            raise exceptions.PlatformException('400', 'cannot deploy a dpk package. Please install the app')
         package = package if package is not None else self._package
         if service_name is None:
             get_name = False
@@ -1290,6 +1306,10 @@ class Services:
                 if not isinstance(secrets, list):
                     secrets = [secrets]
                 service.secrets = secrets
+            if integrations is not None:
+                if not isinstance(integrations, list):
+                    integrations = [integrations]
+                service.integrations = integrations
             service = self.update(service=service, force=force)
         else:
             service = self._create(service_name=service_name,
@@ -1312,7 +1332,8 @@ class Services:
                                    drain_time=drain_time,
                                    max_attempts=max_attempts,
                                    on_reset=on_reset,
-                                   secrets=secrets
+                                   secrets=secrets,
+                                   integrations=integrations,
                                    )
         if checkout:
             self.checkout(service=service)
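With that, `deploy` forwards integrations through both the update path and `_create`. A sketch with placeholder values; the diff does not show the expected shape of each entry, so the dict below is purely illustrative:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')        # placeholder name
    package = project.packages.get(package_name='my-package')   # placeholder name
    integrations = [{'id': 'integration-id', 'key': 'MY_KEY'}]  # hypothetical entry shape
    service = package.services.deploy(service_name='my-service',
                                      integrations=integrations)  # a single entry is auto-wrapped in a list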
@@ -585,7 +585,8 @@ class Tasks:
                consensus_task_type=None,
                consensus_percentage=None,
                consensus_assignees=None,
-               scoring=True
+               scoring=True,
+               enforce_video_conversion=True,
                ) -> entities.Task:
         """
         Create a new Task (Annotation or QA).
@@ -619,6 +620,7 @@ class Tasks:
         :param int consensus_percentage: percentage of items to be copied to multiple annotators (consensus items)
         :param int consensus_assignees: the number of different annotators per item (number of copies per item)
         :param bool scoring: create a scoring app in project
+        :param bool enforce_video_conversion: enforce WEBM conversion on video items for frame-accurate annotations. WEBM conversion runs as a project service and incurs compute costs; service compute resources can be set according to the planned workload.
         :return: Task object
         :rtype: dtlpy.entities.task.Task
 
@@ -628,7 +630,8 @@ class Tasks:
 
             dataset.tasks.create(task_name='task_entity',
                                  due_date=datetime.datetime(day=1, month=1, year=2029).timestamp(),
-                                 assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'])
+                                 assignee_ids=['annotator1@dataloop.ai', 'annotator2@dataloop.ai'],
+                                 available_actions=[dl.ItemAction("discard"), dl.ItemAction("to-check")])
         """
 
         if dataset is None and self._dataset is None:
@@ -706,6 +709,9 @@ class Tasks:
         if task_parent_id is not None:
             payload['spec']['parentTaskId'] = task_parent_id
 
+        if not enforce_video_conversion:
+            payload['disableWebm'] = not enforce_video_conversion
+
         is_pulling = any([batch_size, max_batch_workload])
         is_consensus = any([consensus_percentage, consensus_assignees, consensus_task_type])
         if is_pulling and is_consensus:
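Only an explicit opt-out touches the payload (`disableWebm: true`); the default leaves the request unchanged. A sketch that creates a task without WEBM conversion, using placeholder names:

    import datetime
    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')   # placeholder name
    dataset = project.datasets.get(dataset_name='videos')  # placeholder name
    task = dataset.tasks.create(task_name='video-annotation',
                                due_date=datetime.datetime(day=1, month=1, year=2029).timestamp(),
                                assignee_ids=['annotator1@dataloop.ai'],
                                enforce_video_conversion=False)  # sends payload['disableWebm'] = True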
@@ -480,8 +480,8 @@ class Uploader:
             element.item_metadata = {}
             with open(element.annotations_filepath) as ann_f:
                 item_metadata = json.load(ann_f)
-            if 'metadata' in item_metadata and 'user' in item_metadata['metadata']:
-                element.item_metadata['user'] = item_metadata['metadata']['user']
+            if 'metadata' in item_metadata:
+                element.item_metadata = item_metadata['metadata']
             item, action = await self.__single_async_upload(filepath=element.buffer,
                                                             mode=mode,
                                                             item_metadata=element.item_metadata,
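The uploader now copies the entire `metadata` object from an item's annotation JSON rather than only `metadata.user`. Illustratively, for a hypothetical annotation file like the one below, `custom_field` was previously dropped and is now kept:

    # hypothetical contents of the annotation JSON sitting next to the uploaded item
    annotation_json = {
        "metadata": {
            "user": {"split": "train"},
            "custom_field": 123   # dropped before 1.91, kept from 1.91 on
        },
        "annotations": []
    }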
@@ -43,6 +43,12 @@ logger = logging.getLogger(name='dtlpy')
 threadLock = threading.Lock()
 
 
+def format_message(message):
+    if message:
+        return message.replace('\\n', '\n')
+    return message
+
+
 class VerboseLoggingLevel:
     DEBUG = "debug"
     INFO = "info"
@@ -61,9 +67,9 @@ class PlatformError(Exception):
         if hasattr(resp, 'status_code'):
             msg += '<Response [{}]>'.format(resp.status_code)
         if hasattr(resp, 'reason'):
-            msg += '<Reason [{}]>'.format(resp.reason)
+            msg += '<Reason [{}]>'.format(format_message(resp.reason))
         elif hasattr(resp, 'text'):
-            msg += '<Reason [{}]>'.format(resp.text)
+            msg += '<Reason [{}]>'.format(format_message(resp.text))
         super().__init__(msg)
 
 
@@ -777,7 +783,7 @@ class ApiClient:
         return information
 
     @property
-    def __base_gate_url(self):
+    def base_gate_url(self):
         if self.__gate_url_for_requests is None:
             self.__gate_url_for_requests = self.environment
             internal_requests_url = os.environ.get('INTERNAL_REQUESTS_URL', None)
@@ -801,7 +807,7 @@ class ApiClient:
 
         # prepare request
         req = requests.Request(method=req_type,
-                               url=self.__base_gate_url + path,
+                               url=self.base_gate_url + path,
                                json=json_req,
                                files=files,
                                data=data,
@@ -981,7 +987,7 @@ class ApiClient:
 
         # prepare request
         if is_dataloop:
-            full_url = self.environment + path
+            full_url = self.base_gate_url + path
             headers_req = self._build_request_headers(headers=headers)
         else:
             full_url = path
@@ -1018,7 +1024,7 @@ class ApiClient:
                             timeout=timeout) as session:
             try:
                 async with session._request(request=session._client.request,
-                                            url=self.environment + path,
+                                            url=self.base_gate_url + path,
                                             method=req_type,
                                             json=json_req,
                                             data=data,
@@ -1135,7 +1141,7 @@ class ApiClient:
             form.add_field('file', AsyncUploadStream(buffer=to_upload,
                                                      callback=callback,
                                                      name=uploaded_filename))
-            url = '{}?mode={}'.format(self.environment + remote_url, mode)
+            url = '{}?mode={}'.format(self.base_gate_url + remote_url, mode)
 
             # use SSL context
             ssl_context = None
@@ -1218,7 +1224,7 @@ class ApiClient:
                               pool_connections=np.sum(list(self._thread_pools_names.values())))
         self.session.mount('http://', adapter)
         self.session.mount('https://', adapter)
-        resp = self.session.send(request=prepared, stream=stream, verify=self.verify, timeout=None)
+        resp = self.session.send(request=prepared, stream=stream, verify=self.verify, timeout=120)
 
         with threadLock:
             self.calls_counter.add()
@@ -1337,7 +1343,7 @@ class ApiClient:
         if hasattr(resp, 'reason'):
             msg += '[Reason: {val}]'.format(val=resp.reason)
         if hasattr(resp, 'text'):
-            msg += '[Text: {val}]'.format(val=resp.text)
+            msg += '[Text: {val}]'.format(val=format_message(resp.text))
 
         request_id = resp.headers.get('x-request-id', 'na')
         logger.debug('--- [Request] Start ---')