dtlpy 1.94.5__py3-none-any.whl → 1.96.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -77,8 +77,8 @@ from .entities import (
  # triggers
  TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
  # faas
- FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
- InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
+ FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+ InstanceCatalog, PackageInputType, ServiceType, ServiceModeType, KubernetesRPSAutoscaler,
  PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
  # roberto
  DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact,
@@ -316,8 +316,8 @@ EXECUTION_STATUS_FAILED = ExecutionStatus.FAILED
  LINK_TYPE_ID = LinkTypeEnum.ID
  LINK_TYPE_URL = LinkTypeEnum.URL

- KUBERNETES_AUTUSCALER_TYPE_CPU = KubernetesAutuscalerType.CPU
- KUBERNETES_AUTUSCALER_TYPE_RABBITMQ = KubernetesAutuscalerType.RABBITMQ
+ KUBERNETES_AUTUSCALER_TYPE_CPU = KubernetesAutoscalerType.CPU
+ KUBERNETES_AUTUSCALER_TYPE_RABBITMQ = KubernetesAutoscalerType.RABBITMQ

  INSTANCE_CATALOG_REGULAR_XS = InstanceCatalog.REGULAR_XS
  INSTANCE_CATALOG_REGULAR_S = InstanceCatalog.REGULAR_S
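Note: the autoscaler enum was renamed from the misspelled KubernetesAutuscalerType to KubernetesAutoscalerType, while the old module-level constant names are kept and now simply point at the renamed enum. A minimal sketch of what remains valid (assuming `import dtlpy as dl`):

.. code-block:: python

    import dtlpy as dl

    # the constants keep their old (misspelled) names but resolve to the renamed enum
    assert dl.KUBERNETES_AUTUSCALER_TYPE_CPU == dl.KubernetesAutoscalerType.CPU
    # KubernetesAutoscalerType subclasses str, so plain string comparison still works
    assert dl.KubernetesAutoscalerType.CPU == 'cpu'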
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.94.5'
+ version = '1.96.8'
dtlpy/entities/__init__.py CHANGED
@@ -43,7 +43,7 @@ from .package_slot import PackageSlot, SlotPostAction, SlotPostActionType, SlotD
  UiBindingPanel
  from .package_function import PackageFunction, FunctionIO, PackageInputType
  from .time_series import TimeSeries
- from .service import Service, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, \
+ from .service import Service, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
  InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType
  from .execution import Execution, ExecutionStatus
  from .command import Command, CommandsStatus
dtlpy/entities/compute.py CHANGED
@@ -221,6 +221,18 @@ class ComputeCluster:
  'authentication': self.authentication.to_json()
  }

+ @classmethod
+ def from_setup_json(cls, devops_output, integration):
+ node_pools = [NodePool.from_json(n) for n in devops_output['config']['nodePools']]
+ return cls(
+ devops_output['config']['name'],
+ devops_output['config']['endpoint'],
+ devops_output['config']['kubernetesVersion'],
+ ClusterProvider(devops_output['config']['provider']),
+ node_pools,
+ {},
+ Authentication(AuthenticationIntegration(integration.id,integration.type))
+ )

  class ComputeContext:
  def __init__(self, labels: List[str], org: str, project: Optional[str] = None):
@@ -284,10 +296,10 @@ class Compute:
  return self._serviceDrivers

  def delete(self):
- return self._computes.delete(compute_id=self.id)
+ return self.computes.delete(compute_id=self.id)

  def update(self):
- return self._computes.update(compute=self)
+ return self.computes.update(compute=self)

  @classmethod
  def from_json(cls, _json, client_api: ApiClient):
dtlpy/entities/dataset.py CHANGED
@@ -535,7 +535,8 @@ class Dataset(entities.BaseEntity):
  with_items_annotations=True,
  with_metadata=True,
  with_task_annotations_status=True,
- dst_dataset_id=None
+ dst_dataset_id=None,
+ target_directory=None,
  ):
  """
  Clone dataset
@@ -548,6 +549,7 @@ class Dataset(entities.BaseEntity):
  :param bool with_metadata: clone metadata
  :param bool with_task_annotations_status: clone task annotations status
  :param str dst_dataset_id: destination dataset id
+ :param str target_directory: target directory
  :return: dataset object
  :rtype: dtlpy.entities.dataset.Dataset

@@ -567,7 +569,8 @@ class Dataset(entities.BaseEntity):
  with_metadata=with_metadata,
  with_items_annotations=with_items_annotations,
  with_task_annotations_status=with_task_annotations_status,
- dst_dataset_id=dst_dataset_id)
+ dst_dataset_id=dst_dataset_id,
+ target_directory=target_directory)

  def sync(self, wait=True):
  """
dtlpy/entities/filters.py CHANGED
@@ -196,6 +196,8 @@ class Filters:
  """
  if method is None:
  method = self.method
+ if 'metadata.system.refs.metadata' in field and self.resource == FiltersResource.ITEM:
+ logger.warning('Filtering by metadata.system.refs.metadata may cause incorrect results. please use match operator')

  # create SingleFilter object and add to self.filter_list
  if method == FiltersMethod.OR:
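Note: a minimal sketch of an item filter that now triggers the new warning (the field suffix and value are hypothetical):

.. code-block:: python

    import dtlpy as dl

    filters = dl.Filters(resource=dl.FiltersResource.ITEM)
    # any field under metadata.system.refs.metadata now logs a warning
    # recommending the match operator instead
    filters.add(field='metadata.system.refs.metadata.status', values='approved')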
dtlpy/entities/model.py CHANGED
@@ -388,13 +388,12 @@ class Model(entities.BaseEntity):

  @property
  def id_to_label_map(self):
+ # default
  if 'id_to_label_map' not in self.configuration:
- # default
- if self.ontology_id == 'null' or self.ontology_id is None:
- self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
- else:
- self.configuration['id_to_label_map'] = {int(idx): lbl.tag for idx, lbl in
- enumerate(self.ontology.labels)}
+ if not (self.dataset_id == 'null' or self.dataset_id is None):
+ self.labels = [label.tag for label in self.dataset.labels]
+ self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
+ # use existing
  else:
  self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in
  self.configuration['id_to_label_map'].items()}
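Note: id_to_label_map now falls back to the model's dataset labels instead of its ontology. A hedged sketch (model id hypothetical):

.. code-block:: python

    import dtlpy as dl

    model = dl.models.get(model_id='my-model-id')  # hypothetical id
    # with no explicit map in the configuration and a dataset attached,
    # the map is built from the dataset's label tags, e.g. {0: 'cat', 1: 'dog'}
    print(model.id_to_label_map)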
dtlpy/entities/service.py CHANGED
@@ -156,8 +156,10 @@ class KubernetesRuntime(ServiceRuntime):

  self.autoscaler = kwargs.get('autoscaler', autoscaler)
  if self.autoscaler is not None and isinstance(self.autoscaler, dict):
- if self.autoscaler['type'] == KubernetesAutuscalerType.RABBITMQ:
+ if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
  self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
+ elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
+ self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
  else:
  raise NotImplementedError(
  'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
@@ -228,6 +230,7 @@ class Service(entities.BaseEntity):
  archive = attr.ib(repr=False)
  config = attr.ib(repr=False)
  settings = attr.ib(repr=False)
+ panels = attr.ib(repr=False)

  # SDK
  _package = attr.ib(repr=False)
@@ -340,7 +343,8 @@ class Service(entities.BaseEntity):
  settings=_json.get('settings', None),
  app=_json.get('app', None),
  integrations=_json.get('integrations', None),
- org_id=_json.get('orgId', None)
+ org_id=_json.get('orgId', None),
+ panels=_json.get('panels', None)
  )
  inst.is_fetched = is_fetched
  return inst
@@ -484,7 +488,8 @@ class Service(entities.BaseEntity):
  attr.fields(Service).settings,
  attr.fields(Service).app,
  attr.fields(Service).integrations,
- attr.fields(Service).org_id
+ attr.fields(Service).org_id,
+ attr.fields(Service).panels
  )
  )

@@ -508,6 +513,9 @@ class Service(entities.BaseEntity):
  if self.updated_by is not None:
  _json['updatedBy'] = self.updated_by

+ if self.panels is not None:
+ _json['panels'] = self.panels
+
  if self.max_attempts is not None:
  _json['maxAttempts'] = self.max_attempts

@@ -559,14 +567,14 @@ class Service(entities.BaseEntity):
  """
  return self.services.update(service=self, force=force)

- def delete(self):
+ def delete(self, force: bool = False):
  """
  Delete Service object

  :return: True
  :rtype: bool
  """
- return self.services.delete(service_id=self.id)
+ return self.services.delete(service_id=self.id, force=force)

  def status(self):
  """
@@ -751,6 +759,32 @@ class Service(entities.BaseEntity):
  wait=wait)
  return execution

+ def rerun_batch(self,
+ filters,
+ wait=True
+ ):
+ """
+ rerun a executions on an existing service
+
+ **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
+
+ :param filters: Filters entity for a filtering before rerun
+ :param bool wait: wait until create task finish
+ :return: rerun command
+ :rtype: dtlpy.entities.command.Command
+
+ **Example**:
+
+ .. code-block:: python
+
+ command = service.executions.rerun_batch(
+ filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
+ """
+ execution = self.executions.rerun_batch(service_id=self.id,
+ filters=filters,
+ wait=wait)
+ return execution
+
  def activate_slots(
  self,
  project_id: str = None,
@@ -806,8 +840,8 @@
  )


- class KubernetesAutuscalerType(str, Enum):
- """ The Service Autuscaler Type (RABBITMQ, CPU).
+ class KubernetesAutoscalerType(str, Enum):
+ """ The Service Autoscaler Type (RABBITMQ, CPU).

  .. list-table::
  :widths: 15 150
@@ -816,21 +850,42 @@ class KubernetesAutuscalerType(str, Enum):
  * - State
  - Description
  * - RABBITMQ
- - Service Autuscaler will be in RABBITMQ
+ - Service Autoscaler based on service queue length
  * - CPU
- - Service Autuscaler will be in in local CPU
+ - Service Autoscaler based on service CPU usage
+ * - RPS
+ - Service Autoscaler based on service RPS
  """
  RABBITMQ = 'rabbitmq'
  CPU = 'cpu'
+ RPS = 'rps'
+
+
+ # added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
+ class KubernetesAutuscalerTypeMeta(type):
+ def __getattribute__(cls, item):
+ if hasattr(KubernetesAutoscalerType, item):
+ warnings.warn(
+ 'KubernetesAutuscalerType is deprecated and will be removed in version 1.97.0, '
+ 'use KubernetesAutoscalerType instead',
+ DeprecationWarning
+ )
+ return getattr(KubernetesAutoscalerType, item)
+ else:
+ raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
+
+
+ class KubernetesAutuscalerType(metaclass=KubernetesAutuscalerTypeMeta):
+ pass


  class KubernetesAutoscaler(entities.BaseEntity):
  MIN_REPLICA_DEFAULT = 0
  MAX_REPLICA_DEFAULT = 1
- AUTOSCALER_TYPE_DEFAULT = KubernetesAutuscalerType.RABBITMQ
+ AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ

  def __init__(self,
- autoscaler_type: KubernetesAutuscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
+ autoscaler_type: KubernetesAutoscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
  min_replicas=MIN_REPLICA_DEFAULT,
  max_replicas=MAX_REPLICA_DEFAULT,
  cooldown_period=None,
@@ -870,7 +925,7 @@ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
  **kwargs):
  super().__init__(min_replicas=min_replicas,
  max_replicas=max_replicas,
- autoscaler_type=KubernetesAutuscalerType.RABBITMQ,
+ autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
  cooldown_period=cooldown_period,
  polling_interval=polling_interval, **kwargs)
  self.queue_length = kwargs.get('queueLength', queue_length)
@@ -879,3 +934,30 @@ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
  _json = super().to_json()
  _json['queueLength'] = self.queue_length
  return _json
+
+
+ class KubernetesRPSAutoscaler(KubernetesAutoscaler):
+ THRESHOLD_DEFAULT = 10
+ RATE_SECONDS_DEFAULT = 30
+
+ def __init__(self,
+ min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+ max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+ threshold=THRESHOLD_DEFAULT,
+ rate_seconds=RATE_SECONDS_DEFAULT,
+ cooldown_period=None,
+ polling_interval=None,
+ **kwargs):
+ super().__init__(min_replicas=min_replicas,
+ max_replicas=max_replicas,
+ autoscaler_type=KubernetesAutoscalerType.RPS,
+ cooldown_period=cooldown_period,
+ polling_interval=polling_interval, **kwargs)
+ self.threshold = kwargs.get('threshold', threshold)
+ self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
+
+ def to_json(self):
+ _json = super().to_json()
+ _json['rateSeconds'] = self.rate_seconds
+ _json['threshold'] = self.threshold
+ return _json
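Note: a hedged sketch of deploying with the new RPS autoscaler; the replica counts and thresholds are illustrative, and the exact platform-side interpretation of threshold/rate_seconds is assumed from the defaults above:

.. code-block:: python

    import dtlpy as dl

    autoscaler = dl.KubernetesRPSAutoscaler(min_replicas=0,
                                            max_replicas=3,
                                            threshold=10,      # hypothetical target RPS
                                            rate_seconds=30)   # hypothetical rate window
    runtime = dl.KubernetesRuntime(autoscaler=autoscaler)
    # the old misspelled enum still resolves, but emits a DeprecationWarning
    print(dl.KubernetesAutuscalerType.RPS)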
dtlpy/entities/setting.py CHANGED
@@ -190,7 +190,8 @@ class Setting(BaseSetting):
  hint=None,
  client_api=None,
  project=None,
- org=None
+ org=None,
+ setting_type=SettingsTypes.USER_SETTINGS
  ):
  super().__init__(
  default_value=default_value,
@@ -199,7 +200,7 @@ class Setting(BaseSetting):
  value_type=value_type,
  scope=scope,
  metadata=metadata,
- setting_type=SettingsTypes.USER_SETTINGS,
+ setting_type=setting_type,
  client_api=client_api,
  project=project,
  org=org,
dtlpy/new_instance.py CHANGED
@@ -22,7 +22,7 @@ class Dtlpy:
  # triggers
  TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
  # faas
- FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+ FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
  InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
  PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
  # roberto
dtlpy/repositories/annotations.py CHANGED
@@ -4,6 +4,9 @@ import logging
  import json
  import jwt
  import os
+ from PIL import Image
+ from io import BytesIO
+ import base64

  from .. import entities, exceptions, miscellaneous, _api_reference
  from ..services.api_client import ApiClient
@@ -487,12 +490,9 @@ class Annotations:
  status = True
  result = w_annotation
  else:
- url_path = '/annotations/{}'.format(annotation_id)
- if system_metadata:
- url_path += '?system=true'
- suc, response = self._client_api.gen_request(req_type='put',
- path=url_path,
- json_req=json_req)
+ suc, response = self._update_annotation_req(annotation_json=json_req,
+ system_metadata=system_metadata,
+ annotation_id=annotation_id)
  if suc:
  result = entities.Annotation.from_json(_json=response.json(),
  annotations=self,
@@ -507,6 +507,15 @@
  result = traceback.format_exc()
  return status, result

+ def _update_annotation_req(self, annotation_json, system_metadata, annotation_id):
+ url_path = '/annotations/{}'.format(annotation_id)
+ if system_metadata:
+ url_path += '?system=true'
+ suc, response = self._client_api.gen_request(req_type='put',
+ path=url_path,
+ json_req=annotation_json)
+ return suc, response
+
  @_api_reference.add(path='/annotations/{annotationId}', method='put')
  def update(self, annotations, system_metadata=False):
@@ -572,11 +581,12 @@ class Annotations:
  last_frame = frame
  return annotation

- def _create_batches_for_upload(self, annotations):
+ def _create_batches_for_upload(self, annotations, merge=False):
  """
  receives a list of annotations and split them into batches to optimize the upload

  :param annotations: list of all annotations
+ :param merge: bool - merge the new binary annotations with the existing annotations
  :return: batch_annotations: list of list of annotation. each batch with size self._upload_batch_size
  """
  annotation_batches = list()
@@ -601,8 +611,107 @@ class Annotations:
  single_batch = list()
  if len(single_batch) > 0:
  annotation_batches.append(single_batch)
+ if merge and self.item:
+ annotation_batches = self._merge_new_annotations(annotation_batches)
+ annotation_batches = self._merge_to_exits_annotations(annotation_batches)
  return annotation_batches

+ def _merge_binary_annotations(self, data_url1, data_url2, item_width, item_height):
+ # Decode base64 data
+ img_data1 = base64.b64decode(data_url1.split(",")[1])
+ img_data2 = base64.b64decode(data_url2.split(",")[1])
+
+ # Convert binary data to images
+ img1 = Image.open(BytesIO(img_data1))
+ img2 = Image.open(BytesIO(img_data2))
+
+ # Create a new image with the target item size
+ merged_img = Image.new('RGBA', (item_width, item_height))
+
+ # Paste both images on the new canvas at their original sizes and positions
+ # Adjust positioning logic if needed (assuming top-left corner for both images here)
+ merged_img.paste(img1, (0, 0), img1) # Use img1 as a mask to handle transparency
+ merged_img.paste(img2, (0, 0), img2) # Overlay img2 at the same position
+
+ # Save the merged image to a buffer
+ buffer = BytesIO()
+ merged_img.save(buffer, format="PNG")
+ merged_img_data = buffer.getvalue()
+
+ # Encode the merged image back to a base64 string
+ merged_data_url = "data:image/png;base64," + base64.b64encode(merged_img_data).decode()
+
+ return merged_data_url
+
+ def _merge_new_annotations(self, annotations_batch):
+ """
+ Merge the new binary annotations with the existing annotations
+ :param annotations_batch: list of list of annotation. each batch with size self._upload_batch_size
+ :return: merged_annotations_batch: list of list of annotation. each batch with size self._upload_batch_size
+ """
+ for annotations in annotations_batch:
+ for annotation in annotations:
+ if annotation['type'] == 'binary' and not annotation.get('clean', False):
+ to_merge = [a for a in annotations if
+ not a.get('clean', False) and a.get("metadata", {}).get('system', {}).get('objectId',
+ None) ==
+ annotation.get("metadata", {}).get('system', {}).get('objectId', None) and a['label'] ==
+ annotation['label']]
+ if len(to_merge) == 0:
+ # no annotation to merge with
+ continue
+ for a in to_merge:
+ if a['coordinates'] == annotation['coordinates']:
+ continue
+ merged_data_url = self._merge_binary_annotations(a['coordinates'], annotation['coordinates'],
+ self.item.width, self.item.height)
+ annotation['coordinates'] = merged_data_url
+ a['clean'] = True
+ return [[a for a in annotations if not a.get('clean', False)] for annotations in annotations_batch]
+
+ def _merge_to_exits_annotations(self, annotations_batch):
+ filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, field='type', values='binary')
+ filters.add(field='itemId', values=self.item.id, method=entities.FiltersMethod.AND)
+ exist_annotations = self.list(filters=filters).annotations or list()
+ to_delete = list()
+ for annotations in annotations_batch:
+ for ann in annotations:
+ if ann['type'] == 'binary':
+ to_merge = [a for a in exist_annotations if
+ a.object_id == ann.get("metadata", {}).get('system', {}).get('objectId',
+ None) and a.label == ann[
+ 'label']]
+ if len(to_merge) == 0:
+ # no annotation to merge with
+ continue
+ if to_merge[0].coordinates == ann['coordinates']:
+ # same annotation
+ continue
+ if len(to_merge) > 1:
+ raise exceptions.PlatformException('400', 'Multiple annotations with the same label')
+ # merge
+ exist_annotations.remove(to_merge[0])
+ merged_data_url = self._merge_binary_annotations(to_merge[0].coordinates, ann['coordinates'],
+ self.item.width, self.item.height)
+ json_ann = to_merge[0].to_json()
+ json_ann['coordinates'] = merged_data_url
+ suc, response = self._update_annotation_req(annotation_json=json_ann,
+ system_metadata=True,
+ annotation_id=to_merge[0].id)
+ if not suc:
+ raise exceptions.PlatformException(response)
+ if suc:
+ result = entities.Annotation.from_json(_json=response.json(),
+ annotations=self,
+ dataset=self._dataset,
+ item=self._item)
+ exist_annotations.append(result)
+ to_delete.append(ann)
+ if len(to_delete) > 0:
+ annotations_batch = [[a for a in annotations if a not in to_delete] for annotations in annotations_batch]
+
+ return annotations_batch
+
  def _upload_single_batch(self, annotation_batch):
  try:
  suc, response = self._client_api.gen_request(req_type='post',
@@ -650,14 +759,15 @@ class Annotations:
  logger.info('Annotation/s uploaded successfully. num: {}'.format(len(uploaded_annotations)))
  return uploaded_annotations

- async def _async_upload_annotations(self, annotations):
+ async def _async_upload_annotations(self, annotations, merge=False):
  """
  Async function to run from the uploader. will use asyncio to not break the async
- :param annotations:
+ :param annotations: list of all annotations
+ :param merge: bool - merge the new binary annotations with the existing annotations
  :return:
  """
  async with self._client_api.event_loop.semaphore('annotations.upload'):
- annotation_batch = self._create_batches_for_upload(annotations=annotations)
+ annotation_batch = self._create_batches_for_upload(annotations=annotations, merge=merge)
  output_annotations = list()
  for annotations_list in annotation_batch:
  success, response = await self._client_api.gen_async_request(req_type='post',
@@ -679,7 +789,7 @@ class Annotations:
  return result

  @_api_reference.add(path='/items/{itemId}/annotations', method='post')
- def upload(self, annotations) -> entities.AnnotationCollection:
+ def upload(self, annotations, merge=False) -> entities.AnnotationCollection:
  """
  Upload a new annotation/annotations. You must first create the annotation using the annotation *builder* method.

@@ -687,6 +797,7 @@ class Annotations:
  :param List[dtlpy.entities.annotation.Annotation] or dtlpy.entities.annotation.Annotation annotations: list or
  single annotation of type Annotation
+ :param bool merge: optional - merge the new binary annotations with the existing annotations
  :return: list of annotation objects
  :rtype: entities.AnnotationCollection

@@ -718,7 +829,7 @@
  logger.warning('Annotation upload receives 0 annotations. Not doing anything')
  out_annotations = list()
  else:
- annotation_batches = self._create_batches_for_upload(annotations=annotations)
+ annotation_batches = self._create_batches_for_upload(annotations=annotations, merge=merge)
  out_annotations = self._upload_annotations_batches(annotation_batches=annotation_batches)
  out_annotations = entities.AnnotationCollection.from_json(_json=out_annotations,
  item=self.item)
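Note: a hedged sketch of the new merge flag on annotation upload; it only affects 'binary' (segmentation mask) annotations that share a label and objectId, first merging them within the uploaded batch and then into existing annotations on the item (ids and labels hypothetical):

.. code-block:: python

    import numpy as np
    import dtlpy as dl

    item = dl.items.get(item_id='my-item-id')  # hypothetical id
    mask = np.zeros((item.height, item.width), dtype=bool)
    mask[100:200, 100:200] = True
    builder = item.annotations.builder()
    builder.add(annotation_definition=dl.Segmentation(geo=mask, label='car'))
    # merge=True folds the new mask into an existing annotation with the
    # same label and objectId instead of creating a duplicate
    item.annotations.upload(builder, merge=True)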
dtlpy/repositories/commands.py CHANGED
@@ -57,7 +57,7 @@ class Commands:
  if url is None:
  url_path = "/commands/{}".format(command_id)
  else:
- url_path = url.split('v1')[1]
+ url_path = url.split('api/v1')[1]

  success, response = self._client_api.gen_request(req_type="get",
  path=url_path)
dtlpy/repositories/computes.py CHANGED
@@ -1,7 +1,12 @@
+ import base64
+ import datetime
+ import json
+
  from ..services.api_client import ApiClient
  from .. import exceptions, entities, repositories
  from typing import List, Optional, Dict
-
+ from ..entities import ComputeCluster, ComputeContext, ComputeType, Project
+ from ..entities.integration import IntegrationType

  class Computes:

@@ -9,6 +14,8 @@ class Computes:
  self._client_api = client_api
  self._base_url = '/compute'
  self._commands = None
+ self._projects = None
+ self._organizations = None

  @property
  def commands(self) -> repositories.Commands:
@@ -16,6 +23,18 @@ class Computes:
  self._commands = repositories.Commands(client_api=self._client_api)
  return self._commands

+ @property
+ def projects(self):
+ if self._projects is None:
+ self._projects = repositories.Projects(client_api=self._client_api)
+ return self._projects
+
+ @property
+ def organizations(self):
+ if self._organizations is None:
+ self._organizations = repositories.Organizations(client_api=self._client_api)
+ return self._organizations
+
  def create(
  self,
  name: str,
@@ -142,6 +161,59 @@ class Computes:

  return True

+ @staticmethod
+ def read_file(file_path):
+ try:
+ with open(file_path, 'r') as file:
+ content = file.read()
+ return content
+ except FileNotFoundError:
+ print(f"The file at {file_path} was not found.")
+ except IOError:
+ print(f"An error occurred while reading the file at {file_path}.")
+
+ def decode_and_parse_input(self, file_path):
+ """Decode a base64 encoded string from file a and parse it as JSON."""
+ decoded_bytes = base64.b64decode(self.read_file(file_path))
+ return json.loads(decoded_bytes)
+
+ @staticmethod
+ def create_integration(org, name, auth_data):
+ """Create a new key-value integration within the specified project."""
+ return org.integrations.create(
+ integrations_type=IntegrationType.KEY_VALUE,
+ name=name,
+ options={
+ 'key': name,
+ 'value': json.dumps(auth_data)
+ }
+ )
+
+ def setup_compute_cluster(self, config, integration, org_id, project=None):
+ """Set up a compute cluster using the provided configuration and integration."""
+ cluster = ComputeCluster.from_setup_json(config, integration)
+ project_id = None
+ if project is not None:
+ project_id = project.id
+ compute = self.create(
+ config['config']['name'],
+ ComputeContext([], org_id, project_id),
+ [],
+ cluster,
+ ComputeType.KUBERNETES)
+ return compute
+
+ def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
+ config = self.decode_and_parse_input(config_file_path)
+ project = None
+ if project_name is not None:
+ project = self.projects.get(project_name=project_name)
+ org = self.organizations.get(organization_id=org_id)
+ integration_name = ('cluster_integration_test_' + datetime.datetime.now().isoformat().split('.')[0]
+ .replace(':', '_'))
+ integration = self.create_integration(org, integration_name, config['authentication'])
+ compute = self.setup_compute_cluster(config, integration, org_id, project)
+ return compute

  class ServiceDrivers:

@@ -234,7 +306,7 @@
  """
  Set a service driver as default

- :param service_driver_id: Service driver ID
+ :param service_driver_id: Compute name
  :param org_id: Organization ID
  :param update_existing_services: Update existing services

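Note: a hedged sketch of the new one-call cluster bootstrap above; the file path, org id, and project name are hypothetical, and the file is expected to hold base64-encoded JSON with 'config' and 'authentication' keys, per the methods shown. Reaching the Computes repository as dl.computes is an assumption:

.. code-block:: python

    import dtlpy as dl

    compute = dl.computes.create_from_config_file(
        config_file_path='/path/to/cluster_setup.b64',  # hypothetical path
        org_id='my-org-id',
        project_name='my-project')  # optional; scopes the compute context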
dtlpy/repositories/datasets.py CHANGED
@@ -515,7 +515,8 @@ class Datasets:
  with_items_annotations: bool = True,
  with_metadata: bool = True,
  with_task_annotations_status: bool = True,
- dst_dataset_id: str = None):
+ dst_dataset_id: str = None,
+ target_directory: str = None):
  """
  Clone a dataset. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.

@@ -528,6 +529,7 @@ class Datasets:
  :param bool with_metadata: true to clone with metadata
  :param bool with_task_annotations_status: true to clone with task annotations' status
  :param str dst_dataset_id: destination dataset id
+ :param str target_directory: target directory
  :return: dataset object
  :rtype: dtlpy.entities.dataset.Dataset

@@ -555,13 +557,17 @@ class Datasets:
  if copy_filters.has_field('hidden'):
  copy_filters.pop('hidden')

+ if target_directory is not None and not target_directory.startswith('/'):
+ target_directory = '/' + target_directory
+
  payload = {
  "name": clone_name,
  "filter": copy_filters.prepare(),
  "cloneDatasetParams": {
  "withItemsAnnotations": with_items_annotations,
  "withMetadata": with_metadata,
- "withTaskAnnotationsStatus": with_task_annotations_status
+ "withTaskAnnotationsStatus": with_task_annotations_status,
+ "targetDirectory": target_directory
  }
  }
  if dst_dataset_id is not None:
dtlpy/repositories/dpks.py CHANGED
@@ -287,7 +287,7 @@ class Dpks:
  """
  success, response = self._client_api.gen_request(req_type='delete', path=f'/app-registry/{dpk_id}')
  if success:
- logger.info('Deleted dpk successfully')
+ logger.info(f'Deleted dpk: {dpk_id} successfully')
  else:
  raise exceptions.PlatformException(response)
  return success
dtlpy/repositories/executions.py CHANGED
@@ -1,6 +1,8 @@
  import threading
  import logging
  import time
+ from copy import deepcopy
+
  import numpy as np

  from .. import exceptions, entities, repositories, miscellaneous, _api_reference
@@ -351,6 +353,58 @@ class Executions:
  command = command.wait(timeout=0)
  return command

+ @_api_reference.add(path='/executions/rerun', method='post')
+ def rerun_batch(self,
+ filters,
+ service_id: str = None,
+ wait=True
+ ):
+ """
+ rerun a executions on an existing service
+
+ **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
+
+ :param filters: Filters entity for a filtering before rerun
+ :param str service_id: service id to rerun on
+ :param bool wait: wait until create task finish
+ :return: rerun command
+ :rtype: dtlpy.entities.command.Command
+
+ **Example**:
+
+ .. code-block:: python
+
+ command = service.executions.rerun_batch(
+ filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
+ """
+ url_path = '/executions/rerun'
+
+ if filters is None:
+ raise exceptions.PlatformException('400', 'Please provide filter')
+
+ if filters.resource != entities.FiltersResource.EXECUTION:
+ raise exceptions.PlatformException(
+ error='400',
+ message='Filters resource must to be FiltersResource.EXECUTION. Got: {!r}'.format(filters.resource))
+
+ if service_id is not None and not filters.has_field('serviceId'):
+ filters = deepcopy(filters)
+ filters.add(field='serviceId', values=service_id, method=entities.FiltersMethod.AND)
+
+ success, response = self._client_api.gen_request(req_type='post',
+ path=url_path,
+ json_req={'query': filters.prepare()['filter']})
+ # exception handling
+ if not success:
+ raise exceptions.PlatformException(response)
+
+ response_json = response.json()
+ command = entities.Command.from_json(_json=response_json,
+ client_api=self._client_api)
+ if wait:
+ command = command.wait(timeout=0)
+ return command
+
  def _list(self, filters: entities.Filters):
  """
  List service executions
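Note: beyond the docstring example above, the repository-level rerun_batch can scope the rerun to a single service; a hedged sketch (ids and the status field are hypothetical):

.. code-block:: python

    import dtlpy as dl

    service = dl.services.get(service_id='my-service-id')  # hypothetical id
    filters = dl.Filters(resource=dl.FiltersResource.EXECUTION,
                         field='latestStatus.status', values='failed')
    # when the filter lacks a serviceId field, it is deep-copied and the
    # service id is ANDed in before the request is sent
    command = service.executions.rerun_batch(filters=filters, service_id=service.id)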
dtlpy/repositories/items.py CHANGED
@@ -638,11 +638,6 @@ class Items:
  item_metadata={'Hellow': 'Word'}
  )
  """
- # fix remote path
- if remote_path is not None:
- if not remote_path.startswith('/'):
- remote_path = '/' + remote_path
-
  # initiate and use uploader
  uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
  return uploader.upload(
dtlpy/repositories/tasks.py CHANGED
@@ -642,10 +642,19 @@ class Tasks:
  if filters is None and items is None:
  query = entities.Filters().prepare()
  elif filters is None:
- if not isinstance(items, list):
- items = [items]
+ item_list = list()
+ if isinstance(items, entities.PagedEntities):
+ for page in items:
+ for item in page:
+ item_list.append(item)
+ elif isinstance(items, list):
+ item_list = items
+ elif isinstance(items, entities.Item):
+ item_list.append(items)
+ else:
+ raise exceptions.PlatformException('400', 'Unknown items type')
  query = entities.Filters(field='id',
- values=[item.id for item in items],
+ values=[item.id for item in item_list],
  operator=entities.FiltersOperations.IN,
  use_defaults=False).prepare()
  else:
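Note: the task query builder now accepts a PagedEntities object wherever a list of items or a single Item was allowed, collecting every item across all pages into the id query. A hedged sketch; that the items parameter of tasks.create routes through this builder is an assumption, and the names are hypothetical:

.. code-block:: python

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')  # hypothetical name
    dataset = project.datasets.get(dataset_name='my-dataset')
    pages = dataset.items.list()  # PagedEntities
    task = project.tasks.create(task_name='review-task',
                                dataset=dataset,
                                assignee_ids=['annotator@example.com'],
                                items=pages)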
dtlpy/repositories/uploader.py CHANGED
@@ -133,11 +133,13 @@ class Uploader:
  item_metadata,
  export_version: str = entities.ExportVersion.V1,
  item_description=None):
-
+ # fix remote path
  if remote_path is None:
- remote_path = '/'
+ remote_path = "/"
+ if not remote_path.startswith('/'):
+ remote_path = f"/{remote_path}"
  if not remote_path.endswith("/"):
- remote_path += "/"
+ remote_path = f"{remote_path}/"
  if file_types is not None and not isinstance(file_types, list):
  msg = '"file_types" should be a list of file extension. e.g [".jpg", ".png"]'
  raise PlatformException(error="400", message=msg)
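Note: the remote-path normalization removed from Items.upload above now lives here in the Uploader, so paths without a leading slash keep working; a minimal sketch (ids and paths hypothetical):

.. code-block:: python

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='my-dataset-id')  # hypothetical id
    # 'folder/sub' is normalized inside the uploader to '/folder/sub/'
    item = dataset.items.upload(local_path='/tmp/image.jpg', remote_path='folder/sub')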
dtlpy/services/api_client.py CHANGED
@@ -74,8 +74,6 @@ class PlatformError(Exception):
  super().__init__(msg)


-
-
  class Callbacks:
  def __init__(self):
  self._callbacks = {}
@@ -1172,7 +1170,12 @@ class ApiClient:
  def callback(bytes_read):
  pass

- timeout = aiohttp.ClientTimeout(total=2 * 60)
+ timeout = aiohttp.ClientTimeout(
+ total=None, # Disable overall timeout
+ connect=2 * 60, # Set connect timeout (in seconds)
+ sock_read=10 * 60, # Set read timeout for socket read operations
+ sock_connect=2 * 60 # Set timeout for connection setup
+ )
  async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
  try:
  form = aiohttp.FormData({})
@@ -1634,6 +1637,30 @@ class ApiClient:
  self._send_login_event(user_type='human', login_type='refresh')
  return res

+ def generate_api_key(self, description: str = None, login: bool = False):
+ """
+ Generate an API key for a user
+ :param description: description for the API key
+ :param login: if True, login with the new API key
+ :return: User token
+ """
+ user_email = self.info()['user_email']
+ payload = {
+ 'userId': user_email
+ }
+ if description:
+ if not isinstance(description, str):
+ raise ValueError('description should be a string')
+ payload['description'] = description
+ success, response = self.gen_request(req_type='post', path='/apiKeys', json_req=payload)
+ if not success:
+ raise exceptions.PlatformException(response)
+ if login:
+ self.login_api_key(response.json()['jwt'])
+ return True
+
+ return response.json()['jwt']
+
  def _renew_token_with_refresh_token(self):
  renewed = False
  if self.refresh_token_active is False:
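Note: a hedged sketch of the new API-key helper; reaching the ApiClient instance as dl.client_api is an assumption, and the description is hypothetical:

.. code-block:: python

    import dtlpy as dl

    dl.login()  # any existing authentication works
    # returns the new key's JWT; with login=True the session switches to it instead
    jwt = dl.client_api.generate_api_key(description='ci-runner key')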
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dtlpy
3
- Version: 1.94.5
3
+ Version: 1.96.8
4
4
  Summary: SDK and CLI for Dataloop platform
5
5
  Home-page: https://github.com/dataloop-ai/dtlpy
6
6
  Author: Dataloop Team
@@ -42,6 +42,7 @@ Requires-Dist: diskcache (>=5.4)
  Requires-Dist: redis (>=3.5)
  Requires-Dist: inquirer
  Requires-Dist: dtlpymetrics
+ Requires-Dist: dataclasses

  ![logo.svg](docs%2F_static%2Flogo.svg)
  [![Documentation Status](https://readthedocs.org/projects/dtlpy/badge/?version=latest)](https://sdk-docs.dataloop.ai/en/latest/?badge=latest)
dtlpy-1.94.5.dist-info/RECORD → dtlpy-1.96.8.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
- dtlpy/__init__.py,sha256=6-Ioishmg5KdDJ3ZtouCDp-UYLzgysq7BI94IpiLl9Y,20596
- dtlpy/__version__.py,sha256=92vcuLv_y_Lbt5eF6SK28kb-Af_c63QPRbWKdv9OnwQ,19
+ dtlpy/__init__.py,sha256=nE2SN0AD2rZ_ekF_kD7OzZbSE32H8zV5UM6t_E0LzTw,20647
+ dtlpy/__version__.py,sha256=8A7-aAEwSxpoFoUNMO139i-077CC5W2bPhNdC-ARCAI,19
  dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
- dtlpy/new_instance.py,sha256=I4Gc658s-yUD0-gEiC2pRDKaADZPdr1dm67K4mkx5xw,10065
+ dtlpy/new_instance.py,sha256=ORhXmIsc8Kut2M1jekKL3dG_adRp7axK-25B4zJNqMU,10091
  dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
  dtlpy/assets/lock_open.png,sha256=BH9uyf5uYvgZrDpDw9qCUnT3UbkXG8XbeRmWDpWlV4M,18215
  dtlpy/assets/main.py,sha256=N1JUsx79qnXI7Hx22C8JOzHJdGHxvrXeTx5UZAxvJfE,1380
@@ -44,7 +44,7 @@ dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
  dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
  dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
  dtlpy/dlp/parser.py,sha256=p-TFaiAU2c3QkI97TXzL2LDR3Eq0hGDFrTc9J2jWLh4,30551
- dtlpy/entities/__init__.py,sha256=eD0ON6MAmzvc0NQmTWzxLPOdHbl5bu4Np3a1JMBfR4k,4805
+ dtlpy/entities/__init__.py,sha256=R2kDC9VHOeRSTgXXqNowbf_yZwy7tbAkukvIlPZmPVE,4856
  dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
  dtlpy/entities/annotation.py,sha256=yk-JQzgzXvnDLFrOkmcHQfEtsiPqZeIisv80ksNB-f8,66912
  dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
@@ -56,21 +56,21 @@ dtlpy/entities/base_entity.py,sha256=i83KrtAz6dX4t8JEiUimLI5ZRrN0VnoUWKG2Zz49N5w
  dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
  dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
  dtlpy/entities/command.py,sha256=ARu8ttk-C7_Ice7chRyTtyOtakBTF09FC04mEk73SO8,5010
- dtlpy/entities/compute.py,sha256=TretE_K_rE5FneM4to4zgIvvRFysdlKR2Jb2mChcTsE,12214
- dtlpy/entities/dataset.py,sha256=87o6FA9MYCIc0KBCUqQr_VsX-W2mGbJn64JvD-zp-EA,47354
+ dtlpy/entities/compute.py,sha256=4FEpahPFFGHxye_fLh_p_kP6iEQ3QJK7S5hAdd6Afos,12744
+ dtlpy/entities/dataset.py,sha256=tNCl7nNCx-DrZ3z96APhRdvllfQA1-9y8DpL6Ma2l0I,47516
  dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
  dtlpy/entities/dpk.py,sha256=a5C1UG_cvDnXSee650WHH43QflxbJCo_g0V17-GRb24,17639
  dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
  dtlpy/entities/execution.py,sha256=WBiAws-6wZnQQ3y9wyvOeexA3OjxfaRdwDu5dSFYL1g,13420
  dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
  dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
- dtlpy/entities/filters.py,sha256=tA-A0dS8nhMbnkHIo-INK6UuKzEPMyCdTs51K1-Vl9Y,22441
+ dtlpy/entities/filters.py,sha256=x0SZqhguQ7CWfBZPWmoTU7MrDLO6c8iN5a6gI_ar224,22664
  dtlpy/entities/integration.py,sha256=CA5F1eQCGE_4c_Kry4nWRdeyjHctNnvexcDXg_M5HLU,5734
  dtlpy/entities/item.py,sha256=G6VVcVCudqeShWigZmNIuKD4OkvTRJ05CeXFXNe3Jk8,29691
  dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
  dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
  dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
- dtlpy/entities/model.py,sha256=LEot0PHOxPSeK9SCzAT6pofbLrbfybFYmr6v9YsiuB4,24927
+ dtlpy/entities/model.py,sha256=UKtai_V8ckTNPlhzflmJNHXJvH6BH9UYOwCMWXNZueU,24822
  dtlpy/entities/node.py,sha256=yPPYDLtNMc6vZbbf4FIffY86y7tkaTvYm42Jb7k3Ofk,39617
  dtlpy/entities/ontology.py,sha256=ok4p3sLBc_SS5hs2gZr5-gbblrveM7qSIX4z67QSKeQ,31967
  dtlpy/entities/organization.py,sha256=AMkx8hNIIIjnu5pYlNjckMRuKt6H3lnOAqtEynkr7wg,9893
@@ -87,8 +87,8 @@ dtlpy/entities/prompt_item.py,sha256=Kmvguz3f0sGtkKZS9OEA_-Yi4aQRCgdg1GBkaLQyyTg
  dtlpy/entities/recipe.py,sha256=Q1HtYgind3bEe-vnDZWhw6H-rcIAGhkGHPRWtLIkPSE,11917
  dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y4,3273
  dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
- dtlpy/entities/service.py,sha256=OaEcKsGgapwWRIzBUU8wvJqd0h_mpY7ICugVjzV7pDA,30211
- dtlpy/entities/setting.py,sha256=koydO8b0_bWVNklR2vpsXswxzBo8q83XtGk3wkma0MI,8522
+ dtlpy/entities/service.py,sha256=3A_kcEUCbaS-Qx31rfNyThYK7OxUrzHiE6shT0Oxh60,33467
+ dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
  dtlpy/entities/task.py,sha256=XHiEqZYFlrDCtmw1MXsysjoBLdIzAk7coMrVk8bNIiE,19534
  dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
  dtlpy/entities/trigger.py,sha256=zh3wYUY2-zATh_7ous0Ck87Yojo9r9PAVQrkcESxoko,14266
@@ -153,24 +153,24 @@ dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,27
  dtlpy/ml/train_utils.py,sha256=R-BHKRfqDoLLhFyLzsRFyJ4E-8iedj9s9oZqy3IO2rg,2404
  dtlpy/repositories/__init__.py,sha256=aBWg6mayTAy6CtfSPLxyT_Uae7hQyNTILI7sRLKNEPU,1996
  dtlpy/repositories/analytics.py,sha256=dQPCYTPAIuyfVI_ppR49W7_GBj0033feIm9Gd7LW1V0,2966
- dtlpy/repositories/annotations.py,sha256=E7iHo8UwDAhdulqh0lGr3fGQ-TSwZXXGsEXZA-WJ_NA,35780
+ dtlpy/repositories/annotations.py,sha256=Vly9htqoH79s6uDB1HMiM-uG1EvguzsGS2BoaAjKReI,42387
  dtlpy/repositories/apps.py,sha256=J-PDCPWVtvTLmzzkABs2-8zo9hGLk_z_sNR2JB1mB0c,15752
  dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzMbo,19081
  dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
  dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
  dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zhGA,24668
- dtlpy/repositories/commands.py,sha256=8GJU2OQTH0grHFQE30l0UVqaPAwio4psk4VpiYklkFk,5589
+ dtlpy/repositories/commands.py,sha256=kXhmyBpLZNs-6vKBo4iXaommpjcGBDXs287IICUnQMw,5593
  dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
- dtlpy/repositories/computes.py,sha256=-GuAQEIW_az8FO7CZ3wg_d_dH6OjJyce8yBYuUMRcYE,7094
- dtlpy/repositories/datasets.py,sha256=Rauh-apKSKP7cWS99uhiZYZ-679qNpPm7HoMkMzyJ-s,51789
+ dtlpy/repositories/computes.py,sha256=EtfE_3JhTdNlSYDPkKXBFkq-DBl4sgQqIm50ajvFdWM,9976
+ dtlpy/repositories/datasets.py,sha256=rDpJXNyxOlJwDQB-wNkM-JIqOGH10q9nujnAl6y8_xU,52077
  dtlpy/repositories/downloader.py,sha256=pNwL7Nid8xmOyYNiv4DB_WY4RoKlxQ-U9nG2V99Gyr8,41342
- dtlpy/repositories/dpks.py,sha256=xFdT-F0XodNCxvmOzoomWZhIzCv7zDEowfvKxmN4Zbs,17476
+ dtlpy/repositories/dpks.py,sha256=mj3QPvfzj_jZAscwIgpKUfa7fLxptc3OJQ_RrSfgYxo,17487
  dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
- dtlpy/repositories/executions.py,sha256=M84nhpFPPZq4fQeJ2m_sv6JT4NE2WDRMOXWr451J0bU,30403
+ dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
  dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
  dtlpy/repositories/features.py,sha256=7xA2ihEuNgZD7HBQMMGLWpsS2V_3PgieKW2YAk1OeUU,9712
  dtlpy/repositories/integrations.py,sha256=Wi-CpT2PH36GFu3znWP5Uf2CmkqWBUYyOdwvatGD_eM,11798
- dtlpy/repositories/items.py,sha256=DqJ3g9bc4OLMm9KqI-OebXbr-zcEiohO1wGZJ1uE2Lg,37874
+ dtlpy/repositories/items.py,sha256=wZL438X49lGcLcQjREqzjr-7JzBFriZmdKBOzI_C1mI,37715
  dtlpy/repositories/messages.py,sha256=zYcoz8Us6j8Tb5Z7luJuvtO9xSRTuOCS7pl-ztt97Ac,3082
  dtlpy/repositories/models.py,sha256=GdVWHJ6kOIxM01wH7RVQ3CVaR4OmGurWJdQVHZezLDM,34789
  dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
@@ -185,15 +185,15 @@ dtlpy/repositories/resource_executions.py,sha256=PyzsbdJxz6jf17Gx13GZmqdu6tZo3TT
  dtlpy/repositories/schema.py,sha256=kTKDrbwm7BfQnBAK81LpAl9ChNFdyUweSLNazlJJhjk,3953
  dtlpy/repositories/services.py,sha256=8hu6CgIyGQHOOlBmZmmM-oY8i-adU_99lSN46FGvvkc,67421
  dtlpy/repositories/settings.py,sha256=pvqNse0ANCdU3NSLJEzHco-PZq__OIsPSPVJveB9E4I,12296
- dtlpy/repositories/tasks.py,sha256=nA3rODvS8Q361xDmPXII-VPzktzoxbAApxTkzC5wv4M,48601
+ dtlpy/repositories/tasks.py,sha256=v09S2pYGkKx_vBG7SWigJeuMhp0GsefKo3Td7ImrWb0,49039
  dtlpy/repositories/times_series.py,sha256=m-bKFEgiZ13yQNelDjBfeXMUy_HgsPD_JAHj1GVx9fU,11420
  dtlpy/repositories/triggers.py,sha256=izdNyCN1gDc5uo7AXntso0HSMTDIzGFUp-dSEz8cn_U,21990
  dtlpy/repositories/upload_element.py,sha256=4CDZRKLubanOP0ZyGwxAHTtl6GLzwAyRAIm-PLYt0ck,10140
- dtlpy/repositories/uploader.py,sha256=iOlDYWIMy_h1Rbd7Mfug1I0e93dBJ0SxqP_BOwqYQPQ,30697
+ dtlpy/repositories/uploader.py,sha256=Flqd3gxHoTNIoTVpLGOt-EO5AYWydPiHwkYJ1461d3w,30823
  dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
  dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
  dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
- dtlpy/services/api_client.py,sha256=DBelaW5qpZoX7vQXjgLL2xTcTwUqJodZ901g0C3Panw,68331
+ dtlpy/services/api_client.py,sha256=htIy5RN7gJ7k1ddXJ8X1Im6Y1fl2fvs825rF8bvJi1U,69484
  dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
  dtlpy/services/async_utils.py,sha256=lfpkTkRUvQoMTxaRZBHbPt5e43qdvpCGDe_-KcY2Jps,2810
  dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
@@ -221,9 +221,9 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
  dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
  dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
  dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
- dtlpy-1.94.5.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
- dtlpy-1.94.5.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
- dtlpy-1.94.5.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
+ dtlpy-1.96.8.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+ dtlpy-1.96.8.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+ dtlpy-1.96.8.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
  tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
@@ -231,9 +231,9 @@ tests/assets/models_flow/main.py,sha256=87O3-JaWcC6m_kA39sqPhX70_VCBzzbLWmX2YQFi
  tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/features/environment.py,sha256=V23cUx_p4VpNk9kc2I0BDZJHO_xcJBFJq8m3JlYCooc,16736
- dtlpy-1.94.5.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
- dtlpy-1.94.5.dist-info/METADATA,sha256=r9ln-rjNAbzADyWfWV84h862StlR-a-2QOwca58hL4A,2975
- dtlpy-1.94.5.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- dtlpy-1.94.5.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
- dtlpy-1.94.5.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
- dtlpy-1.94.5.dist-info/RECORD,,
+ dtlpy-1.96.8.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+ dtlpy-1.96.8.dist-info/METADATA,sha256=disS6SF9AJSCxx0jFfo2PrNCo1LBMmHHpAQbfyXz5OU,3002
+ dtlpy-1.96.8.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ dtlpy-1.96.8.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+ dtlpy-1.96.8.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+ dtlpy-1.96.8.dist-info/RECORD,,