dtlpy 1.107.8__py3-none-any.whl → 1.109.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. dtlpy/__init__.py +1 -7
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +5 -4
  4. dtlpy/entities/annotation.py +28 -57
  5. dtlpy/entities/annotation_definitions/base_annotation_definition.py +6 -14
  6. dtlpy/entities/app.py +1 -1
  7. dtlpy/entities/command.py +10 -7
  8. dtlpy/entities/compute.py +77 -94
  9. dtlpy/entities/dataset.py +29 -14
  10. dtlpy/entities/dpk.py +1 -0
  11. dtlpy/entities/filters.py +7 -6
  12. dtlpy/entities/item.py +7 -14
  13. dtlpy/entities/node.py +0 -12
  14. dtlpy/entities/service.py +0 -9
  15. dtlpy/entities/service_driver.py +118 -0
  16. dtlpy/entities/trigger.py +1 -1
  17. dtlpy/new_instance.py +1 -1
  18. dtlpy/repositories/__init__.py +2 -1
  19. dtlpy/repositories/apps.py +8 -4
  20. dtlpy/repositories/collections.py +86 -34
  21. dtlpy/repositories/commands.py +14 -4
  22. dtlpy/repositories/computes.py +173 -127
  23. dtlpy/repositories/datasets.py +20 -9
  24. dtlpy/repositories/downloader.py +20 -8
  25. dtlpy/repositories/dpks.py +26 -1
  26. dtlpy/repositories/items.py +5 -2
  27. dtlpy/repositories/service_drivers.py +213 -0
  28. dtlpy/repositories/services.py +6 -0
  29. dtlpy/repositories/uploader.py +4 -0
  30. dtlpy-1.109.19.dist-info/METADATA +172 -0
  31. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/RECORD +39 -37
  32. tests/features/environment.py +16 -15
  33. dtlpy-1.107.8.dist-info/METADATA +0 -69
  34. {dtlpy-1.107.8.data → dtlpy-1.109.19.data}/scripts/dlp +0 -0
  35. {dtlpy-1.107.8.data → dtlpy-1.109.19.data}/scripts/dlp.bat +0 -0
  36. {dtlpy-1.107.8.data → dtlpy-1.109.19.data}/scripts/dlp.py +0 -0
  37. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/LICENSE +0 -0
  38. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/WHEEL +0 -0
  39. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/entry_points.txt +0 -0
  40. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/top_level.txt +0 -0
dtlpy/entities/dataset.py CHANGED
@@ -626,8 +626,9 @@ class Dataset(entities.BaseEntity):
  alpha=1,
  export_version=ExportVersion.V1,
  dataset_lock=False,
- lock_timeout_sec=None
- ):
+ lock_timeout_sec=None,
+ export_summary=False,
+ ):
  """
  Download dataset by filters.
  Filtering the dataset for items and save them local
@@ -641,6 +642,7 @@
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
  :param bool overwrite: optional - default = False
  :param bool dataset_lock: optional - default = False
+ :param bool export_summary: optional - default = False
  :param int lock_timeout_sec: optional
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
  :param bool with_text: optional - add text to annotations, default = False
@@ -664,9 +666,10 @@
  thickness=1,
  with_text=False,
  alpha=1,
- dataset_lock=False
- lock_timeout_sec=300
- )
+ dataset_lock=False,
+ lock_timeout_sec=300,
+ export_summary=False
+ )
  """

  return self.datasets.download_annotations(
@@ -685,7 +688,8 @@
  alpha=alpha,
  export_version=export_version,
  dataset_lock=dataset_lock,
- lock_timeout_sec=lock_timeout_sec
+ lock_timeout_sec=lock_timeout_sec,
+ export_summary=export_summary
  )

  def export(self,
@@ -698,7 +702,8 @@
  export_type: ExportType = ExportType.JSON,
  timeout: int = 0,
  dataset_lock: bool = False,
- lock_timeout_sec: int = None):
+ lock_timeout_sec: int = None,
+ export_summary: bool = False):
  """
  Export dataset items and annotations.

@@ -713,6 +718,7 @@
  :param bool include_feature_vectors: Include item feature vectors in the export
  :param bool include_annotations: Include item annotations in the export
  :param bool dataset_lock: Make dataset readonly during the export
+ :param bool export_summary: Download dataset export summary
  :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
  :param entities.ExportType export_type: Type of export ('json' or 'zip')
  :param int timeout: Maximum time in seconds to wait for the export to complete
@@ -739,7 +745,8 @@
  export_type=export_type,
  timeout=timeout,
  dataset_lock=dataset_lock,
- lock_timeout_sec=lock_timeout_sec)
+ lock_timeout_sec=lock_timeout_sec,
+ export_summary=export_summary)

  def upload_annotations(self,
  local_path,
@@ -975,7 +982,8 @@
  alpha=1,
  export_version=ExportVersion.V1,
  dataset_lock=False,
- lock_timeout_sec=None
+ lock_timeout_sec=None,
+ export_summary=False,
  ):
  """
  Download dataset by filters.
@@ -991,6 +999,7 @@
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
  :param bool overwrite: optional - default = False to overwrite the existing files
  :param bool dataset_lock: optional - default = False to make dataset readonly during the download
+ :param bool export_summary: optional - default = False to get the summary of the export
  :param int lock_timeout_sec: optional - Set lock timeout for the export
  :param bool to_items_folder: Create 'items' folder and download items to it
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -1011,7 +1020,8 @@
  with_text=False,
  alpha=1,
  dataset_lock=False,
- lock_timeout_sec=300
+ lock_timeout_sec=300,
+ export_summary=False
  )
  """
  return self.items.download(filters=filters,
@@ -1027,7 +1037,8 @@
  alpha=alpha,
  export_version=export_version,
  dataset_lock=dataset_lock,
- lock_timeout_sec=lock_timeout_sec
+ lock_timeout_sec=lock_timeout_sec,
+ export_summary=export_summary
  )

  def download_folder(
@@ -1046,7 +1057,8 @@
  alpha=1,
  export_version=ExportVersion.V1,
  dataset_lock=False,
- lock_timeout_sec=None
+ lock_timeout_sec=None,
+ export_summary=False,
  ):
  """
  Download dataset folder.
@@ -1062,6 +1074,7 @@
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
  :param bool overwrite: optional - default = False to overwrite the existing files
  :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
+ :param bool export_summary: optional - default = False to get the summary of the export
  :param bool lock_timeout_sec: optional - Set lock timeout for the export
  :param bool to_items_folder: Create 'items' folder and download items to it
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -1084,7 +1097,8 @@
  alpha=1,
  save_locally=True,
  dataset_lock=False
- lock_timeout_sec=300
+ lock_timeout_sec=300,
+ export_summary=False
  )
  """
  filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
@@ -1101,7 +1115,8 @@
  alpha=alpha,
  export_version=export_version,
  dataset_lock=dataset_lock,
- lock_timeout_sec=lock_timeout_sec
+ lock_timeout_sec=lock_timeout_sec,
+ export_summary=export_summary
  )

  def delete_labels(self, label_names):
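Every download and export entry point in this file gains the same opt-in export_summary flag. A minimal usage sketch, assuming an authenticated SDK session; the dataset ID and local path are placeholders, and the call mirrors the updated docstring example:

    import dtlpy as dl

    # export_summary is new in this release; it requests the export summary
    # alongside the annotations (dataset_lock/lock_timeout_sec as before).
    dataset = dl.datasets.get(dataset_id='dataset-id-placeholder')
    dataset.download_annotations(
        local_path='/path/to/annotations',
        dataset_lock=False,
        lock_timeout_sec=300,
        export_summary=True,
    )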
dtlpy/entities/dpk.py CHANGED
@@ -43,6 +43,7 @@ class Slot(entities.DlEntity):


  class Toolbar(entities.DlEntity):
+ name: str = entities.DlProperty(location=['name'], _type=str)
  display_name: str = entities.DlProperty(location=['displayName'], _type=str)
  conditions: dict = entities.DlProperty(location=['conditions'], _type=dict)
  invoke: dict = entities.DlProperty(location=['invoke'], _type=dict)
dtlpy/entities/filters.py CHANGED
@@ -46,7 +46,9 @@ class FiltersResource(str, Enum):
  DRIVERS = 'drivers'
  SETTINGS = 'setting'
  RESOURCE_EXECUTION = 'resourceExecution'
- METRICS = 'metrics'
+ METRICS = 'metrics',
+ SERVICE_DRIVER = 'serviceDrivers',
+ COMPUTE = 'compute'


  class FiltersOperations(str, Enum):
@@ -328,11 +330,10 @@ class Filters:
  # add annotations defaults
  elif self.resource == FiltersResource.ANNOTATION:
  self._unique_fields = ['type']
- self.add(field='type',
- values=['box', 'class', 'comparison', 'ellipse', 'point', 'segment', 'polyline', 'binary',
- 'subtitle', 'cube', 'cube_3d', 'pose', 'text_mark', 'text', 'ref_image', 'gis'],
- operator=FiltersOperations.IN,
- method=FiltersMethod.AND)
+ values = [annotation_type.value for annotation_type in entities.AnnotationType]
+ values.remove(entities.AnnotationType.NOTE.value)
+ values += ["text", "ref_image"] # Prompt Annotation Types
+ self.add(field='type', values=values, operator=FiltersOperations.IN, method=FiltersMethod.AND)

  def __generate_query(self):
  filters_dict = dict()
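The rewritten default above builds the annotation-type list from the AnnotationType enum instead of a hard-coded literal, dropping only NOTE and re-adding the two prompt-only types. A sketch of the equivalent logic, assuming dtlpy is importable as dl and the enum matches this release:

    import dtlpy as dl

    # Reproduce the new default type filter outside the Filters class.
    values = [annotation_type.value for annotation_type in dl.AnnotationType]
    values.remove(dl.AnnotationType.NOTE.value)
    values += ['text', 'ref_image']  # prompt annotation types, not enum members
    print(values)  # e.g. ['box', 'class', ..., 'text', 'ref_image']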
dtlpy/entities/item.py CHANGED
@@ -455,7 +455,8 @@ class Item(entities.BaseEntity):
  alpha=1,
  export_version=ExportVersion.V1,
  dataset_lock=False,
- lock_timeout_sec=None
+ lock_timeout_sec=None,
+ export_summary=False,
  ):
  """
  Download dataset by filters.
@@ -470,6 +471,7 @@
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
  :param bool overwrite: optional - default = False
  :param bool dataset_lock: optional - default = False
+ :param bool export_summary: optional - default = False
  :param int lock_timeout_sec: optional
  :param bool to_items_folder: Create 'items' folder and download items to it
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -491,7 +493,8 @@
  alpha=1,
  save_locally=True,
  dataset_lock=False
- lock_timeout_sec=300
+ lock_timeout_sec=300,
+ export_summary=False
  )
  """
  # if dir - concatenate local path and item name
@@ -527,7 +530,8 @@
  export_version=export_version,
  filters=filters,
  dataset_lock=dataset_lock,
- lock_timeout_sec=lock_timeout_sec)
+ lock_timeout_sec=lock_timeout_sec,
+ export_summary=export_summary)

  def delete(self):
  """
@@ -821,17 +825,6 @@
  {"key": key, "name": self.collections.get_name_by_key(key)}
  for key in collections.keys()
  ]
-
- def list_missing_collections(self) -> List[str]:
- """
- List all items in the dataset that are not assigned to any collection.
-
- :return: A list of item IDs that are not part of any collection.
- """
- filters = entities.Filters()
- filters.add(field='metadata.system.collections', values=None)
- filters.add(field='datasetId', values=self._dataset.id)
- return self._dataset.items.list(filters=filters)

  def task_scores(self, task_id: str, page_offset: int = None, page_size: int = None):
  """
dtlpy/entities/node.py CHANGED
@@ -91,7 +91,6 @@ class PipelineNodeIO:
  port_id: str = None,
  color: tuple = None,
  port_percentage: int = None,
- action: str = None,
  default_value=None,
  variable_name: str = None,
  actions: list = None,
@@ -118,19 +117,8 @@
  self.default_value = default_value
  self.variable_name = variable_name
  self.description = description
-
- if action is not None:
- warnings.warn('action param has been deprecated in version 1.95', DeprecationWarning)
- if actions is None:
- actions = []
- actions.append(action)
  self.actions = actions

- @property
- def action(self):
- warnings.warn('action attribute has been deprecated in version 1.95', DeprecationWarning)
- return None
-
  @staticmethod
  def from_json(_json: dict):
  return PipelineNodeIO(
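The deprecated action keyword and its shim (which folded the single value into actions) are removed outright rather than kept behind warnings. A hypothetical migration sketch; the input_type/name fields are illustrative, and only the actions change is the point:

    import dtlpy as dl

    # Before (deprecated since 1.95, now rejected):
    #   dl.PipelineNodeIO(..., action='approve')
    # After: pass the list-valued `actions` instead.
    io = dl.PipelineNodeIO(
        input_type=dl.PackageInputType.ITEM,  # illustrative fields
        name='item',
        display_name='item',
        actions=['approve'],  # placeholder action name
    )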
dtlpy/entities/service.py CHANGED
@@ -869,20 +869,11 @@ class KubernetesAutoscalerType(str, Enum):
  class KubernetesAutuscalerTypeMeta(type):
  def __getattribute__(cls, item):
  if hasattr(KubernetesAutoscalerType, item):
- warnings.warn(
- 'KubernetesAutuscalerType is deprecated and will be removed in version 1.97.0, '
- 'use KubernetesAutoscalerType instead',
- DeprecationWarning
- )
  return getattr(KubernetesAutoscalerType, item)
  else:
  raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")


- class KubernetesAutuscalerType(metaclass=KubernetesAutuscalerTypeMeta):
- pass
-
-
  class KubernetesAutoscaler(entities.BaseEntity):
  MIN_REPLICA_DEFAULT = 0
  MAX_REPLICA_DEFAULT = 1
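With the misspelled KubernetesAutuscalerType shim gone (its export is also dropped from new_instance.py below), only the correctly spelled enum remains. A one-line check, assuming the enum members themselves are unchanged:

    import dtlpy as dl

    # dl.KubernetesAutuscalerType no longer exists; use the real enum.
    print(list(dl.KubernetesAutoscalerType))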
dtlpy/entities/service_driver.py ADDED
@@ -0,0 +1,118 @@
+ import traceback
+ from typing import Dict
+ from ..services.api_client import ApiClient
+ from .. import repositories
+ from .compute import ComputeContext, ComputeType
+
+ class ServiceDriver:
+ def __init__(
+ self,
+ name: str,
+ context: ComputeContext,
+ compute_id: str,
+ client_api: ApiClient,
+ type: ComputeType = None,
+ created_at: str = None,
+ updated_at: str = None,
+ namespace: str = None,
+ metadata: Dict = None,
+ url: str = None,
+ archived: bool = None,
+ id: str = None,
+ is_cache_available: bool = None
+ ):
+ self.name = name
+ self.context = context
+ self.compute_id = compute_id
+ self.client_api = client_api
+ self.type = type or ComputeType.KUBERNETES
+ self.created_at = created_at
+ self.updated_at = updated_at
+ self.namespace = namespace
+ self.metadata = metadata
+ self.url = url
+ self.archived = archived
+ self.id = id
+ self.is_cache_available = is_cache_available
+ self._service_drivers = None
+ self._client_api = client_api
+
+ @property
+ def service_drivers(self):
+ if self._service_drivers is None:
+ self._service_drivers = repositories.ServiceDrivers(client_api=self._client_api)
+ return self._service_drivers
+
+ @staticmethod
+ def _protected_from_json(_json: dict, client_api: ApiClient):
+ """
+ Same as from_json but with try-except to catch if error
+
+ :param _json: platform json
+ :param client_api: ApiClient entity
+ :return:
+ """
+ try:
+ service = ServiceDriver.from_json(_json=_json,
+ client_api=client_api)
+ status = True
+ except Exception:
+ service = traceback.format_exc()
+ status = False
+ return status, service
+
+ @classmethod
+ def from_json(cls, _json, client_api: ApiClient):
+ return cls(
+ name=_json.get('name'),
+ context=ComputeContext.from_json(_json.get('context', dict())),
+ compute_id=_json.get('computeId'),
+ client_api=client_api,
+ type=_json.get('type', None),
+ created_at=_json.get('createdAt', None),
+ updated_at=_json.get('updatedAt', None),
+ namespace=_json.get('namespace', None),
+ metadata=_json.get('metadata', None),
+ url=_json.get('url', None),
+ archived=_json.get('archived', None),
+ id=_json.get('id', None),
+ is_cache_available=_json.get('isCacheAvailable', None)
+ )
+
+ def to_json(self):
+ _json = {
+ 'name': self.name,
+ 'context': self.context.to_json(),
+ 'computeId': self.compute_id,
+ 'type': self.type,
+ }
+ if self.created_at is not None:
+ _json['createdAt'] = self.created_at
+ if self.updated_at is not None:
+ _json['updatedAt'] = self.updated_at
+ if self.namespace is not None:
+ _json['namespace'] = self.namespace
+ if self.metadata is not None:
+ _json['metadata'] = self.metadata
+ if self.url is not None:
+ _json['url'] = self.url
+ if self.archived is not None:
+ _json['archived'] = self.archived
+ if self.id is not None:
+ _json['id'] = self.id
+ if self.is_cache_available is not None:
+ _json['isCacheAvailable'] = self.is_cache_available
+
+ return _json
+
+ def delete(self):
+ """
+ Delete a service driver
+ """
+ return self.service_drivers.delete(service_driver_id=self.id)
+
+ def update(self):
+ """
+ Update a service driver
+ """
+ return self.service_drivers.update(service_driver=self)
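A minimal round-trip sketch for the new entity, assuming the module lands at dtlpy/entities/service_driver.py as the file list indicates and that ComputeContext.from_json tolerates an empty payload; client_api=None stands in for a real ApiClient, and the payload keys follow from_json above:

    from dtlpy.entities.service_driver import ServiceDriver

    payload = {
        'name': 'my-driver',        # hypothetical values throughout
        'context': {},              # parsed by ComputeContext.from_json
        'computeId': 'compute-123',
        'type': 'kubernetes',
    }
    driver = ServiceDriver.from_json(_json=payload, client_api=None)
    assert driver.to_json()['computeId'] == 'compute-123'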
dtlpy/entities/trigger.py CHANGED
@@ -343,7 +343,7 @@ class Trigger(BaseTrigger):
  filters=spec.get('filter', dict()),
  project=project,
  service=service,
- id=_json['id'],
+ id=_json.get('id', None),
  op_type=operation.get('type', None),
  spec=spec,
  pipeline_id=pipeline_id,
dtlpy/new_instance.py CHANGED
@@ -22,7 +22,7 @@ class Dtlpy:
  # triggers
  TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
  # faas
- FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+ FunctionIO, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
  InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
  PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
  # roberto
dtlpy/repositories/__init__.py CHANGED
@@ -51,5 +51,6 @@ from .dpks import Dpks
  from .messages import Messages
  from .compositions import Compositions
  from .schema import Schema
- from .computes import Computes, ServiceDrivers
+ from .computes import Computes
+ from .service_drivers import ServiceDrivers
  from .collections import Collections
dtlpy/repositories/apps.py CHANGED
@@ -8,17 +8,21 @@ logger = logging.getLogger(name='dtlpy')

  class Apps:

- def __init__(self, client_api: ApiClient, project: entities.Project = None):
+ def __init__(self, client_api: ApiClient, project: entities.Project = None, project_id: str = None):
  self._client_api = client_api
  self._project = project
+ self._project_id = project_id
  self._commands = None

  @property
  def project(self) -> entities.Project:
  if self._project is None:
- raise exceptions.PlatformException(
- error='2001',
- message='Missing "project". need to set a Project entity or use project.apps repository')
+ if self._project_id is None:
+ raise exceptions.PlatformException(
+ error='2001',
+ message='Missing "project". need to set a Project entity or use project.apps repository')
+ else:
+ self._project = repositories.Projects(client_api=self._client_api).get(project_id=self._project_id)
  assert isinstance(self._project, entities.Project)
  return self._project
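The repository can now resolve its project lazily from an ID instead of failing immediately. A sketch, assuming an authenticated session; dl.client_api is the SDK's global ApiClient and the project ID is a placeholder:

    import dtlpy as dl
    from dtlpy.repositories import Apps

    # With only project_id, .project fetches the entity on first access
    # via Projects.get instead of raising PlatformException 2001.
    apps = Apps(client_api=dl.client_api, project_id='project-id-placeholder')
    project = apps.project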