dtlpy 1.91.37__py3-none-any.whl → 1.92.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. dtlpy/__init__.py +5 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +1 -1
  4. dtlpy/entities/command.py +3 -2
  5. dtlpy/entities/dataset.py +52 -2
  6. dtlpy/entities/feature_set.py +3 -0
  7. dtlpy/entities/filters.py +2 -2
  8. dtlpy/entities/item.py +15 -1
  9. dtlpy/entities/node.py +11 -1
  10. dtlpy/entities/ontology.py +36 -40
  11. dtlpy/entities/pipeline.py +20 -1
  12. dtlpy/entities/pipeline_execution.py +23 -0
  13. dtlpy/entities/prompt_item.py +240 -37
  14. dtlpy/entities/service.py +5 -5
  15. dtlpy/ml/base_model_adapter.py +101 -41
  16. dtlpy/new_instance.py +80 -9
  17. dtlpy/repositories/apps.py +56 -10
  18. dtlpy/repositories/commands.py +10 -2
  19. dtlpy/repositories/datasets.py +142 -12
  20. dtlpy/repositories/dpks.py +5 -1
  21. dtlpy/repositories/feature_sets.py +23 -3
  22. dtlpy/repositories/models.py +1 -1
  23. dtlpy/repositories/pipeline_executions.py +53 -0
  24. dtlpy/repositories/uploader.py +3 -0
  25. dtlpy/services/api_client.py +59 -3
  26. {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/METADATA +1 -1
  27. {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/RECORD +35 -38
  28. tests/features/environment.py +29 -0
  29. dtlpy/callbacks/__init__.py +0 -16
  30. dtlpy/callbacks/piper_progress_reporter.py +0 -29
  31. dtlpy/callbacks/progress_viewer.py +0 -54
  32. {dtlpy-1.91.37.data → dtlpy-1.92.19.data}/scripts/dlp +0 -0
  33. {dtlpy-1.91.37.data → dtlpy-1.92.19.data}/scripts/dlp.bat +0 -0
  34. {dtlpy-1.91.37.data → dtlpy-1.92.19.data}/scripts/dlp.py +0 -0
  35. {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/LICENSE +0 -0
  36. {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/WHEEL +0 -0
  37. {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/entry_points.txt +0 -0
  38. {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py CHANGED
@@ -35,7 +35,8 @@ if len(logger.handlers) == 0:
         datefmt="%Y-%m-%d %H:%M:%S",
     )
     file_formatter = logging.Formatter(
-        fmt="[%(asctime)s.%(msecs)03d][%(threadName)s][%(levelname).3s][%(name)s:v" + __version__ + "][%(relativepath)-s:%(lineno)-d](%(funcName)-s): %(message)s",
+        fmt="[%(asctime)s.%(msecs)03d][%(threadName)s][%(levelname).3s][%(name)s:v" +
+            __version__ + "][%(relativepath)-s:%(lineno)-d](%(funcName)-s): %(message)s",
         datefmt="%Y-%m-%d %H:%M:%S",
     )
 package_path = os.path.dirname(__file__)
@@ -80,7 +81,8 @@ from .entities import (
     InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
     PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
     # roberto
-    DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact, EntityScopeLevel,
+    DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact,
+    EntityScopeLevel,
     # features
     FeatureEntityType, Feature, FeatureSet,
     #
@@ -245,6 +247,7 @@ class LoggingLevel:
 #################
 # ENUMS #
 #################
+CallbackEvent = client_api.callbacks.CallbackEvent
 LOGGING_LEVEL_DEBUG = LoggingLevel.DEBUG
 LOGGING_LEVEL_WARNING = LoggingLevel.WARNING
 LOGGING_LEVEL_CRITICAL = LoggingLevel.CRITICAL
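
`CallbackEvent` is now re-exported at the top level of the `dl` namespace, straight from `client_api.callbacks`. A minimal sketch, assuming it is a standard `Enum` (its members are not shown in this diff, so the loop only lists whatever the installed version defines):

    import dtlpy as dl

    # dl.CallbackEvent is just an alias for client_api.callbacks.CallbackEvent;
    # enumerate the callback events the installed SDK exposes.
    for event in dl.CallbackEvent:
        print(event.name, event.value)
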
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
-version = '1.91.37'
+version = '1.92.19'
dtlpy/entities/__init__.py CHANGED
@@ -22,7 +22,7 @@ from .trigger import Trigger, TriggerResource, TriggerAction, TriggerExecutionMode, \
     TriggerType
 from .project import Project, MemberRole
 from .artifact import ItemArtifact, LocalArtifact, LinkArtifact, ArtifactType, Artifact
-from .dataset import Dataset, ExpirationOptions, IndexDriver
+from .dataset import Dataset, ExpirationOptions, IndexDriver, ExportType
 from .codebase import Codebase
 from .annotation import Annotation, FrameAnnotation, ViewAnnotationOptions, AnnotationStatus, AnnotationType, \
     ExportVersion
dtlpy/entities/command.py CHANGED
@@ -141,7 +141,7 @@ class Command(entities.BaseEntity):
141
141
  entities.CommandsStatus.FINALIZING,
142
142
  entities.CommandsStatus.IN_PROGRESS]
143
143
 
144
- def wait(self, timeout=0, step=None, backoff_factor=1):
144
+ def wait(self, timeout=0, step=None, backoff_factor=1):
145
145
  """
146
146
  Wait for Command to finish
147
147
 
@@ -157,4 +157,5 @@ class Command(entities.BaseEntity):
157
157
  timeout=timeout,
158
158
  step=step,
159
159
  url=self.url,
160
- backoff_factor=backoff_factor)
160
+ backoff_factor=backoff_factor
161
+ )
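
The only functional change here is formatting, but the `backoff_factor` parameter in the signature is easy to miss. A usage sketch, assuming `command` is a `Command` entity returned by some asynchronous platform operation (variable names hypothetical):

    # Poll until the command finishes, waiting up to 10 minutes. With step=2 and
    # backoff_factor=2 the polling interval grows geometrically (2s, 4s, 8s, ...)
    # instead of hitting the API at a fixed rate.
    command = command.wait(timeout=600, step=2, backoff_factor=2)
    print(command.status)
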
dtlpy/entities/dataset.py CHANGED
@@ -18,6 +18,11 @@ class IndexDriver(str, Enum):
     V2 = "v2"


+class ExportType(str, Enum):
+    JSON = "json"
+    ZIP = "zip"
+
+
 class ExpirationOptions:
     """
     ExpirationOptions object
@@ -58,7 +63,6 @@ class Dataset(entities.BaseEntity):
     items_count = attr.ib()
     metadata = attr.ib(repr=False)
     directoryTree = attr.ib(repr=False)
-    export = attr.ib(repr=False)
     expiration_options = attr.ib()
     index_driver = attr.ib()
     enable_sync_with_cloned = attr.ib(repr=False)
@@ -165,7 +169,6 @@ class Dataset(entities.BaseEntity):
             projects=projects,
             creator=_json.get('creator', None),
             items_url=_json.get('items', None),
-            export=_json.get('export', None),
             driver=_json.get('driver', None),
             name=_json.get('name', None),
             url=_json.get('url', None),
@@ -653,6 +656,53 @@ class Dataset(entities.BaseEntity):
             export_version=export_version
         )

+    def export(self,
+               local_path=None,
+               filters=None,
+               annotation_filters=None,
+               feature_vector_filters=None,
+               include_feature_vectors: bool = False,
+               include_annotations: bool = False,
+               export_type: ExportType = ExportType.JSON,
+               timeout: int = 0):
+        """
+        Export dataset items and annotations.
+
+        **Prerequisites**: You must be an *owner* or *developer* to use this method.
+
+        You must provide at least ONE of the following params: dataset, dataset_name, dataset_id.
+
+        :param str local_path: The local path to save the exported dataset
+        :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
+        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity
+        :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
+        :param bool include_feature_vectors: Include item feature vectors in the export
+        :param bool include_annotations: Include item annotations in the export
+        :param entities.ExportType export_type: Type of export ('json' or 'zip')
+        :param int timeout: Maximum time in seconds to wait for the export to complete
+        :return: Exported item
+        :rtype: dtlpy.entities.item.Item
+
+        **Example**:
+
+        .. code-block:: python
+
+            export_item = dataset.export(filters=filters,
+                                         include_feature_vectors=True,
+                                         include_annotations=True,
+                                         export_type=dl.ExportType.JSON)
+        """
+
+        return self.datasets.export(dataset=self,
+                                    local_path=local_path,
+                                    filters=filters,
+                                    annotation_filters=annotation_filters,
+                                    feature_vector_filters=feature_vector_filters,
+                                    include_feature_vectors=include_feature_vectors,
+                                    include_annotations=include_annotations,
+                                    export_type=export_type,
+                                    timeout=timeout)
+
     def upload_annotations(self,
                            local_path,
                            filters=None,
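
The docstring example covers the defaults; a sketch exercising the remaining parameters of the new entity-level `export` (the id and path are hypothetical):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='5f4b5c8e...')   # hypothetical id
    filters = dl.Filters(field='dir', values='/train')

    # Export annotated items from /train as a single ZIP archive, waiting
    # up to 30 minutes for the export command to complete.
    export_item = dataset.export(local_path='./exports',
                                 filters=filters,
                                 include_annotations=True,
                                 export_type=dl.ExportType.ZIP,
                                 timeout=1800)
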
dtlpy/entities/feature_set.py CHANGED
@@ -31,6 +31,7 @@ class FeatureSet(entities.BaseEntity):
     set_type = attr.ib()
     entity_type = attr.ib()
     project_id = attr.ib()
+    model_id = attr.ib()
     org_id = attr.ib()

     # sdk
@@ -100,6 +101,7 @@ class FeatureSet(entities.BaseEntity):
             size=_json.get('size', None),
             url=_json.get('url', None),
             project_id=_json.get('project', None),
+            model_id=_json.get('modelId', None),
             created_at=_json.get('createdAt', None),
             creator=_json.get('creator', None),
             updated_by=_json.get('updatedBy', None),
@@ -121,6 +123,7 @@ class FeatureSet(entities.BaseEntity):
             'type': self.set_type,
             'entityType': self.entity_type,
             'project': self.project_id,
+            'modelId': self.model_id,
             'creator': self.creator,
             'createdAt': self.created_at,
             'updatedBy': self.updated_by,
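
The new `model_id` ties a feature set to the model that produced its vectors and round-trips through `from_json`/`to_json` as `modelId`. A quick check, assuming an existing feature set entity (the lookup call and name are hypothetical):

    feature_set = project.feature_sets.get(feature_set_name='clip-embeddings')  # hypothetical
    print(feature_set.model_id)       # populated from the 'modelId' json field
    assert feature_set.to_json().get('modelId') == feature_set.model_id
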
dtlpy/entities/filters.py CHANGED
@@ -296,9 +296,9 @@ class Filters:
             self.join = dict()
         if 'on' not in self.join:
             if self.resource == FiltersResource.ITEM:
-                self.join['on'] = {'resource': FiltersResource.ANNOTATION, 'local': 'itemId', 'forigen': 'id'}
+                self.join['on'] = {'resource': FiltersResource.ANNOTATION.value, 'local': 'itemId', 'forigen': 'id'}
             else:
-                self.join['on'] = {'resource': FiltersResource.ITEM, 'local': 'id', 'forigen': 'itemId'}
+                self.join['on'] = {'resource': FiltersResource.ITEM.value, 'local': 'id', 'forigen': 'itemId'}
         if 'filter' not in self.join:
             self.join['filter'] = dict()
         join_method = '$' + method
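
Storing the members' `.value` pins the wire format of the join clause. A plausible motivation, shown with a stand-in enum rather than the SDK's internals: a `str`/`Enum` mixin member stringifies to its member name, so any payload built through string conversion would leak `FiltersResource.ANNOTATION` instead of the plain resource name:

    from enum import Enum

    class FiltersResource(str, Enum):   # stand-in mirroring the SDK's mixin
        ITEM = 'items'
        ANNOTATION = 'annotations'

    print(str(FiltersResource.ANNOTATION))    # 'FiltersResource.ANNOTATION'
    print(FiltersResource.ANNOTATION.value)   # 'annotations'
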
dtlpy/entities/item.py CHANGED
@@ -6,11 +6,13 @@ import logging
 import attr
 import copy
 import os
-
+import io
 from .. import repositories, entities, exceptions
 from .annotation import ViewAnnotationOptions, ExportVersion
 from ..services.api_client import ApiClient
 from ..services.api_client import client as client_api
+import json
+import requests

 logger = logging.getLogger(name='dtlpy')

@@ -182,6 +184,18 @@ class Item(entities.BaseEntity):
     def model(self):
         return self._model

+    def __update_item_binary(self, _json):
+        binary = io.BytesIO()
+        binary.write(json.dumps(_json).encode())
+        binary.seek(0)
+        binary.name = self.name
+        resp = requests.post(url=client_api.environment + f'/items/{self.id}/revisions',
+                             headers=client_api.auth,
+                             files={'file': (binary.name, binary)}
+                             )
+        if not resp.ok:
+            raise ValueError(resp.text)
+
     @property
     def project(self):
         if self._project is None:
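
The new private helper posts an item revision as JSON without touching disk. The same in-memory multipart pattern in isolation (the item id and token are placeholders; the SDK derives the URL and headers from its ApiClient):

    import io
    import json
    import requests

    payload = {'metadata': {'user': {'reviewed': True}}}   # arbitrary JSON body
    binary = io.BytesIO(json.dumps(payload).encode())
    binary.name = 'item.json'                              # used as the upload filename

    resp = requests.post('https://gate.dataloop.ai/api/v1/items/<item_id>/revisions',
                         headers={'Authorization': 'Bearer <token>'},
                         files={'file': (binary.name, binary)})
    resp.raise_for_status()
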
dtlpy/entities/node.py CHANGED
@@ -221,6 +221,8 @@ class PipelineNode:
                  config: dict = None,
                  position: tuple = (1, 1),
                  app_id: str = None,
+                 dpk_name: str = None,
+                 app_name: str = None,
                  ):
         """
         :param str name: node name
@@ -234,6 +236,8 @@ class PipelineNode:
         :param dict config: for the code node dict in format { package: {code : the_code}}
         :param tuple position: tuple of the node place
         :param str app_id: app id
+        :param str dpk_name: dpk name
+        :param str app_name: app name
         """
         self.name = name
         self.node_id = node_id
@@ -246,6 +250,8 @@ class PipelineNode:
         self.config = config
         self.position = position
         self.app_id = app_id
+        self.dpk_name = dpk_name
+        self.app_name = app_name
         self._pipeline = None

     @property
@@ -297,7 +303,9 @@ class PipelineNode:
             project_id=_json.get('projectId', None),
             config=_json.get('config', None),
             position=position,
-            app_id=_json.get('appId', None)
+            app_id=_json.get('appId', None),
+            dpk_name=_json.get('dpkName', None),
+            app_name=_json.get('appName', None),
         )

     def to_json(self):
@@ -310,6 +318,8 @@ class PipelineNode:
             'type': self.node_type,
             'namespace': self.namespace.to_json(),
             'projectId': self.project_id,
+            'dpkName': self.dpk_name,
+            'appName': self.app_name,
         }
         if self.config is not None:
             _json['config'] = self.config
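
Both identifiers now survive serialization, presumably so a node's app can be resolved by name as well as by id. A quick round-trip check, assuming `node` is any existing `PipelineNode` (how you obtain it is up to you):

    node_json = node.to_json()
    # Emitted alongside 'appId' and read back by PipelineNode.from_json:
    print(node_json.get('dpkName'), node_json.get('appName'))
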
dtlpy/entities/ontology.py CHANGED
@@ -207,7 +207,14 @@ class Ontology(entities.BaseEntity):

     @property
     def _use_attributes_2(self):
-        return os.environ.get("USE_ATTRIBUTE_2", 'false') == 'true'
+        if isinstance(self.metadata, dict):
+            attributes = self.metadata.get("attributes", None)
+            if attributes is not None:
+                return True
+        else:
+            if isinstance(self.attributes, list) and len(self.attributes) > 0:
+                return False
+        return True

     @classmethod
     def from_json(cls, _json, client_api, recipe, dataset=None, project=None, is_fetched=True):
@@ -223,10 +230,9 @@
         :return: Ontology object
         :rtype: dtlpy.entities.ontology.Ontology
         """
-        if not os.environ.get("USE_ATTRIBUTE_2", 'false') == 'true':
-            attributes = _json.get("attributes", [])
-        else:
-            attributes = _json.get('metadata', {}).get("attributes", [])
+        attributes_v2 = _json.get('metadata', {}).get("attributes", [])
+        attributes_v1 = _json.get("attributes", [])
+        attributes = attributes_v2 if attributes_v2 else attributes_v1

         labels = list()
         for root in _json["roots"]:
@@ -744,10 +750,11 @@

     def copy_from(self, ontology_json: dict):
         """
-        Import ontology to the platform ('ontology' is taken before 'ontology_json')
+        Import ontology to the platform.\n
+        Notice: only the following fields will be updated: `labels`, `attributes`, `instance_map` and `color_map`.

-        :param dict ontology_json: ontology json
-        :return: Ontology object
+        :param dict ontology_json: The source ontology json to copy from
+        :return: Ontology object: The updated ontology entity
         :rtype: dtlpy.entities.ontology.Ontology

         **Example**:
@@ -756,49 +763,38 @@

             ontology = ontology.import_ontology(ontology_json=ontology_json)
         """
-        # TODO: Add support for import from ontology entity
-        ontology = self.from_json(_json=ontology_json, client_api=self._client_api, recipe=self.recipe)
-        attributes = ontology.attributes
-
-        # params
-        self.labels = ontology.labels
-        for key, value in ontology.metadata.items():
-            if key != "system":
-                self.metadata[key] = value
-
-        if attributes:
-            # Delete irrelevant attribute keys
-            attribute_keys = [attribute.get("key", None) for attribute in attributes]
-            to_delete_keys = [attribute.get("key", None) for attribute in self.attributes
-                              if attribute.get("key", None) not in attribute_keys]
-            self.delete_attributes(keys=to_delete_keys)
-
-            # Update attributes
-            for attribute in attributes:
-                attribute_range = attribute.get("range", None)
+        # TODO: Add support for import from ontology entity in the Future
+        if not self._use_attributes_2:
+            raise ValueError("This method is only supported for attributes 2 mode!")
+        new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api, recipe=self.recipe)
+
+        # Update 'labels' and 'attributes'
+        self.labels = new_ontology.labels
+        new_attributes = new_ontology.attributes
+        if isinstance(new_attributes, list):
+            for new_attribute in new_attributes:
+                attribute_range = new_attribute.get("range", None)
                 if attribute_range is not None:
                     attribute_range = entities.AttributesRange(
                         min_range=attribute_range.get("min", None),
                         max_range=attribute_range.get("max", None),
                         step=attribute_range.get("step", None)
                     )
-
-                script_data = attribute.get("scriptData", None)
+                script_data = new_attribute.get("scriptData", None)
                 if script_data is None:
-                    raise Exception(f"Attribute '{attribute.get('key')}' scriptData is missing in the ontology json!")
+                    new_attribute_key = new_attribute.get("key", None)
+                    raise Exception(f"Attribute '{new_attribute_key}' scriptData is missing in the ontology json!")
                 self.update_attributes(
                     title=script_data.get("title", None),
-                    key=attribute.get("key", None),
-                    attribute_type=attribute.get("type", None),
-                    scope=attribute.get("scope", None),
+                    key=new_attribute.get("key", None),
+                    attribute_type=new_attribute.get("type", None),
+                    scope=new_attribute.get("scope", None),
                     optional=script_data.get("optional", None),
-                    values=attribute.get("values", None),
+                    values=new_attribute.get("values", None),
                     attribute_range=attribute_range
                 )
-        else:
-            logger.warning("No attributes were found (Make sure that you use the correct attributes mode).")

-        # defaults
-        self._instance_map = ontology.instance_map
-        self._color_map = ontology.color_map
+        # Update 'instance map' and 'color map'
+        self._instance_map = new_ontology.instance_map
+        self._color_map = new_ontology.color_map
         return self.update(system_metadata=True)
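
A usage sketch for the rewritten `copy_from`; the target must be in attributes-2 mode, since the method now raises `ValueError` otherwise (the ontology lookups are hypothetical):

    source = source_dataset.recipes.list()[0].ontologies.list()[0]
    target = target_dataset.recipes.list()[0].ontologies.list()[0]

    # Copies labels, attributes, instance_map and color_map from the source
    # json, then persists the target via update(system_metadata=True).
    target = target.copy_from(ontology_json=source.to_json())
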
dtlpy/entities/pipeline.py CHANGED
@@ -456,7 +456,6 @@ class Pipeline(entities.BaseEntity):
         """
         return self.pipelines.update(pipeline=self)

-
     def delete(self):
         """
         Delete pipeline object
@@ -560,3 +559,23 @@ class Pipeline(entities.BaseEntity):
         else:
             self.start_nodes = [{"nodeId": node.node_id,
                                  "type": "root", }]
+
+    def update_variables_values(self, **kwargs):
+        """
+        Update pipeline variables values for the given keyword arguments.
+
+        **Example**:
+
+        .. code-block:: python
+            pipeline.update_variables_values(
+                dataset=dataset.id,
+                model=model.id,
+                threshold=0.9
+            )
+            pipeline.update()
+        """
+        keys = kwargs.keys()
+        for variable in self.variables:
+            if variable.name in keys:
+                variable.value = kwargs[variable.name]
+
dtlpy/entities/pipeline_execution.py CHANGED
@@ -11,6 +11,16 @@ from ..services.api_client import ApiClient
 logger = logging.getLogger(name='dtlpy')


+class PipelineExecutionStatus(str, Enum):
+    PENDING = "pending"
+    IN_PROGRESS = "in-progress"
+    FAILED = "failed"
+    SUCCESS = "success"
+    QUEUE = "queue"
+    TERMINATED = "terminated"
+    RERUN = "rerun"
+
+
 class CycleRerunMethod(str, Enum):
     START_FROM_NODES = 'startFromNodes',
     START_FROM_FAILED_EXECUTIONS = 'startFromFailedExecutions',
@@ -254,3 +264,16 @@ class PipelineExecution(entities.BaseEntity):
             filters=filters,
             wait=wait
         )
+
+    def wait(self):
+        """
+        Wait for pipeline execution
+
+        :return: Pipeline execution object
+        """
+        return self.pipeline_executions.wait(pipeline_execution_id=self.id)
+
+    def in_progress(self):
+        return self.status not in [PipelineExecutionStatus.FAILED,
+                                   PipelineExecutionStatus.SUCCESS,
+                                   PipelineExecutionStatus.TERMINATED]
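
Combined with the new status enum, the two helpers give a simple blocking flow: execute, wait, then branch on the terminal status. A sketch, assuming `pipeline` is an installed pipeline (the execution input is hypothetical):

    execution = pipeline.execute(execution_input={'item': item.id})
    execution = execution.wait()      # returns once the cycle leaves the running states

    # in_progress() is False for failed, success and terminated cycles.
    print('finished with status:', execution.status)
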