dtlpy 1.104.14__py3-none-any.whl → 1.106.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -176,11 +176,6 @@ try:
176
176
  except Exception:
177
177
  logger.debug("Failed to check SDK! Continue without")
178
178
 
179
- try:
180
- check_sdk.resolve_platform_settings(client_api=client_api, settings=settings)
181
- except Exception:
182
- pass
183
-
184
179
  verbose = client_api.verbose
185
180
  login = client_api.login
186
181
  logout = client_api.logout
@@ -195,7 +190,6 @@ info = client_api.info
195
190
  cache_state = client_api.cache_state
196
191
  attributes_mode = client_api.attributes_mode
197
192
  sdk_cache = client_api.sdk_cache
198
- platform_settings = client_api.platform_settings
199
193
 
200
194
 
201
195
  def get_secret(secret):
@@ -329,8 +323,6 @@ INSTANCE_CATALOG_HIGHMEM_XS = InstanceCatalog.HIGHMEM_XS
329
323
  INSTANCE_CATALOG_HIGHMEM_S = InstanceCatalog.HIGHMEM_S
330
324
  INSTANCE_CATALOG_HIGHMEM_M = InstanceCatalog.HIGHMEM_M
331
325
  INSTANCE_CATALOG_HIGHMEM_L = InstanceCatalog.HIGHMEM_L
332
- INSTANCE_CATALOG_GPU_K80_S = InstanceCatalog.GPU_K80_S
333
- INSTANCE_CATALOG_GPU_K80_M = InstanceCatalog.GPU_K80_M
334
326
  INSTANCE_CATALOG_GPU_T4_S = InstanceCatalog.GPU_T4_S
335
327
  INSTANCE_CATALOG_GPU_T4_M = InstanceCatalog.GPU_T4_M
336
328
 
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
1
- version = '1.104.14'
1
+ version = '1.106.5'
@@ -1634,6 +1634,17 @@ class Annotation(entities.BaseEntity):
1634
1634
 
1635
1635
  return _json
1636
1636
 
1637
+ def task_scores(self, task_id: str, page_offset: int = None, page_size: int = None):
1638
+ """
1639
+ Get the scores of the annotation in a specific task.
1640
+ :param task_id: The ID of the task.
1641
+ :param page_offset: The page offset.
1642
+ :param page_size: The page size.
1643
+ :return: page of scores
1644
+ """
1645
+ return self.annotations.task_scores(annotation_id=self.id ,task_id=task_id, page_offset=page_offset, page_size=page_size)
1646
+
1647
+
1637
1648
 
1638
1649
  @attr.s
1639
1650
  class FrameAnnotation(entities.BaseEntity):
@@ -1,9 +1,6 @@
1
- import copy
2
-
3
1
  import numpy as np
4
2
  from . import BaseAnnotationDefinition
5
3
  from .polygon import Polygon
6
- import warnings
7
4
 
8
5
 
9
6
  class Box(BaseAnnotationDefinition):
@@ -45,72 +42,21 @@ class Box(BaseAnnotationDefinition):
45
42
  self.bottom_left = [left, bottom]
46
43
  self.bottom_right = [right, bottom]
47
44
  self.label = label
48
- self._four_points = self._rotate_around_point() if self.is_rotated else [self.top_left,
49
- self.bottom_left,
50
- self.bottom_right,
51
- self.top_right]
52
-
53
- @property
54
- def is_rotated(self):
55
- return self.angle is not None and self.angle != 0
56
45
 
57
46
  @property
58
47
  def x(self):
59
- if self._box_points_setting():
60
- return [x_point[0] for x_point in self._four_points]
61
48
  return [self.left, self.right]
62
49
 
63
50
  @property
64
51
  def y(self):
65
- if self._box_points_setting():
66
- return [y_point[1] for y_point in self._four_points]
67
52
  return [self.top, self.bottom]
68
53
 
69
54
  @property
70
55
  def geo(self):
71
- if self._box_points_setting():
72
- res = self._four_points
73
- else:
74
- res = [
75
- [self.left, self.top],
76
- [self.right, self.bottom]
77
- ]
78
- return res
79
-
80
- def _box_points_setting(self):
81
- res = False
82
- if self._annotation and self._annotation.item:
83
- item = self._annotation.item
84
- project_id = item.project_id if item.project_id else item.project.id
85
- settings_dict = item._client_api.platform_settings.settings.get('4ptBox', None)
86
- if settings_dict is not None:
87
- if project_id in settings_dict:
88
- res = settings_dict.get(project_id, None)
89
- elif '*' in settings_dict:
90
- res = settings_dict.get('*', None)
91
- return res
92
-
93
- def _rotate_points(self, points):
94
- angle = np.radians(self.angle)
95
- rotation_matrix = np.asarray([[np.cos(angle), -np.sin(angle)],
96
- [np.sin(angle), np.cos(angle)]])
97
- pts2 = np.asarray([rotation_matrix.dot(pt)[:2] for pt in points])
98
- return pts2
99
-
100
- def _translate(self, points, translate_x, translate_y=None):
101
- translation_matrix = np.asarray([[1, 0, translate_x],
102
- [0, 1, translate_y],
103
- [0, 0, 1]])
104
- pts2 = np.asarray([translation_matrix.dot(list(pt) + [1])[:2] for pt in points])
105
- return pts2
106
-
107
- def _rotate_around_point(self):
108
- points = copy.deepcopy(self.four_points)
109
- center = [((self.left + self.right) / 2), ((self.top + self.bottom) / 2)]
110
- centerized = self._translate(points, -center[0], -center[1])
111
- rotated = self._rotate_points(centerized)
112
- moved = self._translate(rotated, center[0], center[1])
113
- return moved
56
+ return [
57
+ [self.left, self.top],
58
+ [self.right, self.bottom]
59
+ ]
114
60
 
115
61
  @property
116
62
  def four_points(self):
@@ -140,10 +86,7 @@ class Box(BaseAnnotationDefinition):
140
86
  thickness = 2
141
87
 
142
88
  # draw annotation
143
- if self.is_rotated:
144
- points = self._rotate_around_point()
145
- else:
146
- points = self.four_points
89
+ points = self.four_points
147
90
 
148
91
  # create image to draw on
149
92
  if alpha != 1:
dtlpy/entities/compute.py CHANGED
@@ -20,6 +20,7 @@ class ComputeStatus(str, Enum):
20
20
  READY = "ready"
21
21
  INITIALIZING = "initializing"
22
22
  PAUSE = "pause"
23
+ FAILED = "failed"
23
24
 
24
25
 
25
26
  class Toleration:
dtlpy/entities/dataset.py CHANGED
@@ -516,6 +516,23 @@ class Dataset(entities.BaseEntity):
516
516
  return self.datasets.update(dataset=self,
517
517
  system_metadata=system_metadata)
518
518
 
519
+ def unlock(self):
520
+ """
521
+ Unlock dataset
522
+
523
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
524
+
525
+ :return: Dataset object
526
+ :rtype: dtlpy.entities.dataset.Dataset
527
+
528
+ **Example**:
529
+
530
+ .. code-block:: python
531
+
532
+ dataset = dataset.unlock()
533
+ """
534
+ return self.datasets.unlock(dataset=self)
535
+
519
536
  def set_readonly(self, state: bool):
520
537
  """
521
538
  Set dataset readonly mode
@@ -607,7 +624,8 @@ class Dataset(entities.BaseEntity):
607
624
  export_png_files=False,
608
625
  filter_output_annotations=False,
609
626
  alpha=1,
610
- export_version=ExportVersion.V1
627
+ export_version=ExportVersion.V1,
628
+ dataset_lock=False
611
629
  ):
612
630
  """
613
631
  Download dataset by filters.
@@ -621,6 +639,7 @@ class Dataset(entities.BaseEntity):
621
639
  :param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
622
640
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
623
641
  :param bool overwrite: optional - default = False
642
+ :param bool dataset_lock: optional - default = False
624
643
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
625
644
  :param bool with_text: optional - add text to annotations, default = False
626
645
  :param str remote_path: DEPRECATED and ignored
@@ -642,7 +661,8 @@ class Dataset(entities.BaseEntity):
642
661
  overwrite=False,
643
662
  thickness=1,
644
663
  with_text=False,
645
- alpha=1
664
+ alpha=1,
665
+ dataset_lock=False
646
666
  )
647
667
  """
648
668
 
@@ -660,7 +680,8 @@ class Dataset(entities.BaseEntity):
660
680
  export_png_files=export_png_files,
661
681
  filter_output_annotations=filter_output_annotations,
662
682
  alpha=alpha,
663
- export_version=export_version
683
+ export_version=export_version,
684
+ dataset_lock=dataset_lock
664
685
  )
665
686
 
666
687
  def export(self,
@@ -671,7 +692,8 @@ class Dataset(entities.BaseEntity):
671
692
  include_feature_vectors: bool = False,
672
693
  include_annotations: bool = False,
673
694
  export_type: ExportType = ExportType.JSON,
674
- timeout: int = 0):
695
+ timeout: int = 0,
696
+ dataset_lock: bool = False):
675
697
  """
676
698
  Export dataset items and annotations.
677
699
 
@@ -685,6 +707,7 @@ class Dataset(entities.BaseEntity):
685
707
  :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
686
708
  :param bool include_feature_vectors: Include item feature vectors in the export
687
709
  :param bool include_annotations: Include item annotations in the export
710
+ :param bool dataset_lock: Make dataset readonly during the export
688
711
  :param entities.ExportType export_type: Type of export ('json' or 'zip')
689
712
  :param int timeout: Maximum time in seconds to wait for the export to complete
690
713
  :return: Exported item
@@ -708,7 +731,8 @@ class Dataset(entities.BaseEntity):
708
731
  include_feature_vectors=include_feature_vectors,
709
732
  include_annotations=include_annotations,
710
733
  export_type=export_type,
711
- timeout=timeout)
734
+ timeout=timeout,
735
+ dataset_lock=dataset_lock)
712
736
 
713
737
  def upload_annotations(self,
714
738
  local_path,
@@ -942,7 +966,8 @@ class Dataset(entities.BaseEntity):
942
966
  with_text=False,
943
967
  without_relative_path=None,
944
968
  alpha=1,
945
- export_version=ExportVersion.V1
969
+ export_version=ExportVersion.V1,
970
+ dataset_lock=False
946
971
  ):
947
972
  """
948
973
  Download dataset by filters.
@@ -957,6 +982,7 @@ class Dataset(entities.BaseEntity):
957
982
  :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
958
983
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
959
984
  :param bool overwrite: optional - default = False to overwrite the existing files
985
+ :param bool dataset_lock: optional - default = False to make dataset readonly during the download
960
986
  :param bool to_items_folder: Create 'items' folder and download items to it
961
987
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
962
988
  :param bool with_text: optional - add text to annotations, default = False
@@ -974,7 +1000,8 @@ class Dataset(entities.BaseEntity):
974
1000
  overwrite=False,
975
1001
  thickness=1,
976
1002
  with_text=False,
977
- alpha=1
1003
+ alpha=1,
1004
+ dataset_lock=False,
978
1005
  )
979
1006
  """
980
1007
  return self.items.download(filters=filters,
@@ -988,7 +1015,8 @@ class Dataset(entities.BaseEntity):
988
1015
  with_text=with_text,
989
1016
  without_relative_path=without_relative_path,
990
1017
  alpha=alpha,
991
- export_version=export_version)
1018
+ export_version=export_version,
1019
+ dataset_lock=dataset_lock)
992
1020
 
993
1021
  def download_folder(
994
1022
  self,
@@ -1004,7 +1032,8 @@ class Dataset(entities.BaseEntity):
1004
1032
  with_text=False,
1005
1033
  without_relative_path=None,
1006
1034
  alpha=1,
1007
- export_version=ExportVersion.V1
1035
+ export_version=ExportVersion.V1,
1036
+ dataset_lock=False
1008
1037
  ):
1009
1038
  """
1010
1039
  Download dataset folder.
@@ -1019,6 +1048,7 @@ class Dataset(entities.BaseEntity):
1019
1048
  :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
1020
1049
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
1021
1050
  :param bool overwrite: optional - default = False to overwrite the existing files
1051
+ :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
1022
1052
  :param bool to_items_folder: Create 'items' folder and download items to it
1023
1053
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
1024
1054
  :param bool with_text: optional - add text to annotations, default = False
@@ -1038,7 +1068,8 @@ class Dataset(entities.BaseEntity):
1038
1068
  thickness=1,
1039
1069
  with_text=False,
1040
1070
  alpha=1,
1041
- save_locally=True
1071
+ save_locally=True,
1072
+ dataset_lock=False
1042
1073
  )
1043
1074
  """
1044
1075
  filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
@@ -1053,7 +1084,8 @@ class Dataset(entities.BaseEntity):
1053
1084
  with_text=with_text,
1054
1085
  without_relative_path=without_relative_path,
1055
1086
  alpha=alpha,
1056
- export_version=export_version)
1087
+ export_version=export_version,
1088
+ dataset_lock=dataset_lock)
1057
1089
 
1058
1090
  def delete_labels(self, label_names):
1059
1091
  """
dtlpy/entities/item.py CHANGED
@@ -453,7 +453,8 @@ class Item(entities.BaseEntity):
453
453
  with_text=False,
454
454
  annotation_filters=None,
455
455
  alpha=1,
456
- export_version=ExportVersion.V1
456
+ export_version=ExportVersion.V1,
457
+ dataset_lock=False
457
458
  ):
458
459
  """
459
460
  Download dataset by filters.
@@ -467,6 +468,7 @@ class Item(entities.BaseEntity):
467
468
  :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
468
469
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
469
470
  :param bool overwrite: optional - default = False
471
+ :param bool dataset_lock: optional - default = False
470
472
  :param bool to_items_folder: Create 'items' folder and download items to it
471
473
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
472
474
  :param bool with_text: optional - add text to annotations, default = False
@@ -485,7 +487,8 @@ class Item(entities.BaseEntity):
485
487
  thickness=1,
486
488
  with_text=False,
487
489
  alpha=1,
488
- save_locally=True
490
+ save_locally=True,
491
+ dataset_lock=False
489
492
  )
490
493
  """
491
494
  # if dir - concatenate local path and item name
@@ -519,7 +522,8 @@ class Item(entities.BaseEntity):
519
522
  alpha=alpha,
520
523
  with_text=with_text,
521
524
  export_version=export_version,
522
- filters=filters)
525
+ filters=filters,
526
+ dataset_lock=dataset_lock)
523
527
 
524
528
  def delete(self):
525
529
  """
@@ -824,6 +828,15 @@ class Item(entities.BaseEntity):
824
828
  filters.add(field='metadata.system.collections', values=None)
825
829
  filters.add(field='datasetId', values=self._dataset.id)
826
830
  return self._dataset.items.list(filters=filters)
831
+
832
+ def task_scores(self, task_id: str, page_offset: int = None, page_size: int = None):
833
+ """
834
+ Get the scores of the item in a specific task.
835
+ :param task_id: The ID of the task.
836
+ :return: page of scores
837
+ """
838
+ return self.items.task_scores(item_id=self.id, task_id=task_id, page_offset=page_offset, page_size=page_size)
839
+
827
840
 
828
841
  class ModalityTypeEnum(str, Enum):
829
842
  """
@@ -766,7 +766,7 @@ class Ontology(entities.BaseEntity):
766
766
  # TODO: Add support for import from ontology entity in the Future
767
767
  if not self._use_attributes_2:
768
768
  raise ValueError("This method is only supported for attributes 2 mode!")
769
- new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api, recipe=self.recipe)
769
+ new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api)
770
770
 
771
771
  # Update 'labels' and 'attributes'
772
772
  self.labels = new_ontology.labels
@@ -794,6 +794,9 @@ class Ontology(entities.BaseEntity):
794
794
  attribute_range=attribute_range
795
795
  )
796
796
 
797
+ # Get remote updated 'attributes'
798
+ self.metadata["attributes"] = self.ontologies.get(ontology_id=self.id).attributes
799
+
797
800
  # Update 'instance map' and 'color map'
798
801
  self._instance_map = new_ontology.instance_map
799
802
  self._color_map = new_ontology.color_map
@@ -447,3 +447,12 @@ class PromptItem:
447
447
  # update the annotation with the new text
448
448
  annotation.annotation_definition.text = existing_prompt_element['value']
449
449
  self._item.annotations.update(annotation)
450
+
451
+ def update(self):
452
+ """
453
+ Update the prompt item in the platform.
454
+ """
455
+ if self._item is not None:
456
+ self._item._Item__update_item_binary(_json=self.to_json())
457
+ else:
458
+ raise ValueError('Cannot update PromptItem without an item.')
dtlpy/entities/service.py CHANGED
@@ -89,10 +89,6 @@ class InstanceCatalog(str, Enum):
89
89
  - highmem pod with medium size
90
90
  * - HIGHMEM_L
91
91
  - highmem pod with large size
92
- * - GPU_K80_S
93
- - GPU NVIDIA K80 pod with small size
94
- * - GPU_K80_M
95
- - GPU NVIDIA K80 pod with medium size
96
92
  * - GPU_T4_S
97
93
  - GPU NVIDIA T4 pod with regular memory
98
94
  * - GPU_T4_M
@@ -106,8 +102,6 @@ class InstanceCatalog(str, Enum):
106
102
  HIGHMEM_S = "highmem-s"
107
103
  HIGHMEM_M = "highmem-m"
108
104
  HIGHMEM_L = "highmem-l"
109
- GPU_K80_S = "gpu-k80-s"
110
- GPU_K80_M = "gpu-k80-m"
111
105
  GPU_T4_S = "gpu-t4"
112
106
  GPU_T4_M = "gpu-t4-m"
113
107
 
dtlpy/new_instance.py CHANGED
@@ -104,7 +104,6 @@ class Dtlpy:
104
104
  self.cache_state = self.client_api.cache_state
105
105
  self.attributes_mode = self.client_api.attributes_mode
106
106
  self.sdk_cache = self.client_api.sdk_cache
107
- self.platform_settings = self.client_api.platform_settings
108
107
 
109
108
  def __del__(self):
110
109
  for name, pool in self.client_api._thread_pools.items():
@@ -243,8 +242,6 @@ class Dtlpy:
243
242
  HIGHMEM_S = 'highmem-s'
244
243
  HIGHMEM_M = 'highmem-m'
245
244
  HIGHMEM_L = 'highmem-l'
246
- GPU_K80_S = "gpu-k80-s"
247
- GPU_K80_M = "gpu-k80-m"
248
245
  GPU_T4_S = "gpu-t4-s"
249
246
  GPU_T4_M = "gpu-t4-m"
250
247
 
@@ -885,6 +885,32 @@ class Annotations:
885
885
  """
886
886
  return entities.AnnotationCollection(item=self.item)
887
887
 
888
+ def task_scores(self, annotation_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
889
+ """
890
+ Get annotation scores in a task
891
+
892
+ **Prerequisites**: You must be able to read the task
893
+
894
+ :param str annotation_id: The id of the annotation
895
+ :param str task_id: The id of the task
896
+ :param int page_offset: starting page
897
+ :param int page_size: size of page
898
+ :return: json response
899
+ :rtype: dict
900
+ """
901
+ if annotation_id is None:
902
+ raise exceptions.PlatformException('400', 'annotation_id must be provided')
903
+ if task_id is None:
904
+ raise exceptions.PlatformException('400', 'task_id must be provided')
905
+
906
+ success, response = self._client_api.gen_request(req_type='get',
907
+ path='/scores/tasks/{}/annotations/{}?page={}&pageSize={}'
908
+ .format(task_id, annotation_id, page_offset, page_size))
909
+ if success:
910
+ return response.json()
911
+ else:
912
+ raise exceptions.PlatformException(response)
913
+
888
914
  ##################
889
915
  # async function #
890
916
  ##################
@@ -8,6 +8,7 @@ from typing import List, Optional, Dict
8
8
  from ..entities import ComputeCluster, ComputeContext, ComputeType, Project
9
9
  from ..entities.integration import IntegrationType
10
10
 
11
+
11
12
  class Computes:
12
13
 
13
14
  def __init__(self, client_api: ApiClient):
@@ -44,7 +45,8 @@ class Computes:
44
45
  type: entities.ComputeType = entities.ComputeType.KUBERNETES,
45
46
  is_global: Optional[bool] = False,
46
47
  features: Optional[Dict] = None,
47
- wait=True
48
+ wait=True,
49
+ status: entities.ComputeStatus = None
48
50
  ):
49
51
  """
50
52
  Create a new compute
@@ -57,6 +59,7 @@ class Computes:
57
59
  :param is_global: Is global
58
60
  :param features: Features
59
61
  :param wait: Wait for compute creation
62
+ :param status: Compute status
60
63
  :return: Compute
61
64
  """
62
65
 
@@ -67,7 +70,8 @@ class Computes:
67
70
  'global': is_global,
68
71
  'features': features,
69
72
  'shared_contexts': [sc.to_json() for sc in shared_contexts],
70
- 'cluster': cluster.to_json()
73
+ 'cluster': cluster.to_json(),
74
+ 'status': status
71
75
  }
72
76
 
73
77
  # request
@@ -86,7 +90,7 @@ class Computes:
86
90
  )
87
91
 
88
92
  if wait:
89
- command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', {})
93
+ command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
90
94
  if command_id is not None:
91
95
  command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
92
96
  command.wait()
@@ -200,7 +204,8 @@ class Computes:
200
204
  ComputeContext([], org_id, project_id),
201
205
  [],
202
206
  cluster,
203
- ComputeType.KUBERNETES)
207
+ ComputeType.KUBERNETES,
208
+ status=config['config'].get('status', None))
204
209
  return compute
205
210
 
206
211
  def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
@@ -215,6 +220,7 @@ class Computes:
215
220
  compute = self.setup_compute_cluster(config, integration, org_id, project)
216
221
  return compute
217
222
 
223
+
218
224
  class ServiceDrivers:
219
225
 
220
226
  def __init__(self, client_api: ApiClient):
@@ -127,8 +127,8 @@ class Datasets:
127
127
  return dataset_id
128
128
 
129
129
  @staticmethod
130
- def _build_payload(filters, include_feature_vectors, include_annotations, export_type, annotation_filters,
131
- feature_vector_filters):
130
+ def _build_payload(filters, include_feature_vectors, include_annotations,
131
+ export_type, annotation_filters, feature_vector_filters, dataset_lock):
132
132
  valid_list = [e.value for e in entities.ExportType]
133
133
  valid_types = ', '.join(valid_list)
134
134
  if export_type not in ['json', 'zip']:
@@ -157,6 +157,8 @@ class Datasets:
157
157
  if annotation_filters is not None:
158
158
  payload['annotationsQuery'] = annotation_filters.prepare()['filter']
159
159
  payload['annotations']['filter'] = True
160
+ if dataset_lock:
161
+ payload['datasetLock'] = dataset_lock
160
162
 
161
163
  return payload
162
164
 
@@ -471,6 +473,32 @@ class Datasets:
471
473
  return dataset
472
474
  else:
473
475
  raise exceptions.PlatformException(response)
476
+
477
+ @_api_reference.add(path='/datasets/{id}/unlock', method='patch')
478
+ def unlock(self, dataset: entities.Dataset ) -> entities.Dataset:
479
+ """
480
+ Unlock dataset.
481
+
482
+ **Prerequisites**: You must be an *owner* or *developer* to use this method.
483
+
484
+ :param dtlpy.entities.dataset.Dataset dataset: dataset object
485
+ :return: Dataset object
486
+ :rtype: dtlpy.entities.dataset.Dataset
487
+
488
+ **Example**:
489
+
490
+ .. code-block:: python
491
+
492
+ dataset = project.datasets.unlock(dataset='dataset_entity')
493
+ """
494
+ url_path = '/datasets/{}/unlock'.format(dataset.id)
495
+
496
+ success, response = self._client_api.gen_request(req_type='patch', path=url_path)
497
+ if success:
498
+ logger.info('Dataset was unlocked successfully')
499
+ return dataset
500
+ else:
501
+ raise exceptions.PlatformException(response)
474
502
 
475
503
  @_api_reference.add(path='/datasets/{id}/directoryTree', method='get')
476
504
  def directory_tree(self,
@@ -602,7 +630,8 @@ class Datasets:
602
630
  include_feature_vectors: bool = False,
603
631
  include_annotations: bool = False,
604
632
  export_type: entities.ExportType = entities.ExportType.JSON,
605
- timeout: int = 0):
633
+ timeout: int = 0,
634
+ dataset_lock: bool = False):
606
635
  """
607
636
  Export dataset items and annotations.
608
637
 
@@ -619,6 +648,7 @@ class Datasets:
619
648
  :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity to filter feature vectors for export
620
649
  :param bool include_feature_vectors: Include item feature vectors in the export
621
650
  :param bool include_annotations: Include item annotations in the export
651
+ :param bool dataset_lock: Make dataset readonly during the export
622
652
  :param entities.ExportType export_type: Type of export ('json' or 'zip')
623
653
  :param int timeout: Maximum time in seconds to wait for the export to complete
624
654
  :return: Exported item
@@ -632,11 +662,12 @@ class Datasets:
632
662
  filters=filters,
633
663
  include_feature_vectors=True,
634
664
  include_annotations=True,
635
- export_type=dl.ExportType.JSON)
665
+ export_type=dl.ExportType.JSON,
666
+ dataset_lock=True)
636
667
  """
637
668
  dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
638
- payload = self._build_payload(filters, include_feature_vectors, include_annotations, export_type,
639
- annotation_filters, feature_vector_filters)
669
+ payload = self._build_payload(filters, include_feature_vectors, include_annotations,
670
+ export_type, annotation_filters, feature_vector_filters, dataset_lock)
640
671
 
641
672
  success, response = self._client_api.gen_request(req_type='post', path=f'/datasets/{dataset_id}/export',
642
673
  json_req=payload)
@@ -900,7 +931,8 @@ class Datasets:
900
931
  export_png_files: bool = False,
901
932
  filter_output_annotations: bool = False,
902
933
  alpha: float = None,
903
- export_version=entities.ExportVersion.V1
934
+ export_version=entities.ExportVersion.V1,
935
+ dataset_lock: bool = False
904
936
  ) -> str:
905
937
  """
906
938
  Download dataset's annotations by filters.
@@ -917,6 +949,7 @@ class Datasets:
917
949
  :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
918
950
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
919
951
  :param bool overwrite: optional - default = False to overwrite the existing files
952
+ :param bool dataset_lock: optional - default = False to make the dataset readonly
920
953
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
921
954
  :param bool with_text: optional - add text to annotations, default = False
922
955
  :param str remote_path: DEPRECATED and ignored
@@ -926,6 +959,7 @@ class Datasets:
926
959
  :param float alpha: opacity value [0 1], default 1
927
960
  :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
928
961
  :return: local_path of the directory where all the downloaded items are saved
962
+ :param bool dataset_lock: optional - default = False
929
963
  :rtype: str
930
964
 
931
965
  **Example**:
@@ -938,7 +972,8 @@ class Datasets:
938
972
  overwrite=False,
939
973
  thickness=1,
940
974
  with_text=False,
941
- alpha=1
975
+ alpha=1,
976
+ dataset_lock=False
942
977
  )
943
978
  """
944
979
  if annotation_options is None:
@@ -998,7 +1033,8 @@ class Datasets:
998
1033
  include_annotations_in_output=include_annotations_in_output,
999
1034
  export_png_files=export_png_files,
1000
1035
  filter_output_annotations=filter_output_annotations,
1001
- export_version=export_version
1036
+ export_version=export_version,
1037
+ dataset_lock=dataset_lock
1002
1038
  )
1003
1039
  if annotation_options:
1004
1040
  pages = dataset.items.list(filters=filters)