dtlpy 1.105.6__py3-none-any.whl → 1.107.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +0 -8
- dtlpy/__version__.py +1 -1
- dtlpy/entities/annotation.py +11 -0
- dtlpy/entities/annotation_definitions/box.py +5 -62
- dtlpy/entities/compute.py +1 -0
- dtlpy/entities/dataset.py +60 -11
- dtlpy/entities/item.py +20 -3
- dtlpy/entities/prompt_item.py +9 -0
- dtlpy/entities/service.py +0 -6
- dtlpy/entities/task.py +1 -1
- dtlpy/ml/base_model_adapter.py +3 -0
- dtlpy/new_instance.py +0 -3
- dtlpy/repositories/annotations.py +26 -0
- dtlpy/repositories/datasets.py +56 -9
- dtlpy/repositories/downloader.py +24 -13
- dtlpy/repositories/integrations.py +147 -37
- dtlpy/repositories/items.py +43 -2
- dtlpy/repositories/projects.py +0 -8
- dtlpy/repositories/settings.py +0 -13
- dtlpy/repositories/tasks.py +36 -2
- dtlpy/services/api_client.py +0 -53
- dtlpy/services/check_sdk.py +0 -26
- {dtlpy-1.105.6.dist-info → dtlpy-1.107.8.dist-info}/METADATA +1 -1
- {dtlpy-1.105.6.dist-info → dtlpy-1.107.8.dist-info}/RECORD +31 -31
- {dtlpy-1.105.6.data → dtlpy-1.107.8.data}/scripts/dlp +0 -0
- {dtlpy-1.105.6.data → dtlpy-1.107.8.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.105.6.data → dtlpy-1.107.8.data}/scripts/dlp.py +0 -0
- {dtlpy-1.105.6.dist-info → dtlpy-1.107.8.dist-info}/LICENSE +0 -0
- {dtlpy-1.105.6.dist-info → dtlpy-1.107.8.dist-info}/WHEEL +0 -0
- {dtlpy-1.105.6.dist-info → dtlpy-1.107.8.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.105.6.dist-info → dtlpy-1.107.8.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -176,11 +176,6 @@ try:
 except Exception:
     logger.debug("Failed to check SDK! Continue without")

-try:
-    check_sdk.resolve_platform_settings(client_api=client_api, settings=settings)
-except Exception:
-    pass
-
 verbose = client_api.verbose
 login = client_api.login
 logout = client_api.logout
@@ -195,7 +190,6 @@ info = client_api.info
 cache_state = client_api.cache_state
 attributes_mode = client_api.attributes_mode
 sdk_cache = client_api.sdk_cache
-platform_settings = client_api.platform_settings


 def get_secret(secret):
@@ -329,8 +323,6 @@ INSTANCE_CATALOG_HIGHMEM_XS = InstanceCatalog.HIGHMEM_XS
 INSTANCE_CATALOG_HIGHMEM_S = InstanceCatalog.HIGHMEM_S
 INSTANCE_CATALOG_HIGHMEM_M = InstanceCatalog.HIGHMEM_M
 INSTANCE_CATALOG_HIGHMEM_L = InstanceCatalog.HIGHMEM_L
-INSTANCE_CATALOG_GPU_K80_S = InstanceCatalog.GPU_K80_S
-INSTANCE_CATALOG_GPU_K80_M = InstanceCatalog.GPU_K80_M
 INSTANCE_CATALOG_GPU_T4_S = InstanceCatalog.GPU_T4_S
 INSTANCE_CATALOG_GPU_T4_M = InstanceCatalog.GPU_T4_M

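These hunks drop the startup call to check_sdk.resolve_platform_settings, the module-level platform_settings attribute, and the K80 instance-catalog aliases. A minimal defensive sketch for legacy scripts that still reference the removed attribute (the guard itself is illustrative, not part of the SDK):

import dtlpy as dl

# dl.platform_settings is gone in 1.107.x (see the hunk above); guard the access
# instead of assuming the attribute exists.
platform_settings = getattr(dl, 'platform_settings', None)
if platform_settings is None:
    print('platform_settings is no longer exposed by this dtlpy version')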
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.105.6'
+version = '1.107.8'
dtlpy/entities/annotation.py
CHANGED
@@ -1634,6 +1634,17 @@ class Annotation(entities.BaseEntity):

         return _json

+    def task_scores(self, task_id: str, page_offset: int = None, page_size: int = None):
+        """
+        Get the scores of the annotation in a specific task.
+        :param task_id: The ID of the task.
+        :param page_offset: The page offset.
+        :param page_size: The page size.
+        :return: page of scores
+        """
+        return self.annotations.task_scores(annotation_id=self.id, task_id=task_id, page_offset=page_offset, page_size=page_size)
+
+

 @attr.s
 class FrameAnnotation(entities.BaseEntity):
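Annotation.task_scores is a thin wrapper around the annotations repository (see dtlpy/repositories/annotations.py further down). A usage sketch; the item, annotation and task IDs are placeholders:

import dtlpy as dl

item = dl.items.get(item_id='my-item-id')                            # placeholder ID
annotation = item.annotations.get(annotation_id='my-annotation-id')  # placeholder ID
# One page of the scores recorded for this annotation in the given task
scores = annotation.task_scores(task_id='my-task-id', page_offset=0, page_size=100)
print(scores)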
dtlpy/entities/annotation_definitions/box.py
CHANGED
@@ -1,9 +1,6 @@
-import copy
-
 import numpy as np
 from . import BaseAnnotationDefinition
 from .polygon import Polygon
-import warnings


 class Box(BaseAnnotationDefinition):
@@ -45,72 +42,21 @@ class Box(BaseAnnotationDefinition):
         self.bottom_left = [left, bottom]
         self.bottom_right = [right, bottom]
         self.label = label
-        self._four_points = self._rotate_around_point() if self.is_rotated else [self.top_left,
-                                                                                 self.bottom_left,
-                                                                                 self.bottom_right,
-                                                                                 self.top_right]
-
-    @property
-    def is_rotated(self):
-        return self.angle is not None and self.angle != 0

     @property
     def x(self):
-        if self._box_points_setting():
-            return [x_point[0] for x_point in self._four_points]
         return [self.left, self.right]

     @property
     def y(self):
-        if self._box_points_setting():
-            return [y_point[1] for y_point in self._four_points]
         return [self.top, self.bottom]

     @property
     def geo(self):
-
-
-
-
-            [self.left, self.top],
-            [self.right, self.bottom]
-        ]
-        return res
-
-    def _box_points_setting(self):
-        res = False
-        if self._annotation and self._annotation.item:
-            item = self._annotation.item
-            project_id = item.project_id if item.project_id else item.project.id
-            settings_dict = item._client_api.platform_settings.settings.get('4ptBox', None)
-            if settings_dict is not None:
-                if project_id in settings_dict:
-                    res = settings_dict.get(project_id, None)
-                elif '*' in settings_dict:
-                    res = settings_dict.get('*', None)
-        return res
-
-    def _rotate_points(self, points):
-        angle = np.radians(self.angle)
-        rotation_matrix = np.asarray([[np.cos(angle), -np.sin(angle)],
-                                      [np.sin(angle), np.cos(angle)]])
-        pts2 = np.asarray([rotation_matrix.dot(pt)[:2] for pt in points])
-        return pts2
-
-    def _translate(self, points, translate_x, translate_y=None):
-        translation_matrix = np.asarray([[1, 0, translate_x],
-                                         [0, 1, translate_y],
-                                         [0, 0, 1]])
-        pts2 = np.asarray([translation_matrix.dot(list(pt) + [1])[:2] for pt in points])
-        return pts2
-
-    def _rotate_around_point(self):
-        points = copy.deepcopy(self.four_points)
-        center = [((self.left + self.right) / 2), ((self.top + self.bottom) / 2)]
-        centerized = self._translate(points, -center[0], -center[1])
-        rotated = self._rotate_points(centerized)
-        moved = self._translate(rotated, center[0], center[1])
-        return moved
+        return [
+            [self.left, self.top],
+            [self.right, self.bottom]
+        ]

     @property
     def four_points(self):
@@ -140,10 +86,7 @@ class Box(BaseAnnotationDefinition):
         thickness = 2

         # draw annotation
-
-            points = self._rotate_around_point()
-        else:
-            points = self.four_points
+        points = self.four_points

         # create image to draw on
         if alpha != 1:
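With the settings-gated four-point logic removed, Box.geo, Box.x and Box.y always describe the plain two-corner box. A small sketch of the resulting behaviour (label and coordinates are arbitrary example values):

import dtlpy as dl

box = dl.Box(top=10, left=20, bottom=110, right=220, label='car')
print(box.geo)  # [[20, 10], [220, 110]] - just the two corners, no rotation handling
print(box.x)    # [20, 220]
print(box.y)    # [10, 110]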
dtlpy/entities/compute.py
CHANGED
dtlpy/entities/dataset.py
CHANGED
@@ -516,6 +516,23 @@ class Dataset(entities.BaseEntity):
         return self.datasets.update(dataset=self,
                                     system_metadata=system_metadata)

+    def unlock(self):
+        """
+        Unlock dataset
+
+        **Prerequisites**: You must be an *owner* or *developer* to use this method.
+
+        :return: Dataset object
+        :rtype: dtlpy.entities.dataset.Dataset
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset = dataset.unlock()
+        """
+        return self.datasets.unlock(dataset=self)
+
     def set_readonly(self, state: bool):
         """
         Set dataset readonly mode
@@ -607,7 +624,9 @@ class Dataset(entities.BaseEntity):
                  export_png_files=False,
                  filter_output_annotations=False,
                  alpha=1,
-                 export_version=ExportVersion.V1
+                 export_version=ExportVersion.V1,
+                 dataset_lock=False,
+                 lock_timeout_sec=None
                  ):
         """
         Download dataset by filters.
@@ -621,6 +640,8 @@ class Dataset(entities.BaseEntity):
         :param list(dtlpy.entities.annotation.ViewAnnotationOptions) annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
+        :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
         :param str remote_path: DEPRECATED and ignored
@@ -642,7 +663,9 @@ class Dataset(entities.BaseEntity):
                             overwrite=False,
                             thickness=1,
                             with_text=False,
-                            alpha=1
+                            alpha=1,
+                            dataset_lock=False
+                            lock_timeout_sec=300
                             )
         """

@@ -660,7 +683,9 @@ class Dataset(entities.BaseEntity):
                                    export_png_files=export_png_files,
                                    filter_output_annotations=filter_output_annotations,
                                    alpha=alpha,
-                                   export_version=export_version
+                                   export_version=export_version,
+                                   dataset_lock=dataset_lock,
+                                   lock_timeout_sec=lock_timeout_sec
                                    )

     def export(self,
@@ -671,7 +696,9 @@ class Dataset(entities.BaseEntity):
                include_feature_vectors: bool = False,
                include_annotations: bool = False,
                export_type: ExportType = ExportType.JSON,
-               timeout: int = 0
+               timeout: int = 0,
+               dataset_lock: bool = False,
+               lock_timeout_sec: int = None):
         """
         Export dataset items and annotations.

@@ -685,6 +712,8 @@ class Dataset(entities.BaseEntity):
         :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity
         :param bool include_feature_vectors: Include item feature vectors in the export
         :param bool include_annotations: Include item annotations in the export
+        :param bool dataset_lock: Make dataset readonly during the export
+        :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
         :param entities.ExportType export_type: Type of export ('json' or 'zip')
         :param int timeout: Maximum time in seconds to wait for the export to complete
         :return: Exported item
@@ -708,7 +737,9 @@ class Dataset(entities.BaseEntity):
                                     include_feature_vectors=include_feature_vectors,
                                     include_annotations=include_annotations,
                                     export_type=export_type,
-                                    timeout=timeout
+                                    timeout=timeout,
+                                    dataset_lock=dataset_lock,
+                                    lock_timeout_sec=lock_timeout_sec)

     def upload_annotations(self,
                            local_path,
@@ -942,7 +973,9 @@ class Dataset(entities.BaseEntity):
                  with_text=False,
                  without_relative_path=None,
                  alpha=1,
-                 export_version=ExportVersion.V1
+                 export_version=ExportVersion.V1,
+                 dataset_lock=False,
+                 lock_timeout_sec=None
                  ):
         """
         Download dataset by filters.
@@ -957,6 +990,8 @@ class Dataset(entities.BaseEntity):
         :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
+        :param bool dataset_lock: optional - default = False to make dataset readonly during the download
+        :param int lock_timeout_sec: optional - Set lock timeout for the export
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -974,7 +1009,9 @@ class Dataset(entities.BaseEntity):
                             overwrite=False,
                             thickness=1,
                             with_text=False,
-                            alpha=1
+                            alpha=1,
+                            dataset_lock=False,
+                            lock_timeout_sec=300
                             )
         """
         return self.items.download(filters=filters,
@@ -988,7 +1025,10 @@ class Dataset(entities.BaseEntity):
                                    with_text=with_text,
                                    without_relative_path=without_relative_path,
                                    alpha=alpha,
-                                   export_version=export_version
+                                   export_version=export_version,
+                                   dataset_lock=dataset_lock,
+                                   lock_timeout_sec=lock_timeout_sec
+                                   )

     def download_folder(
             self,
@@ -1004,7 +1044,9 @@ class Dataset(entities.BaseEntity):
             with_text=False,
             without_relative_path=None,
             alpha=1,
-            export_version=ExportVersion.V1
+            export_version=ExportVersion.V1,
+            dataset_lock=False,
+            lock_timeout_sec=None
             ):
         """
         Download dataset folder.
@@ -1019,6 +1061,8 @@ class Dataset(entities.BaseEntity):
         :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
+        :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
+        :param bool lock_timeout_sec: optional - Set lock timeout for the export
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -1038,7 +1082,9 @@ class Dataset(entities.BaseEntity):
                             thickness=1,
                             with_text=False,
                             alpha=1,
-                            save_locally=True
+                            save_locally=True,
+                            dataset_lock=False
+                            lock_timeout_sec=300
                             )
         """
         filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
@@ -1053,7 +1099,10 @@ class Dataset(entities.BaseEntity):
                                    with_text=with_text,
                                    without_relative_path=without_relative_path,
                                    alpha=alpha,
-                                   export_version=export_version
+                                   export_version=export_version,
+                                   dataset_lock=dataset_lock,
+                                   lock_timeout_sec=lock_timeout_sec
+                                   )

     def delete_labels(self, label_names):
         """
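Every download/export entry point on the Dataset entity now accepts dataset_lock and lock_timeout_sec, and a matching unlock() helper is added for releasing a leftover lock. A hedged usage sketch; the project and dataset names are placeholders:

import dtlpy as dl

project = dl.projects.get(project_name='my-project')        # placeholder name
dataset = project.datasets.get(dataset_name='my-dataset')   # placeholder name

# Export with the dataset held readonly for up to 5 minutes
export_item = dataset.export(include_annotations=True,
                             dataset_lock=True,
                             lock_timeout_sec=300)

# If the lock is still in place (e.g. the export was interrupted), release it explicitly
dataset = dataset.unlock()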
dtlpy/entities/item.py
CHANGED
@@ -453,7 +453,9 @@ class Item(entities.BaseEntity):
                  with_text=False,
                  annotation_filters=None,
                  alpha=1,
-                 export_version=ExportVersion.V1
+                 export_version=ExportVersion.V1,
+                 dataset_lock=False,
+                 lock_timeout_sec=None
                  ):
         """
         Download dataset by filters.
@@ -467,6 +469,8 @@ class Item(entities.BaseEntity):
         :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
+        :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -485,7 +489,9 @@ class Item(entities.BaseEntity):
                             thickness=1,
                             with_text=False,
                             alpha=1,
-                            save_locally=True
+                            save_locally=True,
+                            dataset_lock=False
+                            lock_timeout_sec=300
                             )
         """
         # if dir - concatenate local path and item name
@@ -519,7 +525,9 @@ class Item(entities.BaseEntity):
                               alpha=alpha,
                               with_text=with_text,
                               export_version=export_version,
-                              filters=filters
+                              filters=filters,
+                              dataset_lock=dataset_lock,
+                              lock_timeout_sec=lock_timeout_sec)

     def delete(self):
         """
@@ -824,6 +832,15 @@ class Item(entities.BaseEntity):
         filters.add(field='metadata.system.collections', values=None)
         filters.add(field='datasetId', values=self._dataset.id)
         return self._dataset.items.list(filters=filters)
+
+    def task_scores(self, task_id: str, page_offset: int = None, page_size: int = None):
+        """
+        Get the scores of the item in a specific task.
+        :param task_id: The ID of the task.
+        :return: page of scores
+        """
+        return self.items.task_scores(item_id=self.id, task_id=task_id, page_offset=page_offset, page_size=page_size)
+

 class ModalityTypeEnum(str, Enum):
     """
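Item.download gains the same dataset_lock/lock_timeout_sec pass-through, and Item.task_scores mirrors the annotation-level helper. A short sketch; the item and task IDs and the local path are placeholders:

import dtlpy as dl

item = dl.items.get(item_id='my-item-id')   # placeholder ID

# Download while the parent dataset is locked; lock_timeout_sec caps how long the lock is held
item.download(local_path='/tmp/data', dataset_lock=True, lock_timeout_sec=300)

# One page of the scores recorded for this item in a specific task
scores = item.task_scores(task_id='my-task-id', page_offset=0, page_size=100)
print(scores)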
dtlpy/entities/prompt_item.py
CHANGED
@@ -447,3 +447,12 @@ class PromptItem:
         # update the annotation with the new text
         annotation.annotation_definition.text = existing_prompt_element['value']
         self._item.annotations.update(annotation)
+
+    def update(self):
+        """
+        Update the prompt item in the platform.
+        """
+        if self._item is not None:
+            self._item._Item__update_item_binary(_json=self.to_json())
+        else:
+            raise ValueError('Cannot update PromptItem without an item.')
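PromptItem.update() pushes local prompt edits back to the platform through the backing item, and raises if the prompt item has no backing item. A sketch, assuming the existing PromptItem.from_item loader and a placeholder item ID:

import dtlpy as dl

item = dl.items.get(item_id='my-prompt-item-id')   # placeholder ID of an existing prompt item
prompt_item = dl.PromptItem.from_item(item)        # assumed loader for the prompt JSON

# ... modify prompts/responses locally ...

prompt_item.update()   # re-uploads the prompt JSON; raises ValueError without a backing item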
dtlpy/entities/service.py
CHANGED
@@ -89,10 +89,6 @@ class InstanceCatalog(str, Enum):
           - highmem pod with medium size
         * - HIGHMEM_L
           - highmem pod with large size
-        * - GPU_K80_S
-          - GPU NVIDIA K80 pod with small size
-        * - GPU_K80_M
-          - GPU NVIDIA K80 pod with medium size
         * - GPU_T4_S
           - GPU NVIDIA T4 pod with regular memory
         * - GPU_T4_M
@@ -106,8 +102,6 @@ class InstanceCatalog(str, Enum):
     HIGHMEM_S = "highmem-s"
     HIGHMEM_M = "highmem-m"
     HIGHMEM_L = "highmem-l"
-    GPU_K80_S = "gpu-k80-s"
-    GPU_K80_M = "gpu-k80-m"
     GPU_T4_S = "gpu-t4"
     GPU_T4_M = "gpu-t4-m"

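With GPU_K80_S and GPU_K80_M removed from InstanceCatalog, services still pinned to a K80 pod type need to move to a remaining entry. A hedged migration sketch; the service name is a placeholder and T4 is only one possible replacement:

import dtlpy as dl

service = dl.services.get(service_name='my-service')   # placeholder name
# The K80 catalog entries no longer exist in this SDK version
service.runtime.pod_type = dl.InstanceCatalog.GPU_T4_S
service.update()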
dtlpy/entities/task.py
CHANGED
@@ -329,7 +329,7 @@ class Task:
         """
         Update an Annotation Task

-        :param bool system_metadata:
+        :param bool system_metadata: DEPRECATED
         """
         return self.tasks.update(task=self, system_metadata=system_metadata)

dtlpy/ml/base_model_adapter.py
CHANGED
@@ -340,6 +340,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                                       annotation_options=annotation_options,
                                       annotation_filters=annotation_filters
                                       )
+            if isinstance(ret_list, list) and len(ret_list) == 0:
+                raise ValueError(f"No items downloaded for subset {subset}! Cannot train model with empty subset.\n"
+                                 f"Subset {subset} filters: {filters.prepare()}\nAnnotation filters: {annotation_filters.prepare()}")

         self.convert_from_dtlpy(data_path=data_path, **kwargs)
         return root_path, data_path, output_path
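The adapter's data-preparation step now fails fast when a subset filter matches no items instead of continuing with an empty training folder. A hedged sketch of catching that condition; the prepare_data call site and signature are assumptions, the model ID is a placeholder:

import dtlpy as dl

class MyAdapter(dl.BaseModelAdapter):
    # minimal stub; a real adapter also implements load/train/predict
    def convert_from_dtlpy(self, data_path, **kwargs):
        pass

model = dl.models.get(model_id='my-model-id')   # placeholder ID
adapter = MyAdapter(model_entity=model)
try:
    adapter.prepare_data(dataset=model.dataset)  # assumed entry point, see note above
except ValueError as err:
    # raised by the new guard when a train/validation subset is empty
    print(f'Cannot train: {err}')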
dtlpy/new_instance.py
CHANGED
@@ -104,7 +104,6 @@ class Dtlpy:
         self.cache_state = self.client_api.cache_state
         self.attributes_mode = self.client_api.attributes_mode
         self.sdk_cache = self.client_api.sdk_cache
-        self.platform_settings = self.client_api.platform_settings

     def __del__(self):
         for name, pool in self.client_api._thread_pools.items():
@@ -243,8 +242,6 @@ class Dtlpy:
         HIGHMEM_S = 'highmem-s'
         HIGHMEM_M = 'highmem-m'
         HIGHMEM_L = 'highmem-l'
-        GPU_K80_S = "gpu-k80-s"
-        GPU_K80_M = "gpu-k80-m"
         GPU_T4_S = "gpu-t4-s"
         GPU_T4_M = "gpu-t4-m"

dtlpy/repositories/annotations.py
CHANGED
@@ -885,6 +885,32 @@ class Annotations:
         """
         return entities.AnnotationCollection(item=self.item)

+    def task_scores(self, annotation_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
+        """
+        Get annotation scores in a task
+
+        **Prerequisites**: You must be able to read the task
+
+        :param str annotation_id: The id of the annotation
+        :param str task_id: The id of the task
+        :param int page_offset: starting page
+        :param int page_size: size of page
+        :return: json response
+        :rtype: dict
+        """
+        if annotation_id is None:
+            raise exceptions.PlatformException('400', 'annotation_id must be provided')
+        if task_id is None:
+            raise exceptions.PlatformException('400', 'task_id must be provided')
+
+        success, response = self._client_api.gen_request(req_type='get',
+                                                         path='/scores/tasks/{}/annotations/{}?page={}&pageSize={}'
+                                                         .format(task_id, annotation_id, page_offset, page_size))
+        if success:
+            return response.json()
+        else:
+            raise exceptions.PlatformException(response)
+
 ##################
 # async function #
 ##################
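At the repository level, task_scores issues a GET against /scores/tasks/{task_id}/annotations/{annotation_id} with page/pageSize query parameters and returns the raw JSON. A sketch of calling it directly; the IDs are placeholders:

import dtlpy as dl

item = dl.items.get(item_id='my-item-id')   # placeholder ID
scores_json = item.annotations.task_scores(annotation_id='my-annotation-id',  # placeholder ID
                                           task_id='my-task-id',              # placeholder ID
                                           page_offset=0,
                                           page_size=100)
print(scores_json)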
dtlpy/repositories/datasets.py
CHANGED
@@ -127,8 +127,8 @@ class Datasets:
         return dataset_id

     @staticmethod
-    def _build_payload(filters, include_feature_vectors, include_annotations,
-                       feature_vector_filters):
+    def _build_payload(filters, include_feature_vectors, include_annotations,
+                       export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec):
         valid_list = [e.value for e in entities.ExportType]
         valid_types = ', '.join(valid_list)
         if export_type not in ['json', 'zip']:
@@ -158,6 +158,12 @@ class Datasets:
             payload['annotationsQuery'] = annotation_filters.prepare()['filter']
             payload['annotations']['filter'] = True

+        if dataset_lock:
+            payload['datasetLock'] = dataset_lock
+
+        if lock_timeout_sec:
+            payload['lockTimeoutSec'] = lock_timeout_sec
+
         return payload

     def _download_exported_item(self, item_id, export_type, local_path=None):
@@ -471,6 +477,32 @@ class Datasets:
             return dataset
         else:
             raise exceptions.PlatformException(response)
+
+    @_api_reference.add(path='/datasets/{id}/unlock', method='patch')
+    def unlock(self, dataset: entities.Dataset) -> entities.Dataset:
+        """
+        Unlock dataset.
+
+        **Prerequisites**: You must be an *owner* or *developer* to use this method.
+
+        :param dtlpy.entities.dataset.Dataset dataset: dataset object
+        :return: Dataset object
+        :rtype: dtlpy.entities.dataset.Dataset
+
+        **Example**:
+
+        .. code-block:: python
+
+            dataset = project.datasets.unlock(dataset='dataset_entity')
+        """
+        url_path = '/datasets/{}/unlock'.format(dataset.id)
+
+        success, response = self._client_api.gen_request(req_type='patch', path=url_path)
+        if success:
+            logger.info('Dataset was unlocked successfully')
+            return dataset
+        else:
+            raise exceptions.PlatformException(response)

     @_api_reference.add(path='/datasets/{id}/directoryTree', method='get')
     def directory_tree(self,
@@ -602,7 +634,9 @@ class Datasets:
                include_feature_vectors: bool = False,
                include_annotations: bool = False,
                export_type: entities.ExportType = entities.ExportType.JSON,
-               timeout: int = 0
+               timeout: int = 0,
+               dataset_lock: bool = False,
+               lock_timeout_sec: int = None):
         """
         Export dataset items and annotations.

@@ -619,6 +653,8 @@ class Datasets:
         :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity to filter feature vectors for export
         :param bool include_feature_vectors: Include item feature vectors in the export
         :param bool include_annotations: Include item annotations in the export
+        :param bool dataset_lock: Make dataset readonly during the export
+        :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
         :param entities.ExportType export_type: Type of export ('json' or 'zip')
         :param int timeout: Maximum time in seconds to wait for the export to complete
         :return: Exported item
@@ -632,11 +668,13 @@ class Datasets:
                            filters=filters,
                            include_feature_vectors=True,
                            include_annotations=True,
-                           export_type=dl.ExportType.JSON
+                           export_type=dl.ExportType.JSON,
+                           dataset_lock=True
+                           lock_timeout_sec=300)
         """
         dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
-        payload = self._build_payload(filters, include_feature_vectors, include_annotations,
-
+        payload = self._build_payload(filters, include_feature_vectors, include_annotations,
+                                      export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec)

         success, response = self._client_api.gen_request(req_type='post', path=f'/datasets/{dataset_id}/export',
                                                          json_req=payload)
@@ -900,7 +938,9 @@ class Datasets:
                              export_png_files: bool = False,
                              filter_output_annotations: bool = False,
                              alpha: float = None,
-                             export_version=entities.ExportVersion.V1
+                             export_version=entities.ExportVersion.V1,
+                             dataset_lock: bool = False,
+                             lock_timeout_sec: int = None
                              ) -> str:
         """
         Download dataset's annotations by filters.
@@ -917,6 +957,7 @@ class Datasets:
         :param list annotation_options: type of download annotations: list(dl.ViewAnnotationOptions)
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
+        :param bool dataset_loc: optional - default = False to make the dataset readonly
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
         :param str remote_path: DEPRECATED and ignored
@@ -926,6 +967,8 @@ class Datasets:
         :param float alpha: opacity value [0 1], default 1
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :return: local_path of the directory where all the downloaded item
+        :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :rtype: str

         **Example**:
@@ -938,7 +981,9 @@ class Datasets:
                             overwrite=False,
                             thickness=1,
                             with_text=False,
-                            alpha=1
+                            alpha=1,
+                            dataset_lock=False
+                            lock_timeout_sec=300
                             )
         """
         if annotation_options is None:
@@ -998,7 +1043,9 @@ class Datasets:
                                    include_annotations_in_output=include_annotations_in_output,
                                    export_png_files=export_png_files,
                                    filter_output_annotations=filter_output_annotations,
-                                   export_version=export_version
+                                   export_version=export_version,
+                                   dataset_lock=dataset_lock,
+                                   lock_timeout_sec=lock_timeout_sec
                                    )
         if annotation_options:
             pages = dataset.items.list(filters=filters)