dtlpy 1.99.12__py3-none-any.whl → 1.101.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +1 -1
- dtlpy/__version__.py +1 -1
- dtlpy/entities/annotation_definitions/segmentation.py +52 -28
- dtlpy/entities/dataset.py +66 -0
- dtlpy/entities/dpk.py +1 -0
- dtlpy/entities/item.py +54 -0
- dtlpy/entities/model.py +27 -3
- dtlpy/entities/paged_entities.py +1 -1
- dtlpy/entities/service.py +10 -0
- dtlpy/ml/base_model_adapter.py +27 -30
- dtlpy/repositories/commands.py +1 -1
- dtlpy/repositories/datasets.py +107 -2
- dtlpy/repositories/downloader.py +2 -2
- dtlpy/repositories/models.py +12 -2
- dtlpy/repositories/services.py +35 -0
- dtlpy/repositories/uploader.py +1 -1
- dtlpy/services/api_client.py +93 -2
- dtlpy/utilities/converter.py +2 -2
- {dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/METADATA +1 -1
- {dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/RECORD +27 -27
- {dtlpy-1.99.12.data → dtlpy-1.101.8.data}/scripts/dlp +0 -0
- {dtlpy-1.99.12.data → dtlpy-1.101.8.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.99.12.data → dtlpy-1.101.8.data}/scripts/dlp.py +0 -0
- {dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/LICENSE +0 -0
- {dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/WHEEL +0 -0
- {dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -239,7 +239,7 @@ def checkout_state():
 
 
 def use_attributes_2(state: bool = True):
-    warnings.warn("Function 'use_attributes_2()' is deprecated as of version 1.99.…
+    warnings.warn("Function 'use_attributes_2()' is deprecated as of version 1.99.12 and has been non-functional since version 1.90.39. To work with attributes 2.0, simply use 'update_attributes()'.", DeprecationWarning)
     client_api.attributes_mode.use_attributes_2 = state
 
 
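Note for callers: per the message above, dl.use_attributes_2() is now a warning-only stub. A minimal sketch of what existing code will see (the exact signature of the replacement update_attributes() is not part of this diff):

    import dtlpy as dl

    dl.use_attributes_2()  # emits DeprecationWarning only; a no-op since 1.90.39

Attributes 2.0 are handled per entity via update_attributes() instead, as the deprecation message suggests.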
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.99.12'
+version = '1.101.8'
dtlpy/entities/annotation_definitions/segmentation.py
CHANGED
@@ -15,12 +15,31 @@ class Segmentation(BaseAnnotationDefinition):
     """
     type = "binary"
 
-    def __init__(self, geo, label, attributes=None, description=None, color=None):
+    def __init__(self, geo: np.ndarray, label: str, attributes=None, description=None, color=None):
         super().__init__(description=description, attributes=attributes)
-        self.geo = geo
+        self._geo = geo
+        self._coordinates = None
         self.label = label
         self._color = color
 
+    @property
+    def geo(self) -> np.ndarray:
+        if self._geo is None:
+            self._geo = self.from_coordinates(self._coordinates)
+            if self._color is None:
+                color = None
+                fill_coordinates = self._geo.nonzero()
+                if len(fill_coordinates) > 0 and len(fill_coordinates[0]) > 0 and len(fill_coordinates[1]) > 0:
+                    color = self._geo[fill_coordinates[0][0]][fill_coordinates[1][0]]
+                self._color = color
+            self._geo = (self._geo[:, :, 3] > 127).astype(float)
+        return self._geo
+
+    @geo.setter
+    def geo(self, geo: np.ndarray):
+        self._geo = geo
+        self._coordinates = None
+
     @property
     def x(self):
         return
@@ -106,25 +125,31 @@ class Segmentation(BaseAnnotationDefinition):
         return image
 
     def to_coordinates(self, color=None):
-        … (removed lines truncated in the source render)
+        need_encode = False
+        if color is not None and self._color is not None:
+            # if input color is not the same as the annotation's color - need to re-encode
+            if self._color != color:
+                need_encode = True
+
+        if need_encode or self._coordinates is None:
             if self._color is not None:
                 color = self._color
             else:
                 color = (255, 255, 255)
-        … (removed lines truncated in the source render)
-        return …
+            max_val = np.max(self.geo)
+            if max_val > 1:
+                self.geo = self.geo / max_val
+            png_ann = np.stack((color[0] * self.geo,
+                                color[1] * self.geo,
+                                color[2] * self.geo,
+                                255 * self.geo),
+                               axis=2).astype(np.uint8)
+            pil_img = Image.fromarray(png_ann)
+            buff = io.BytesIO()
+            pil_img.save(buff, format="PNG")
+            new_image_string = base64.b64encode(buff.getvalue()).decode("utf-8")
+            self._coordinates = "data:image/png;base64,%s" % new_image_string
+        return self._coordinates
 
     def to_box(self):
         """
@@ -186,23 +211,22 @@ class Segmentation(BaseAnnotationDefinition):
         else:
             raise TypeError('unknown binary data type')
         decode = base64.b64decode(data)
-        …
+        mask = np.array(Image.open(io.BytesIO(decode)))
+        return mask
 
     @classmethod
     def from_json(cls, _json):
         if "coordinates" in _json:
-            …
+            coordinates = _json["coordinates"]
         elif "data" in _json:
-            …
+            coordinates = _json["data"]
         else:
             raise ValueError('can not find "coordinates" or "data" in annotation. id: {}'.format(_json["id"]))
-        … (removed lines truncated in the source render)
-        if len(fill_coordinates) > 0 and len(fill_coordinates[0]) > 0 and len(fill_coordinates[1]) > 0:
-            color = mask[fill_coordinates[0][0]][fill_coordinates[1][0]]
-        return cls(
-            geo=(mask[:, :, 3] > 127).astype(float),
+        inst = cls(
+            geo=None,
             label=_json["label"],
             attributes=_json.get("attributes", None),
-            color=…
+            color=None
         )
+        inst._coordinates = coordinates
+        return inst
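The net effect of this rewrite is lazy, cached (de)serialization: geo is only decoded from the base64 PNG on first access, and to_coordinates() re-encodes only when no cached string exists or the requested color differs from the cached one. A standalone sketch of the encode path, mirroring the stack/PIL/base64 logic in the hunk above (the mask here is synthetic):

    import io
    import base64
    import numpy as np
    from PIL import Image

    # a binary mask; 1.0 marks the segmented region
    mask = np.zeros((256, 256), dtype=float)
    mask[64:128, 64:128] = 1.0

    color = (255, 0, 0)
    # build an RGBA image: RGB planes carry the color, alpha carries the mask
    png_ann = np.stack((color[0] * mask,
                        color[1] * mask,
                        color[2] * mask,
                        255 * mask),
                       axis=2).astype(np.uint8)
    buff = io.BytesIO()
    Image.fromarray(png_ann).save(buff, format="PNG")
    coordinates = "data:image/png;base64,%s" % base64.b64encode(buff.getvalue()).decode("utf-8")

Decoding reverses this: the geo property opens the PNG and keeps (alpha > 127) as the binary mask, recovering the annotation color from the first nonzero pixel.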
dtlpy/entities/dataset.py
CHANGED
@@ -1149,3 +1149,69 @@ class Dataset(entities.BaseEntity):
         ontology_id = recipe.ontology_ids[0]
         ontology = recipe.ontologies.get(ontology_id=ontology_id)
         return ontology.delete_attributes(ontology_id=ontology.id, keys=keys)
+
+    def split_ml_subsets(self,
+                         items_query=None,
+                         percentages: dict = None):
+        """
+        Split dataset items into ML subsets.
+
+        :param dl.Filters items_query: Filters object to select items.
+        :param dict percentages: {'train': x, 'validation': y, 'test': z}.
+        :return: True if the split operation was successful.
+        :rtype: bool
+        """
+        return self.datasets.split_ml_subsets(dataset_id=self.id,
+                                              items_query=items_query,
+                                              ml_split_list=percentages)
+
+    def assign_subset_to_items(self, subset: str, items_query=None) -> bool:
+        """
+        Assign a specific ML subset (train/validation/test) to items defined by the given filters.
+        This will set the chosen subset to True and the others to None.
+
+        :param dl.Filters items_query: Filters to select items
+        :param str subset: 'train', 'validation', or 'test'
+        :return: True if successful
+        :rtype: bool
+        """
+
+        return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
+                                                   items_query=items_query,
+                                                   subset=subset)
+
+    def remove_subset_from_items(self, items_query=None) -> bool:
+        """
+        Remove any ML subset assignment from items defined by the given filters.
+        This sets the train, validation, and test tags to None.
+
+        :param dl.Filters items_query: Filters to select items
+        :return: True if successful
+        :rtype: bool
+        """
+        return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
+                                                   items_query=items_query,
+                                                   subset=None,
+                                                   deleteTag=True)
+
+    def get_items_missing_ml_subset(self, filters=None) -> list:
+        """
+        Get the list of item IDs that are missing an ML subset assignment.
+        An item is considered missing an ML subset when none of the train, validation, or test tags is True (all are None).
+
+        :param dl.Filters filters: optional filters to narrow down items. If None, a default filter for files is used.
+        :return: list of item IDs
+        :rtype: list
+        """
+        if filters is None:
+            filters = entities.Filters()
+        filters.add(field='metadata.system.tags.train', values=None)
+        filters.add(field='metadata.system.tags.validation', values=None)
+        filters.add(field='metadata.system.tags.test', values=None)
+        missing_ids = []
+        pages = self.items.list(filters=filters)
+        for page in pages:
+            for item in page:
+                # an item that passes the filters has no subset assigned
+                missing_ids.append(item.id)
+        return missing_ids
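These entity-level helpers wrap the new repository endpoints shown later in this diff. A usage sketch, assuming an existing dataset (the ID and the /holdout directory are hypothetical):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='my-dataset-id')  # hypothetical id

    # randomly split all items 70/15/15
    dataset.split_ml_subsets(percentages={'train': 70, 'validation': 15, 'test': 15})

    # pin everything under /holdout to the test subset
    filters = dl.Filters(field='dir', values='/holdout')
    dataset.assign_subset_to_items(subset='test', items_query=filters)

    # find items that still carry no subset tag
    missing_ids = dataset.get_items_missing_ml_subset()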
dtlpy/entities/dpk.py
CHANGED
@@ -48,6 +48,7 @@ class Toolbar(entities.DlEntity):
     invoke: dict = entities.DlProperty(location=['invoke'], _type=dict)
     location: str = entities.DlProperty(location=['location'], _type=str)
     icon: str = entities.DlProperty(location=['icon'], _type=str)
+    action: str = entities.DlProperty(location=['action'], _type=str, default=None)
 
     def to_json(self) -> dict:
         return self._dict.copy()
dtlpy/entities/item.py
CHANGED
@@ -716,6 +716,60 @@ class Item(entities.BaseEntity):
         self._platform_dict = self.update()._platform_dict
         return self
 
+    def assign_subset(self, subset: str):
+        """
+        Assign a single ML subset (train/validation/test) to this item.
+        Sets the chosen subset to True and the others to None,
+        then calls item.update(system_metadata=True).
+
+        :param str subset: 'train', 'validation', or 'test'
+        """
+        if subset not in ['train', 'validation', 'test']:
+            raise ValueError("subset must be one of: 'train', 'validation', 'test'")
+
+        if 'system' not in self.metadata:
+            self.metadata['system'] = {}
+        if 'tags' not in self.metadata['system']:
+            self.metadata['system']['tags'] = {}
+
+        self.metadata['system']['tags']['train'] = True if subset == 'train' else None
+        self.metadata['system']['tags']['validation'] = True if subset == 'validation' else None
+        self.metadata['system']['tags']['test'] = True if subset == 'test' else None
+
+        return self.update(system_metadata=True)
+
+    def remove_subset(self):
+        """
+        Remove any ML subset assignment from this item.
+        Sets train, validation, and test to None,
+        then calls item.update(system_metadata=True).
+        """
+        if 'system' not in self.metadata:
+            self.metadata['system'] = {}
+        if 'tags' not in self.metadata['system']:
+            self.metadata['system']['tags'] = {}
+
+        self.metadata['system']['tags']['train'] = None
+        self.metadata['system']['tags']['validation'] = None
+        self.metadata['system']['tags']['test'] = None
+
+        return self.update(system_metadata=True)
+
+    def get_current_subset(self) -> str:
+        """
+        Get the current ML subset assignment of this item.
+        Returns 'train', 'validation', 'test', or None if not assigned.
+
+        :return: subset name or None
+        :rtype: str or None
+        """
+        tags = self.metadata.get('system', {}).get('tags', {})
+        for subset in ['train', 'validation', 'test']:
+            if tags.get(subset) is True:
+                return subset
+        return None
 
 class ModalityTypeEnum(str, Enum):
     """
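A per-item sketch of the same tags, assuming an item fetched by ID (the ID is hypothetical):

    import dtlpy as dl

    item = dl.items.get(item_id='my-item-id')   # hypothetical id
    item.assign_subset('validation')            # sets metadata.system.tags.validation = True
    print(item.get_current_subset())            # -> 'validation'
    item.remove_subset()                        # clears train/validation/test back to None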
dtlpy/entities/model.py
CHANGED
@@ -1,11 +1,8 @@
-import json
 from collections import namedtuple
 from enum import Enum
 import traceback
 import logging
-
 import attr
-
 from .. import repositories, entities
 from ..services.api_client import ApiClient
 
@@ -114,6 +111,7 @@ class Model(entities.BaseEntity):
     _project = attr.ib(repr=False)
     _package = attr.ib(repr=False)
     _dataset = attr.ib(repr=False)
+    _feature_set = attr.ib(repr=False)
     _client_api = attr.ib(type=ApiClient, repr=False)
     _repositories = attr.ib(repr=False)
     _ontology = attr.ib(repr=False, default=None)
@@ -191,6 +189,7 @@ class Model(entities.BaseEntity):
             project=project,
             package=package,
             dataset=None,
+            feature_set=None,
             id=_json.get('id', None),
             url=_json.get('url', None),
             scope=_json.get('scope', entities.EntityScopeLevel.PROJECT),
@@ -218,6 +217,7 @@ class Model(entities.BaseEntity):
             attr.fields(Model)._dataset,
             attr.fields(Model)._ontology,
             attr.fields(Model)._repositories,
+            attr.fields(Model)._feature_set,
             attr.fields(Model)._client_api,
             attr.fields(Model).package_id,
             attr.fields(Model).project_id,
@@ -267,6 +267,30 @@ class Model(entities.BaseEntity):
         assert isinstance(self._project, entities.Project)
         return self._project
 
+    @property
+    def feature_set(self) -> 'entities.FeatureSet':
+        if self._feature_set is None:
+            filters = entities.Filters(field='modelId',
+                                       values=self.id,
+                                       resource=entities.FiltersResource.FEATURE_SET)
+            feature_sets = self.project.feature_sets.list(filters=filters)
+            if feature_sets.items_count > 1:
+                logger.warning("Found more than one feature set associated with model entity. Returning first result. "
+                               "Set feature_set if another feature set entity is needed.")
+                self._feature_set = feature_sets.items[0]
+            elif feature_sets.items_count == 1:
+                self._feature_set = feature_sets.items[0]
+            else:
+                self._feature_set = None
+        return self._feature_set
+
+    @feature_set.setter
+    def feature_set(self, feature_set: 'entities.FeatureSet'):
+        if not isinstance(feature_set, entities.FeatureSet):
+            raise ValueError("feature_set must be of type dl.FeatureSet")
+        else:
+            self._feature_set = feature_set
+
     @property
     def package(self):
         if self._package is None:
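The new property makes the model's feature set reachable without a manual filter query; the lookup (a FEATURE_SET filter on modelId) runs once and is cached on the entity. A sketch, assuming a model that may or may not have an associated feature set (the ID is hypothetical):

    import dtlpy as dl

    model = dl.models.get(model_id='my-model-id')  # hypothetical id
    feature_set = model.feature_set                # lazy lookup, cached on the entity
    if feature_set is not None:
        print(feature_set.name, feature_set.id)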
dtlpy/entities/paged_entities.py
CHANGED
@@ -71,7 +71,7 @@ class PagedEntities:
         return self.items_count
 
     def __iter__(self):
-        pbar = tqdm.tqdm(total=self.total_pages_count, disable=self._client_api.verbose.…
+        pbar = tqdm.tqdm(total=self.total_pages_count, disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
                          file=sys.stdout, desc="Iterate Pages")
         if self.page_offset != 0:
             # reset the count for page 0
dtlpy/entities/service.py
CHANGED
@@ -839,6 +839,16 @@ class Service(entities.BaseEntity):
             **kwargs
         )
 
+    def restart(self, replica_name: str = None):
+        """
+        Restart the service.
+
+        :param str replica_name: replica name
+        :return: True
+        :rtype: bool
+        """
+        return self.services.restart(service=self, replica_name=replica_name)
+
 
 class KubernetesAutoscalerType(str, Enum):
     """ The Service Autoscaler Type (RABBITMQ, CPU).
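The entity method simply delegates to Services.restart(), added later in this diff. A sketch, assuming an existing service (the service and replica names are hypothetical):

    import dtlpy as dl

    service = dl.services.get(service_name='my-service')  # hypothetical name
    service.restart()                                     # restart the whole service
    service.restart(replica_name='my-service-0')          # or a single replica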
dtlpy/ml/base_model_adapter.py
CHANGED
@@ -2,7 +2,9 @@ import dataclasses
 import tempfile
 import datetime
 import logging
+import string
 import shutil
+import random
 import base64
 import tqdm
 import sys
@@ -119,7 +121,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         if self._model_entity is not None:
             self.package = self.model_entity.package
         if self._package is None:
-            raise ValueError('Missing Package entity on adapter.…
+            raise ValueError('Missing Package entity on adapter. Please set: "adapter.package=package"')
         assert isinstance(self._package, (entities.Package, entities.Dpk))
         return self._package
 
@@ -306,7 +308,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         data_subset_base_path = os.path.join(data_path, subset)
         if os.path.isdir(data_subset_base_path) and not overwrite:
             # existing and don't overwrite
-            self.logger.debug("Subset {!r}…
+            self.logger.debug("Subset {!r} already exists (and overwrite=False). Skipping.".format(subset))
         else:
             self.logger.debug("Downloading subset {!r} of {}".format(subset,
                                                                      self.model_entity.dataset.name))
@@ -349,7 +351,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
 
         :param model_entity: `str` dl.Model entity
         :param local_path: `str` directory path in local FileSystem to download the model_entity to
-        :param overwrite: `bool` (default False) if False does not…
+        :param overwrite: `bool` (default False) if False, does not download files with the same name; if True, downloads all
         """
         if model_entity is not None:
             self.model_entity = model_entity
@@ -387,8 +389,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         self.save(local_path=local_path, **kwargs)
 
         if self.model_entity is None:
-            raise ValueError('…
-                             …
+            raise ValueError('Missing model entity on the adapter. '
+                             'Please set before saving: "adapter.model_entity=model"')
 
         self.model_entity.artifacts.upload(filepath=os.path.join(local_path, '*'),
                                            overwrite=True)
@@ -466,9 +468,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         """
         Extract feature from an input list of items (or single) and return the items and the feature vector.
 
        :param items: `List[dl.Item]` list of items to embed
         :param upload_features: `bool` uploads the features on the given items
-        :param batch_size: `int` size of batch to run a single…
+        :param batch_size: `int` size of batch to run a single embed
 
         :return: `List[dl.Item]`, `List[List[vector]]`
         """
@@ -477,16 +479,18 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         upload_features = self.adapter_defaults.resolve("upload_features", upload_features)
         input_type = self.model_entity.input_type
         self.logger.debug(
-            "…
+            "Embedding {} items, using batch size {}. input type: {}".format(len(items), batch_size, input_type))
 
         # Search for existing feature set for this model id
-        … (removed lines truncated in the source render)
-                                   resource=entities.FiltersResource.FEATURE_SET)
-        pages = self.model_entity.project.feature_sets.list(filters)
-        if pages.items_count == 0:
-            feature_set_name = self.configuration.get('featureSetName', self.model_entity.name)
+        feature_set = self.model_entity.feature_set
+        if feature_set is None:
             logger.info('Feature Set not found. creating... ')
+            try:
+                self.model_entity.project.feature_sets.get(name=self.model_entity.name)
+                feature_set_name = f"{self.model_entity.name}-{''.join(random.choices(string.ascii_letters + string.digits, k=5))}"
+                logger.warning(f"Feature set with the model name already exists. Creating new feature set with name {feature_set_name}")
+            except exceptions.NotFound:
+                feature_set_name = self.model_entity.name
             feature_set = self.model_entity.project.feature_sets.create(name=feature_set_name,
                                                                         entity_type=entities.FeatureEntityType.ITEM,
                                                                         model_id=self.model_entity.id,
@@ -494,22 +498,15 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                                                                         set_type=self.model_entity.name,
                                                                         size=self.configuration.get('embeddings_size',
                                                                                                     256))
-            if 'featureSetName' not in self.model_entity.configuration:
-                self.model_entity.configuration['featureSetName'] = feature_set_name
-                self.model_entity.update()
             logger.info(f'Feature Set created! name: {feature_set.name}, id: {feature_set.id}')
-        elif pages.items_count > 1:
-            raise ValueError(
-                f'More than one feature set for model. model_id: {self.model_entity.id}, feature_sets_ids: {[f.id for f in pages.all()]}')
         else:
-            feature_set = pages.items[0]
             logger.info(f'Feature Set found! name: {feature_set.name}, id: {feature_set.id}')
 
         # upload the feature vectors
         pool = ThreadPoolExecutor(max_workers=16)
         vectors = list()
         for i_batch in tqdm.tqdm(range(0, len(items), batch_size),
-                                 desc='…
+                                 desc='embedding',
                                  unit='bt',
                                  leave=None,
                                  file=sys.stdout):
@@ -545,19 +542,19 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         Extract feature from all items given
 
         :param dataset: Dataset entity to predict
-        :param filters: Filters entity for a filtering before…
+        :param filters: Filters entity for filtering before embedding
         :param upload_features: `bool` uploads the features back to the given items
-        :param batch_size: `int` size of batch to run a single…
+        :param batch_size: `int` size of batch to run a single embed
 
-        :return: `bool` indicating if the…
+        :return: `bool` indicating if the embedding process completed successfully
         """
         if batch_size is None:
             batch_size = self.configuration.get('batch_size', 4)
         upload_features = self.adapter_defaults.resolve("upload_features", upload_features)
 
-        self.logger.debug("Creating embeddings for dataset (name:{}, id:{}, using batch size {}".format(dataset.name,
-        … (continuation lines truncated in the source render)
+        self.logger.debug("Creating embeddings for dataset (name:{}, id:{}), using batch size {}".format(dataset.name,
+                                                                                                         dataset.id,
+                                                                                                         batch_size))
         if not filters:
             filters = entities.Filters()
         if filters is not None and isinstance(filters, dict):
@@ -771,7 +768,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                                                            entity=item)
             return feature
         except Exception as e:
-            logger.error(f'Failed to upload feature vector…
+            logger.error(f'Failed to upload feature vector of length {len(vector)} to item {item.id}, Error: {e}')
             return []
 
     def _upload_model_annotations(self, item: entities.Item, predictions, clean_annotations):
@@ -794,7 +791,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
     @staticmethod
     def _item_to_image(item):
         """
-        Preprocess items before…
+        Preprocess items before calling the `predict` functions.
         Convert item to numpy array
 
         :param item:
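The behavioral change worth calling out is the feature-set name collision handling: if a feature set with the model's name already exists but is not linked to this model, the adapter now appends a random 5-character alphanumeric suffix instead of raising. A standalone sketch of just that naming rule (the helper name is ours, not the SDK's):

    import random
    import string

    def pick_feature_set_name(base_name: str, name_taken: bool) -> str:
        # mirrors the adapter's fallback: keep the model name unless it is taken,
        # otherwise append a random 5-character alphanumeric suffix
        if not name_taken:
            return base_name
        suffix = ''.join(random.choices(string.ascii_letters + string.digits, k=5))
        return f"{base_name}-{suffix}"

    print(pick_feature_set_name('my-model', name_taken=True))  # e.g. 'my-model-a3Xk9'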
dtlpy/repositories/commands.py
CHANGED
@@ -95,7 +95,7 @@ class Commands:
 
         command = None
         pbar = tqdm.tqdm(total=100,
-                         disable=self._client_api.verbose.…
+                         disable=self._client_api.verbose.disable_progress_bar_command_progress,
                          file=sys.stdout,
                          desc='Command Progress')
         num_tries = 1
dtlpy/repositories/datasets.py
CHANGED
@@ -1008,7 +1008,7 @@ class Datasets:
         pool = dataset._client_api.thread_pools(pool_name='dataset.download')
         jobs = [None for _ in range(pages.items_count)]
         progress = tqdm.tqdm(total=pages.items_count,
-                             disable=dataset._client_api.verbose.…
+                             disable=dataset._client_api.verbose.disable_progress_bar_download_annotations,
                              file=sys.stdout, desc='Download Annotations')
         i_item = 0
         for page in pages:
@@ -1081,7 +1081,7 @@ class Datasets:
         filters._user_query = 'false'
         pages = dataset.items.list(filters=filters)
         total_items = pages.items_count
-        pbar = tqdm.tqdm(total=total_items, disable=dataset._client_api.verbose.…
+        pbar = tqdm.tqdm(total=total_items, disable=dataset._client_api.verbose.disable_progress_bar_upload_annotations,
                          file=sys.stdout, desc='Upload Annotations')
         pool = self._client_api.thread_pools('annotation.upload')
         annotations_uploaded_count = 0
@@ -1126,3 +1126,108 @@ class Datasets:
         """
         import warnings
         warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
+
+    @_api_reference.add(path='/datasets/{id}/split', method='post')
+    def split_ml_subsets(self,
+                         dataset_id: str,
+                         items_query: entities.Filters,
+                         ml_split_list: dict) -> bool:
+        """
+        Split dataset items into ML subsets.
+
+        :param str dataset_id: The ID of the dataset.
+        :param entities.Filters items_query: Query to select items.
+        :param dict ml_split_list: Dictionary with 'train', 'validation', 'test' keys and integer percentages.
+        :return: True if the split operation was successful.
+        :rtype: bool
+        :raises: PlatformException on failure and ValueError if percentages do not sum to 100 or keys/values are invalid.
+        """
+        # Validate percentages
+        if not ml_split_list:
+            ml_split_list = {'train': 80, 'validation': 10, 'test': 10}
+
+        if not items_query:
+            items_query = entities.Filters()
+
+        items_query_dict = items_query.prepare()
+        required_keys = {'train', 'validation', 'test'}
+        if set(ml_split_list.keys()) != required_keys:
+            raise ValueError("MLSplitList must have exactly the keys 'train', 'validation', 'test'.")
+        total = sum(ml_split_list.values())
+        if total != 100:
+            raise ValueError(
+                "Please set the Train, Validation, and Test subset percentages to add up to 100%. "
+                "For example: 70, 15, 15."
+            )
+        for key, value in ml_split_list.items():
+            if not isinstance(value, int) or value < 0:
+                raise ValueError("Percentages must be integers >= 0.")
+        payload = {
+            'itemsQuery': items_query_dict,
+            'MLSplitList': ml_split_list
+        }
+        path = f'/datasets/{dataset_id}/split'
+        success, response = self._client_api.gen_request(req_type='post',
+                                                         path=path,
+                                                         json_req=payload)
+        if success:
+            # Wait for the split operation to complete
+            command = entities.Command.from_json(_json=response.json(),
+                                                 client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(response)
+
+    @_api_reference.add(path='/datasets/{id}/items/bulk-update-metadata', method='post')
+    def bulk_update_ml_subset(self, dataset_id: str, items_query: entities.Filters, subset: str = None, deleteTag: bool = False) -> bool:
+        """
+        Bulk update the ML subset assignment for selected items.
+        If deleteTag is set, remove all subset tags; otherwise, assign the specified subset.
+
+        :param str dataset_id: ID of the dataset
+        :param entities.Filters items_query: filters for selecting items
+        :param str subset: 'train', 'validation', 'test', or None to remove all
+        :return: True if successful
+        :rtype: bool
+        """
+        if items_query is None:
+            items_query = entities.Filters()
+        items_query_dict = items_query.prepare()
+        if not deleteTag and subset not in ['train', 'validation', 'test']:
+            raise ValueError("subset must be one of: 'train', 'validation', 'test'")
+        # Determine tag values based on subset
+        tags = {
+            'train': True if subset == 'train' else None,
+            'validation': True if subset == 'validation' else None,
+            'test': True if subset == 'test' else None
+        }
+
+        payload = {
+            "query": items_query_dict,
+            "updateQuery": {
+                "update": {
+                    "metadata": {
+                        "system": {
+                            "tags": tags
+                        }
+                    }
+                },
+                "systemSpace": True
+            }
+        }
+
+        success, response = self._client_api.gen_request(
+            req_type='post',
+            path=f'/datasets/{dataset_id}/items/bulk-update-metadata',
+            json_req=payload
+        )
+        if success:
+            # As with the split operation, a command is returned
+            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(response)
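For reference, bulk_update_ml_subset() posts a DQL query plus an update document; assigning 'train' produces a body like the following (field names taken from the code above, the query value is illustrative):

    payload = {
        "query": items_query.prepare(),   # DQL generated by dl.Filters
        "updateQuery": {
            "update": {
                "metadata": {
                    "system": {
                        "tags": {"train": True, "validation": None, "test": None}
                    }
                }
            },
            "systemSpace": True           # write into the system metadata space
        }
    }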
dtlpy/repositories/downloader.py
CHANGED
@@ -211,7 +211,7 @@ class Downloader:
         # pool
         pool = client_api.thread_pools(pool_name='item.download')
         # download
-        pbar = tqdm.tqdm(total=num_items, disable=client_api.verbose.…
+        pbar = tqdm.tqdm(total=num_items, disable=client_api.verbose.disable_progress_bar_download_dataset, file=sys.stdout,
                          desc='Download Items')
         try:
             i_item = 0
@@ -699,7 +699,7 @@ class Downloader:
                 unit_divisor=1024,
                 position=1,
                 file=sys.stdout,
-                disable=self.items_repository._client_api.verbose.…
+                disable=self.items_repository._client_api.verbose.disable_progress_bar_download_item,
                 desc='Download Item')
         except Exception as err:
             one_file_progress_bar = False
dtlpy/repositories/models.py
CHANGED
@@ -674,7 +674,12 @@ class Models:
             raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
         if item_ids is None and dataset_id is None:
             raise ValueError("Need to provide either item_ids or dataset_id")
-        …
+        payload_input = {}
+        if item_ids is not None:
+            payload_input['itemIds'] = item_ids
+        if dataset_id is not None:
+            payload_input['datasetId'] = dataset_id
+        payload = {'input': payload_input,
                    'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
 
         success, response = self._client_api.gen_request(req_type="post",
@@ -701,7 +706,12 @@ class Models:
             raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
         if item_ids is None and dataset_id is None:
             raise ValueError("Need to provide either item_ids or dataset_id")
-        …
+        payload_input = {}
+        if item_ids is not None:
+            payload_input['itemIds'] = item_ids
+        if dataset_id is not None:
+            payload_input['datasetId'] = dataset_id
+        payload = {'input': payload_input,
                    'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
 
         success, response = self._client_api.gen_request(req_type="post",
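With this fix the request body only carries the inputs that were actually provided, instead of explicit null keys. A sketch of the calling side, assuming a deployed model whose entity-level predict forwards here (IDs hypothetical):

    import dtlpy as dl

    model = dl.models.get(model_id='my-model-id')          # hypothetical id
    # item_ids only -> payload input is {'itemIds': [...]}, with no 'datasetId' key
    execution = model.predict(item_ids=['item-id-1', 'item-id-2'])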
dtlpy/repositories/services.py
CHANGED
@@ -1589,6 +1589,40 @@ class Services:
 
         return True
 
+    def restart(self, service: entities.Service, replica_name: str = None):
+        """
+        Restart a service replica.
+
+        **Prerequisites**: You must be in the role of a *developer*.
+
+        :param dtlpy.entities.service.Service service: service entity
+        :param str replica_name: replica name
+        :return: True
+        :rtype: bool
+
+        **Example**:
+
+        .. code-block:: python
+
+            is_restarted = dl.services.restart(service=service_entity,
+                                               replica_name='replica_name')
+        """
+        payload = {}
+
+        if replica_name is not None:
+            payload['replicaName'] = replica_name
+
+        # request
+        success, response = self._client_api.gen_request(req_type='post',
+                                                         path='/services/{}/restart'.format(service.id),
+                                                         json_req=payload)
+
+        # exception handling
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        return True
+
 
 class ServiceLog:
     """
@@ -1661,3 +1695,4 @@ class ServiceLog:
         for log in self.logs:
             yield '{}: {}'.format(log.get('timestamp', self.start), log.get('message', '').strip())
         self.get_next_log()
+
dtlpy/repositories/uploader.py
CHANGED
@@ -38,7 +38,7 @@ class Uploader:
         self.num_files = 0
         self.i_item = 0
         self.pbar = tqdm.tqdm(total=0,
-                              disable=self.items_repository._client_api.verbose.…
+                              disable=self.items_repository._client_api.verbose.disable_progress_bar_upload_items,
                               file=sys.stdout, desc='Upload Items')
         self.reporter = Reporter(num_workers=0,
                                  resource=Reporter.ITEMS_UPLOAD,
dtlpy/services/api_client.py
CHANGED
@@ -104,6 +104,16 @@ class Verbose:
     __DEFAULT_DISABLE_PROGRESS_BAR = False
     __DEFAULT_PRINT_ALL_RESPONSES = False
     __PRINT_ERROR_LOGS = False
+    __DEFAULT_PROGRESS_BAR_SETTINGS = {
+        'Iterate Pages': False,
+        'Command Progress': False,
+        'Download Dataset': False,
+        'Download Item': False,
+        'Upload Items': False,
+        'Download Annotations': False,
+        'Upload Annotations': False,
+        'Convert Annotations': False
+    }
 
     def __init__(self, cookie):
         self.cookie = cookie
@@ -115,6 +125,7 @@ class Verbose:
         self._disable_progress_bar = self.__DEFAULT_DISABLE_PROGRESS_BAR
         self._print_all_responses = self.__DEFAULT_PRINT_ALL_RESPONSES
         self._print_error_logs = self.__PRINT_ERROR_LOGS
+        self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
         if os.getenv('DTLPY_REFRESH_TOKEN_METHOD', "") == "proxy":
             self._print_error_logs = True
         self.to_cookie()
@@ -123,7 +134,9 @@ class Verbose:
         dictionary = {'logging_level': self._logging_level,
                       'disable_progress_bar': self._disable_progress_bar,
                       'print_all_responses': self._print_all_responses,
-                      'print_error_logs': self._print_error_logs
+                      'print_error_logs': self._print_error_logs,
+                      'progress_bar_setting': json.dumps(self._progress_bar_settings)
+                      }
         self.cookie.put(key='verbose', value=dictionary)
 
     def from_cookie(self, dictionary):
@@ -131,6 +144,83 @@ class Verbose:
         self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
         self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
         self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
+        progress_bar_settings = dictionary.get('progress_bar_setting', None)
+        if progress_bar_settings is None:
+            self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
+        else:
+            self._progress_bar_settings = json.loads(progress_bar_settings)
+
+    @property
+    def disable_progress_bar_iterate_pages(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Iterate Pages', False)
+
+    @disable_progress_bar_iterate_pages.setter
+    def disable_progress_bar_iterate_pages(self, val):
+        self._progress_bar_settings['Iterate Pages'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_command_progress(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Command Progress', False)
+
+    @disable_progress_bar_command_progress.setter
+    def disable_progress_bar_command_progress(self, val):
+        self._progress_bar_settings['Command Progress'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_download_item(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Download Item', False)
+
+    @disable_progress_bar_download_item.setter
+    def disable_progress_bar_download_item(self, val):
+        self._progress_bar_settings['Download Item'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_download_dataset(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Download Dataset', False)
+
+    @disable_progress_bar_download_dataset.setter
+    def disable_progress_bar_download_dataset(self, val):
+        self._progress_bar_settings['Download Dataset'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_upload_items(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Upload Items', False)
+
+    @disable_progress_bar_upload_items.setter
+    def disable_progress_bar_upload_items(self, val):
+        self._progress_bar_settings['Upload Items'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_download_annotations(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Download Annotations', False)
+
+    @disable_progress_bar_download_annotations.setter
+    def disable_progress_bar_download_annotations(self, val):
+        self._progress_bar_settings['Download Annotations'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_upload_annotations(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Upload Annotations', False)
+
+    @disable_progress_bar_upload_annotations.setter
+    def disable_progress_bar_upload_annotations(self, val):
+        self._progress_bar_settings['Upload Annotations'] = val
+        self.to_cookie()
+
+    @property
+    def disable_progress_bar_convert_annotations(self):
+        return self._disable_progress_bar or self._progress_bar_settings.get('Convert Annotations', False)
+
+    @disable_progress_bar_convert_annotations.setter
+    def disable_progress_bar_convert_annotations(self, val):
+        self._progress_bar_settings['Convert Annotations'] = val
+        self.to_cookie()
 
     @property
     def disable_progress_bar(self):
@@ -1073,6 +1163,7 @@ class ApiClient:
                                     headers=headers_req,
                                     chunked=stream,
                                     retry_attempts=5,
+                                    ssl=self.verify,
                                     retry_exceptions={aiohttp.client_exceptions.ClientOSError,
                                                       aiohttp.client_exceptions.ServerDisconnectedError,
                                                       aiohttp.client_exceptions.ClientPayloadError},
@@ -1161,7 +1252,7 @@ class ApiClient:
                 unit_divisor=1024,
                 position=1,
                 file=sys.stdout,
-                disable=self.verbose.…
+                disable=self.verbose.disable_progress_bar_upload_items,
                 desc='Upload Items')
 
     def callback(bytes_read):
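Each named bar can now be silenced on its own, while the global flag still wins: every per-bar property ORs disable_progress_bar with its own setting, and the setters persist the choice to the cookie. A sketch, assuming the SDK's module-level client:

    import dtlpy as dl

    # silence only the item-upload bar; other bars keep rendering
    dl.client_api.verbose.disable_progress_bar_upload_items = True

    # the global switch still silences every bar at once
    dl.client_api.verbose.disable_progress_bar = True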
dtlpy/utilities/converter.py
CHANGED
@@ -209,7 +209,7 @@ class Converter:
             for label in labels:
                 fp.write("{}\n".format(label))
 
-        pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.…
+        pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.disable_progress_bar_convert_annotations,
                          file=sys.stdout, desc='Convert Annotations')
        reporter = Reporter(
             num_workers=pages.items_count,
@@ -359,7 +359,7 @@ class Converter:
         converted_annotations = [None for _ in range(pages.items_count)]
         item_id_counter = 0
         pool = ThreadPool(processes=self.concurrency)
-        pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.…
+        pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.disable_progress_bar_convert_annotations,
                          file=sys.stdout, desc='Convert Annotations')
         reporter = Reporter(
             num_workers=pages.items_count,
{dtlpy-1.99.12.dist-info → dtlpy-1.101.8.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-dtlpy/__init__.py,sha256=…
-dtlpy/__version__.py,sha256=…
+dtlpy/__init__.py,sha256=1Zngp5ftTgWb3r-sc8z98TYpEs6fOB_7snFhsXYQLVg,20899
+dtlpy/__version__.py,sha256=Ozzlk4O9Dg2923lh6fsAFvrBcMuihy5L2HWDivhCqaA,20
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
 dtlpy/new_instance.py,sha256=u_c6JtgqsKCr7TU24-g7_CaST9ghqamMhM4Z0Zxt50w,10121
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -57,9 +57,9 @@ dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
 dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
 dtlpy/entities/command.py,sha256=ARu8ttk-C7_Ice7chRyTtyOtakBTF09FC04mEk73SO8,5010
 dtlpy/entities/compute.py,sha256=4FEpahPFFGHxye_fLh_p_kP6iEQ3QJK7S5hAdd6Afos,12744
-dtlpy/entities/dataset.py,sha256=…
+dtlpy/entities/dataset.py,sha256=4_jSlAtj-vlI4egN_tg5ac_R9tS4wyK9j0-Vpf5icmY,50456
 dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
-dtlpy/entities/dpk.py,sha256=…
+dtlpy/entities/dpk.py,sha256=Hqct4NbYzX7dcPxvc2QLD0QQxmGbxurwabqrHI3k6X4,17798
 dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
 dtlpy/entities/execution.py,sha256=WBiAws-6wZnQQ3y9wyvOeexA3OjxfaRdwDu5dSFYL1g,13420
 dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
@@ -67,11 +67,11 @@ dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0
 dtlpy/entities/filters.py,sha256=_A7rLc0yuMw1eW4gSu4-hogQzXbNUheRvFxnTQWntuo,22671
 dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
 dtlpy/entities/integration.py,sha256=CA5F1eQCGE_4c_Kry4nWRdeyjHctNnvexcDXg_M5HLU,5734
-dtlpy/entities/item.py,sha256=…
+dtlpy/entities/item.py,sha256=UnAZ7yLaTu2vkbD2sxysJQNn-ceC9zS3Uf304OvcC4E,31746
 dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
 dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
 dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
-dtlpy/entities/model.py,sha256=…
+dtlpy/entities/model.py,sha256=EeJsCePIddavvMVhyNGt6ZIsyteucQoI0cY6byPOi9Y,26878
 dtlpy/entities/node.py,sha256=yPPYDLtNMc6vZbbf4FIffY86y7tkaTvYm42Jb7k3Ofk,39617
 dtlpy/entities/ontology.py,sha256=ok4p3sLBc_SS5hs2gZr5-gbblrveM7qSIX4z67QSKeQ,31967
 dtlpy/entities/organization.py,sha256=AMkx8hNIIIjnu5pYlNjckMRuKt6H3lnOAqtEynkr7wg,9893
@@ -80,7 +80,7 @@ dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiU
 dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKDugudc,6314
 dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
 dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
-dtlpy/entities/paged_entities.py,sha256=…
+dtlpy/entities/paged_entities.py,sha256=grNjt2FYg4gKBlVRDkztI1BPOI4JoGeyjvmOW3BnB3k,5927
 dtlpy/entities/pipeline.py,sha256=X9238WbMGfZcXdQVEtkw8twZwl0O4EZB4TxbTSEyPeI,20788
 dtlpy/entities/pipeline_execution.py,sha256=XCXlBAHFYVL2HajE71hK-bPxI4gTwZvg5SKri4BgyRA,9928
 dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
@@ -88,7 +88,7 @@ dtlpy/entities/prompt_item.py,sha256=d4rqP961PYlJvJJDRXZPI7Z6NdwRXlx_Q0_N0xtZ_B8
 dtlpy/entities/recipe.py,sha256=Q1HtYgind3bEe-vnDZWhw6H-rcIAGhkGHPRWtLIkPSE,11917
 dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y4,3273
 dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
-dtlpy/entities/service.py,sha256=…
+dtlpy/entities/service.py,sha256=NI4lFC6FqLw4aEGarr2rMptxe3omVfC39C9VAnYYEJA,33733
 dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
 dtlpy/entities/task.py,sha256=XHiEqZYFlrDCtmw1MXsysjoBLdIzAk7coMrVk8bNIiE,19534
 dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
@@ -112,7 +112,7 @@ dtlpy/entities/annotation_definitions/polygon.py,sha256=gI28yzvYgDj_js6bU9djAzsw
 dtlpy/entities/annotation_definitions/polyline.py,sha256=8Rid2MxwImHM3-fM-0QjzZZE41-dIpdF45ut8KwNxtA,3237
 dtlpy/entities/annotation_definitions/pose.py,sha256=Ah4vtcYTEieDHgaZXQGy4KPEZbvTID0CuWHmRiLz3hw,2527
 dtlpy/entities/annotation_definitions/ref_image.py,sha256=uDXjZPRSU9rjnzkjklUp7P7Bx9ODeBz87vzjz_LHGqA,2826
-dtlpy/entities/annotation_definitions/segmentation.py,sha256=…
+dtlpy/entities/annotation_definitions/segmentation.py,sha256=CEHP9iz3Xc8Wu-A8Wy7C0Zcmwo_9OQYC9u4T-XDW5ww,7354
 dtlpy/entities/annotation_definitions/subtitle.py,sha256=cfNi-19KVYInmxsy5usvjDZdyGgH1Mgss_SiJhT9Bn0,1005
 dtlpy/entities/annotation_definitions/text.py,sha256=r-7laetbKvXL7hSG7AsRl9B5ZVYopUm5vB7rxgkmrCo,2559
 dtlpy/entities/annotation_definitions/undefined_annotation.py,sha256=XUocNEnmWst4D0erlf8GBAjedgFLy0K6K4fr2p_dNas,1882
@@ -148,7 +148,7 @@ dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRStt
 dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
 dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
 dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
-dtlpy/ml/base_model_adapter.py,sha256=…
+dtlpy/ml/base_model_adapter.py,sha256=y7txv5JdTpVCZY6IvYXiQRSz4k3YXhHFEKoDTkcVHz0,50610
 dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
 dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
 dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
@@ -161,11 +161,11 @@ dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzM
 dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
 dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
 dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zhGA,24668
-dtlpy/repositories/commands.py,sha256=…
+dtlpy/repositories/commands.py,sha256=i6gQgOmRDG8ixqKU7672H3CvGt8VLT3ihDVfri1eWWc,5610
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
 dtlpy/repositories/computes.py,sha256=EtfE_3JhTdNlSYDPkKXBFkq-DBl4sgQqIm50ajvFdWM,9976
-dtlpy/repositories/datasets.py,sha256=…
-dtlpy/repositories/downloader.py,sha256=…
+dtlpy/repositories/datasets.py,sha256=SpG86uToq-E5nVHMwHgWx6VwwwkgfYo8x5vZ0WA3Ouw,56546
+dtlpy/repositories/downloader.py,sha256=rtgGj6jAfXxHZ1oihFoOkK4MUtapFpVMdREKzXKLnu0,44113
 dtlpy/repositories/dpks.py,sha256=mj3QPvfzj_jZAscwIgpKUfa7fLxptc3OJQ_RrSfgYxo,17487
 dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
 dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
@@ -174,7 +174,7 @@ dtlpy/repositories/features.py,sha256=7xA2ihEuNgZD7HBQMMGLWpsS2V_3PgieKW2YAk1OeU
 dtlpy/repositories/integrations.py,sha256=Wi-CpT2PH36GFu3znWP5Uf2CmkqWBUYyOdwvatGD_eM,11798
 dtlpy/repositories/items.py,sha256=90Z8-thLWBd49fmmnP-P6pZxhHX1k4Wv6Qfxq-Ovcz4,38092
 dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
-dtlpy/repositories/models.py,sha256=…
+dtlpy/repositories/models.py,sha256=IekNMcnuKVaAVTJf2AJv6YvX5qCd9kkSl4ETPMWP4Zc,38213
 dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
 dtlpy/repositories/ontologies.py,sha256=unnMhD2isR9DVE5S8Fg6fSDf1ZZ5Xemxxufx4LEUT3w,19577
 dtlpy/repositories/organizations.py,sha256=6ijUDFbsogfRul1g_vUB5AZOb41MRmV5NhNU7WLHt3A,22825
@@ -185,17 +185,17 @@ dtlpy/repositories/projects.py,sha256=tZyFLqVs-8ggTIi5echlX7XdGOJGW4LzKuXke7jkRn
 dtlpy/repositories/recipes.py,sha256=ZZDhHn9g28C99bsf0nFaIpVYn6f6Jisz9upkHEkeaYY,15843
 dtlpy/repositories/resource_executions.py,sha256=PyzsbdJxz6jf17Gx13GZmqdu6tZo3TTVv-DypnJ_sY0,5374
 dtlpy/repositories/schema.py,sha256=kTKDrbwm7BfQnBAK81LpAl9ChNFdyUweSLNazlJJhjk,3953
-dtlpy/repositories/services.py,sha256=…
+dtlpy/repositories/services.py,sha256=2ruoPwyznRwsNtM7YK2vSGQP9jtCHB6WitRo-Z0yB_c,68576
 dtlpy/repositories/settings.py,sha256=pvqNse0ANCdU3NSLJEzHco-PZq__OIsPSPVJveB9E4I,12296
 dtlpy/repositories/tasks.py,sha256=v09S2pYGkKx_vBG7SWigJeuMhp0GsefKo3Td7ImrWb0,49039
 dtlpy/repositories/times_series.py,sha256=m-bKFEgiZ13yQNelDjBfeXMUy_HgsPD_JAHj1GVx9fU,11420
 dtlpy/repositories/triggers.py,sha256=izdNyCN1gDc5uo7AXntso0HSMTDIzGFUp-dSEz8cn_U,21990
 dtlpy/repositories/upload_element.py,sha256=R2KWIXmkp_dMAIr81tu3Y_VRfldj0ju8__V28ombkcg,10677
-dtlpy/repositories/uploader.py,sha256=…
+dtlpy/repositories/uploader.py,sha256=9tbXPuBmRJN95Ifcrr-UDT-6tiKqpTiRMnuS8qcjTag,31319
 dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
 dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
 dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
-dtlpy/services/api_client.py,sha256=…
+dtlpy/services/api_client.py,sha256=7ctRdpLX6QvJLI-xNRXqlqfrhOiGl5EynY9nrLkuJw4,73113
 dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
 dtlpy/services/async_utils.py,sha256=kaYHTPw0Lg8PeJJq8whPyzrBYkzD7offs5hsKRZXJm8,3960
 dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
@@ -208,7 +208,7 @@ dtlpy/services/reporter.py,sha256=4zi9-bshKAPHG2XMOXS39cFZ0mhqNc3Qa9uaMN7CSZ8,91
 dtlpy/services/service_defaults.py,sha256=a7KoqkVmn2TXmM9gN9JRaVVtcG2b8JGIieVnaZeEaao,3860
 dtlpy/utilities/__init__.py,sha256=ncQD1O5lZ7L9n9rNRBivyqNVFDZyQcmqn-X-wyQhhIs,898
 dtlpy/utilities/base_package_runner.py,sha256=tux_XCiCoOhMPtFaQludzhj0ny6OTKhyoN1aXjPal54,8522
-dtlpy/utilities/converter.py,sha256=…
+dtlpy/utilities/converter.py,sha256=4CDrhmG1DZPxhZf2AfB4s7-mb3apYJ7zQ4XHuY8aPH4,74856
 dtlpy/utilities/annotations/__init__.py,sha256=Eb72MloiwDQWe8H4NptFP1RZEEhcY2Fz_w_e34tdCiE,728
 dtlpy/utilities/annotations/annotation_converters.py,sha256=KOqLVtb88GnrvuVi5x-t5vtzVN9Am98RersBl_D44SU,10796
 dtlpy/utilities/dataset_generators/__init__.py,sha256=pA7UqhTh51gC407FyNa_WG8fUFnd__4tmEUTkNBlcLs,65
@@ -223,9 +223,9 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.…
-dtlpy-1.…
-dtlpy-1.…
+dtlpy-1.101.8.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.101.8.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.101.8.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
@@ -233,9 +233,9 @@ tests/assets/models_flow/main.py,sha256=xotAjdHpFnIic3Wb-4f7GSg2igtuXZjvRPiYdCTa
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/features/environment.py,sha256=V23cUx_p4VpNk9kc2I0BDZJHO_xcJBFJq8m3JlYCooc,16736
-dtlpy-1.…
-dtlpy-1.…
-dtlpy-1.…
-dtlpy-1.…
-dtlpy-1.…
-dtlpy-1.…
+dtlpy-1.101.8.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.101.8.dist-info/METADATA,sha256=EtBmjxKIz7pBOOh-ZOrI_IiLcUhSICMf7oyCNXDKIDI,3019
+dtlpy-1.101.8.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.101.8.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.101.8.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.101.8.dist-info/RECORD,,
The remaining files (scripts/dlp, scripts/dlp.bat, scripts/dlp.py, LICENSE, WHEEL, entry_points.txt, top_level.txt) are unchanged; only their containing directories were renamed for the new version.