dtlpy 1.108.7__py3-none-any.whl → 1.109.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +1 -7
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +3 -3
- dtlpy/entities/annotation.py +26 -57
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +6 -14
- dtlpy/entities/command.py +10 -7
- dtlpy/entities/compute.py +40 -91
- dtlpy/entities/dataset.py +29 -14
- dtlpy/entities/dpk.py +1 -0
- dtlpy/entities/filters.py +3 -1
- dtlpy/entities/item.py +7 -14
- dtlpy/entities/node.py +0 -12
- dtlpy/entities/service.py +0 -9
- dtlpy/entities/service_driver.py +118 -0
- dtlpy/entities/trigger.py +1 -1
- dtlpy/new_instance.py +1 -1
- dtlpy/repositories/__init__.py +2 -1
- dtlpy/repositories/collections.py +86 -34
- dtlpy/repositories/commands.py +14 -4
- dtlpy/repositories/computes.py +160 -123
- dtlpy/repositories/datasets.py +20 -9
- dtlpy/repositories/downloader.py +20 -8
- dtlpy/repositories/dpks.py +26 -1
- dtlpy/repositories/items.py +5 -2
- dtlpy/repositories/service_drivers.py +213 -0
- dtlpy/repositories/services.py +6 -0
- dtlpy-1.109.19.dist-info/METADATA +172 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/RECORD +35 -33
- dtlpy-1.108.7.dist-info/METADATA +0 -82
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp +0 -0
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp.py +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/LICENSE +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/WHEEL +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/top_level.txt +0 -0
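The largest additions are the new service-driver entity and repository (dtlpy/entities/service_driver.py, dtlpy/repositories/service_drivers.py), which take over the service-driver CRUD that this release removes from dtlpy/repositories/computes.py (see below). A minimal usage sketch, assuming the repository is exposed as dl.service_drivers, as the docstring example added in computes.py suggests:

    import dtlpy as dl

    # Assumption: dl.service_drivers is the new repository handle; only the
    # list() call below is confirmed by a docstring in this diff.
    drivers = dl.service_drivers.list()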
dtlpy/repositories/computes.py
CHANGED
@@ -2,11 +2,16 @@ import base64
 import datetime
 import json
 
+from dtlpy import miscellaneous
+
 from ..services.api_client import ApiClient
 from .. import exceptions, entities, repositories
 from typing import List, Optional, Dict
-from ..entities import ComputeCluster, ComputeContext, ComputeType
+from ..entities import ComputeCluster, ComputeContext, ComputeType
 from ..entities.integration import IntegrationType
+import logging
+
+logger = logging.getLogger(name='dtlpy')
 
 
 class Computes:
@@ -17,6 +22,7 @@ class Computes:
         self._commands = None
         self._projects = None
         self._organizations = None
+        self.log_cache = dict()
 
     @property
     def commands(self) -> repositories.Commands:
@@ -63,6 +69,7 @@ class Computes:
         :param status: Compute status
         :param settings: Compute settings
         :return: Compute
+        :rtype: dl.entities.compute.Compute
         """
 
         shared_contexts_json = []
@@ -91,26 +98,69 @@ class Computes:
         if not success:
             raise exceptions.PlatformException(response)
 
-        compute = entities.Compute.from_json(
-            _json=response.json(),
-            client_api=self._client_api
-        )
+        compute = self._build_compute_by_type(response.json())
 
         if wait:
             command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
             if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
-                command.wait()
+                try:
+                    command.wait(iteration_callback=self.__get_log_compute_progress_callback(compute.id))
+                except Exception as e:
+                    self.log_cache.pop(compute.id, None)
+                    raise e
             compute = self.get(compute_id=compute.id)
 
         return compute
 
+    def _build_compute_by_type(self, _json):
+        if _json.get('type') == 'kubernetes':
+            compute = entities.KubernetesCompute.from_json(
+                _json=_json,
+                client_api=self._client_api
+            )
+        else:
+            compute = entities.Compute.from_json(
+                _json=_json,
+                client_api=self._client_api
+            )
+        return compute
+
+    def __get_log_compute_progress_callback(self, compute_id: str):
+        def func():
+            compute = self.get(compute_id=compute_id)
+            bootstrap_progress = compute.metadata.get('system', {}).get('bootstrapProcess', {}).get('progress', None)
+            bootstrap_logs = compute.metadata.get('system', {}).get('bootstrapProcess', {}).get('logs', None)
+            validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
+            validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
+            if bootstrap_progress not in [None, 100]:
+                logger.info(f"Bootstrap in progress: {bootstrap_progress}%")
+                last_index = len(self.log_cache.get(compute_id, {}).get('bootstrap', []))
+                new_logs = bootstrap_logs[last_index:]
+                if new_logs:
+                    logger.info("Bootstrap Logs: {}".format('\n'.join(new_logs)))
+                if compute_id not in self.log_cache:
+                    self.log_cache[compute_id] = {}
+                self.log_cache[compute_id]['bootstrap'] = bootstrap_logs
+            if validation_progress not in [None, 100]:
+                logger.info(f"Validating created compute. Progress: {validation_progress}%")
+                last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
+                new_logs = validation_logs[last_index:]
+                if new_logs:
+                    logger.info("Validation Logs: {}".format('\n'.join(new_logs)))
+                if compute_id not in self.log_cache:
+                    self.log_cache[compute_id] = {}
+                self.log_cache[compute_id]['validation'] = validation_logs
+        return func
+
+
     def get(self, compute_id: str):
         """
         Get a compute
 
         :param compute_id: Compute ID
         :return: Compute
+        :rtype: dl.entities.compute.Compute
         """
 
         # request
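The callback above polls the compute and prints only the log lines that were not printed on a previous iteration, using log_cache as a high-water mark. A standalone sketch of that dedup pattern (names here are illustrative, not part of the dtlpy API):

    # Illustrative re-implementation of the incremental-log pattern above.
    log_cache = {}

    def emit_new_logs(compute_id: str, phase: str, logs: list):
        # Only print the tail that was not seen on the previous poll.
        last_index = len(log_cache.get(compute_id, {}).get(phase, []))
        new_logs = logs[last_index:]
        if new_logs:
            print(f"{phase} logs:\n" + "\n".join(new_logs))
        log_cache.setdefault(compute_id, {})[phase] = logs

    emit_new_logs('c1', 'bootstrap', ['pulling image'])
    # Second poll returns a superset; only 'starting pod' is printed.
    emit_new_logs('c1', 'bootstrap', ['pulling image', 'starting pod'])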
@@ -122,10 +172,7 @@ class Computes:
         if not success:
             raise exceptions.PlatformException(response)
 
-        compute = entities.Compute.from_json(
-            _json=response.json(),
-            client_api=self._client_api
-        )
+        compute = self._build_compute_by_type(response.json())
 
         return compute
 
@@ -135,6 +182,7 @@ class Computes:
 
         :param compute: Compute
         :return: Compute
+        :rtype: dl.entities.compute.Compute
         """
 
         # request
@@ -147,10 +195,7 @@ class Computes:
         if not success:
             raise exceptions.PlatformException(response)
 
-        compute = entities.Compute.from_json(
-            _json=response.json(),
-            client_api=self._client_api
-        )
+        compute = self._build_compute_by_type(response.json())
 
         return compute
 
@@ -172,6 +217,60 @@ class Computes:
 
         return True
 
+    def validate(self, compute_id: str, wait: bool = True):
+        """
+        Validate a compute
+
+        :param str compute_id: Compute ID
+        :param bool wait: Wait for validation
+        :return: Compute
+        :rtype: dl.entities.compute.Compute
+        """
+
+        # request
+        success, response = self._client_api.gen_request(
+            req_type='post',
+            path=self._base_url + '/{}/validate'.format(compute_id)
+        )
+
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        compute = self._build_compute_by_type(response.json())
+
+        if wait:
+            command_id = compute.metadata.get('system', {}).get('commands', {}).get('validate', None)
+            if command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                try:
+                    command.wait(iteration_callback=self.__get_log_compute_progress_callback(compute.id))
+                except Exception as e:
+                    self.log_cache.pop(compute.id, None)
+                    raise e
+            compute = self.get(compute_id=compute.id)
+
+        return compute
+
+    def list_global(self):
+        """
+        List computes
+
+        :return: List of computes
+        :rtype: list[str]
+        """
+
+        # request
+        success, response = self._client_api.gen_request(
+            req_type='get',
+            path=self._base_url + '/globals',
+        )
+
+        if not success:
+            raise exceptions.PlatformException(response)
+
+
+        return response.json()
+
     @staticmethod
     def read_file(file_path):
         try:
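A hedged usage sketch for the two new endpoints; dl.computes as the repository handle is an assumption, and the compute ID is a placeholder:

    import dtlpy as dl

    # Re-run validation and block until the validate command finishes;
    # progress and logs are emitted through the 'dtlpy' logger.
    compute = dl.computes.validate(compute_id='<compute-id>', wait=True)

    # Raw JSON returned by GET .../globals (no entity wrapping).
    global_computes = dl.computes.list_global()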
@@ -230,121 +329,59 @@ class Computes:
         return compute
 
 
-    def create(
-            self,
-            name: str,
-            compute_id: str,
-            context: entities.ComputeContext
-    ):
-        """
-        Create a new service driver
-
-        :param name: Service driver name
-        :param compute_id: Compute ID
-        :param context: Compute context
-        :return: Service driver
-        """
-
-        payload = {
-            'name': name,
-            'computeId': compute_id,
-            'context': context.to_json()
-        }
-
-        # request
-        success, response = self._client_api.gen_request(
-            req_type='post',
-            path=self._base_url,
-            json_req=payload
-        )
-
-        if not success:
-            raise exceptions.PlatformException(response)
-
-        service_driver = entities.ServiceDriver.from_json(
-            _json=response.json(),
-            client_api=self._client_api
-        )
-
-        return service_driver
-
-    def get(self, service_driver_id: str):
-        """
-        Get a service driver
-
-        :param service_driver_id: Service driver ID
-        :return: Service driver
-        """
-
-        # request
-        success, response = self._client_api.gen_request(
-            req_type='get',
-            path=self._base_url + '/{}'.format(service_driver_id)
-        )
-
-        if not success:
-            raise exceptions.PlatformException(response)
-
-        service_driver = entities.ServiceDriver.from_json(
-            _json=response.json(),
-            client_api=self._client_api
-        )
-
-        return service_driver
-
-    def delete(self, service_driver_id: str):
-        """
-
-        :param service_driver_id: Service driver ID
-        """
-
-        # request
-        success, response = self._client_api.gen_request(
-            req_type='delete',
-            path=self._base_url + '/{}'.format(service_driver_id)
-        )
-
-        if not success:
-            raise exceptions.PlatformException(response)
-
-        """
-        Set a service driver as default
-
-        :param org_id: Organization ID
-        :param update_existing_services: Update existing services
-        """
-
-        return service_driver
+    def _list(self, filters: entities.Filters):
+        url = self._base_url + '/query'
+        success, response = self._client_api.gen_request(req_type='POST',
+                                                         path=url,
+                                                         json_req=filters.prepare())
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        return response.json()
+
+    def _build_entities_from_response(self, response_items) -> miscellaneous.List[entities.Compute]:
+        pool = self._client_api.thread_pools(pool_name='entity.create')
+        jobs = [None for _ in range(len(response_items))]
+        for i_item, item in enumerate(response_items):
+            jobs[i_item] = pool.submit(entities.Compute._protected_from_json,
+                                       **{'client_api': self._client_api,
+                                          '_json': item})
+        results = [j.result() for j in jobs]
+        _ = [logger.warning(r[1]) for r in results if r[0] is False]
+        items = miscellaneous.List([r[1] for r in results if r[0] is True])
+        return items
+
+    def list(self, filters: entities.Filters = None) -> entities.PagedEntities:
+        """
+        List all services drivers
+
+        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
+        :return: Paged entity
+        :rtype: dtlpy.entities.paged_entities.PagedEntities
+
+        **Example**:
+
+        .. code-block:: python
+
+            services = dl.service_drivers.list()
+        """
+        # default filters
+        if filters is None:
+            filters = entities.Filters(resource=entities.FiltersResource.COMPUTE)
+
+        if filters.resource != entities.FiltersResource.COMPUTE:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Filters resource must to be FiltersResource.COMPUTE. Got: {!r}'.format(
+                    filters.resource))
+
+        if not isinstance(filters, entities.Filters):
+            raise exceptions.PlatformException('400', 'Unknown filters type')
+
+        paged = entities.PagedEntities(items_repository=self,
+                                       filters=filters,
+                                       page_offset=filters.page,
+                                       page_size=filters.page_size,
+                                       client_api=self._client_api)
+        paged.get_page()
+        return paged
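The new list() follows the SDK's standard paging pattern: a Filters object scoped to the COMPUTE resource is posted to the /query endpoint, and results come back as PagedEntities. A sketch, again assuming the dl.computes handle:

    import dtlpy as dl

    filters = dl.Filters(resource=dl.FiltersResource.COMPUTE)
    paged = dl.computes.list(filters=filters)
    for page in paged:
        for compute in page:
            print(compute.id, type(compute).__name__)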
dtlpy/repositories/datasets.py
CHANGED
@@ -128,7 +128,7 @@ class Datasets:
 
     @staticmethod
     def _build_payload(filters, include_feature_vectors, include_annotations,
-                       export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec):
+                       export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec, export_summary):
         valid_list = [e.value for e in entities.ExportType]
         valid_types = ', '.join(valid_list)
         if export_type not in ['json', 'zip']:
@@ -161,6 +161,9 @@ class Datasets:
         if dataset_lock:
             payload['datasetLock'] = dataset_lock
 
+        if export_summary:
+            payload['summary'] = export_summary
+
         if lock_timeout_sec:
             payload['lockTimeoutSec'] = lock_timeout_sec
 
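For reference, a sketch of the export payload this helper now produces when the new flags are set; key names are taken from the code above, and the surrounding keys are elided:

    payload = {
        # ...items/annotations filters elided...
        'datasetLock': True,
        'summary': True,          # new: ask the backend for an export summary
        'lockTimeoutSec': 300,
    }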
@@ -636,7 +639,8 @@ class Datasets:
                export_type: entities.ExportType = entities.ExportType.JSON,
                timeout: int = 0,
                dataset_lock: bool = False,
-               lock_timeout_sec: int = None):
+               lock_timeout_sec: int = None,
+               export_summary: bool = False):
         """
         Export dataset items and annotations.
 
@@ -654,6 +658,7 @@ class Datasets:
         :param bool include_feature_vectors: Include item feature vectors in the export
         :param bool include_annotations: Include item annotations in the export
         :param bool dataset_lock: Make dataset readonly during the export
+        :param bool export_summary: Get Summary of the dataset export
         :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
         :param entities.ExportType export_type: Type of export ('json' or 'zip')
         :param int timeout: Maximum time in seconds to wait for the export to complete
@@ -669,12 +674,14 @@ class Datasets:
                 include_feature_vectors=True,
                 include_annotations=True,
                 export_type=dl.ExportType.JSON,
-                dataset_lock=True
-                lock_timeout_sec=300)
+                dataset_lock=True,
+                lock_timeout_sec=300,
+                export_summary=False)
         """
         dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
         payload = self._build_payload(filters, include_feature_vectors, include_annotations,
-                                      export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec)
+                                      export_type, annotation_filters, feature_vector_filters,
+                                      dataset_lock, lock_timeout_sec, export_summary)
 
         success, response = self._client_api.gen_request(req_type='post', path=f'/datasets/{dataset_id}/export',
                                                          json_req=payload)
@@ -940,7 +947,8 @@ class Datasets:
                              alpha: float = None,
                              export_version=entities.ExportVersion.V1,
                              dataset_lock: bool = False,
-                             lock_timeout_sec: int = None
+                             lock_timeout_sec: int = None,
+                             export_summary: bool = False,
                              ) -> str:
         """
         Download dataset's annotations by filters.
@@ -968,6 +976,7 @@ class Datasets:
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :return: local_path of the directory where all the downloaded item
         :param bool dataset_lock: optional - default = False
+        :param bool export_summary: optional - default = False
         :param int lock_timeout_sec: optional
         :rtype: str
 
@@ -982,8 +991,9 @@ class Datasets:
                 thickness=1,
                 with_text=False,
                 alpha=1,
-                dataset_lock=False
-                lock_timeout_sec=300
+                dataset_lock=False,
+                lock_timeout_sec=300,
+                export_summary=False
             )
         """
         if annotation_options is None:
@@ -1045,7 +1055,8 @@ class Datasets:
             filter_output_annotations=filter_output_annotations,
             export_version=export_version,
             dataset_lock=dataset_lock,
-            lock_timeout_sec=lock_timeout_sec
+            lock_timeout_sec=lock_timeout_sec,
+            export_summary=export_summary
         )
         if annotation_options:
             pages = dataset.items.list(filters=filters)
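A usage sketch mirroring the updated docstring example, assuming a logged-in dl client; the dataset ID is a placeholder:

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='<dataset-id>')
    dl.datasets.export(dataset_id=dataset.id,
                       include_annotations=True,
                       export_type=dl.ExportType.JSON,
                       dataset_lock=True,
                       lock_timeout_sec=300,
                       export_summary=True)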
dtlpy/repositories/downloader.py
CHANGED
@@ -48,7 +48,8 @@ class Downloader:
                  alpha=1,
                  export_version=entities.ExportVersion.V1,
                  dataset_lock=False,
-                 lock_timeout_sec=None
+                 lock_timeout_sec=None,
+                 export_summary=False
                  ):
         """
         Download dataset by filters.
@@ -75,6 +76,7 @@ class Downloader:
         :param alpha: opacity value [0 1], default 1
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :param bool dataset_lock: optional - default = False
+        :param bool export_summary: optional - default = False
         :param int lock_timeout_sec: optional
         :return: Output (list)
@@ -201,7 +203,8 @@ class Downloader:
             'filter_output_annotations': filter_output_annotations,
             'export_version': export_version,
             'dataset_lock': dataset_lock,
-            'lock_timeout_sec': lock_timeout_sec
+            'lock_timeout_sec': lock_timeout_sec,
+            'export_summary': export_summary
         })
         ###############
         # downloading #
@@ -369,7 +372,8 @@ class Downloader:
                              filter_output_annotations=False,
                              export_version=entities.ExportVersion.V1,
                              dataset_lock=False,
-                             lock_timeout_sec=None
+                             lock_timeout_sec=None,
+                             export_summary=False
                              ):
         """
         Download annotations json for entire dataset
@@ -384,6 +388,7 @@ class Downloader:
         :param filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :param bool dataset_lock: optional - default = False
+        :param bool export_summary: optional - default = False
         :param int lock_timeout_sec: optional
         :return:
         """
@@ -409,6 +414,9 @@ class Downloader:
         payload['annotations']['filter'] = filter_output_annotations
         if dataset_lock:
             payload['datasetLock'] = dataset_lock
+
+        if export_summary:
+            payload['summary'] = export_summary
 
         if lock_timeout_sec:
             payload['lockTimeoutSec'] = lock_timeout_sec
@@ -694,7 +702,8 @@ class Downloader:
                 raise PlatformException(response)
             else:
                 _, ext = os.path.splitext(item.metadata['system']['shebang']['linkInfo']['ref'].split('?')[0])
-                local_filepath += ext
+                if local_filepath:
+                    local_filepath += ext
             response = self.get_url_stream(url=url)
 
             if save_locally:
@@ -791,9 +800,12 @@ class Downloader:
                     for chunk in response.iter_content(chunk_size=chunk_size):
                         if chunk:  # filter out keep-alive new chunks
                             data.write(chunk)
-                    file_validation, start_point, chunk_resume = self.__get_next_chunk(item=item,
-                                                                                       download_progress=data,
-                                                                                       chunk_resume=chunk_resume)
+
+                    file_validation = True
+                    if not is_url:
+                        file_validation, start_point, chunk_resume = self.__get_next_chunk(item=item,
+                                                                                           download_progress=data,
+                                                                                           chunk_resume=chunk_resume)
                     if file_validation:
                         download_done = True
                     else:
@@ -804,7 +816,7 @@ class Downloader:
             data.seek(0)
             data.name = item.name
             if not save_locally and to_array:
-                if 'image' not in item.mimetype:
+                if 'image' not in item.mimetype and not is_url:
                     raise PlatformException(
                         error="400",
                         message='Download element type numpy.ndarray support for image only. '
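The same flag reaches Downloader.download_annotations through the public dataset entity. A sketch; the local path and ID are placeholders:

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='<dataset-id>')
    dataset.download_annotations(local_path='/tmp/annotations',
                                 dataset_lock=True,
                                 lock_timeout_sec=300,
                                 export_summary=True)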
dtlpy/repositories/dpks.py
CHANGED
@@ -248,7 +248,7 @@ class Dpks:
             logger.warning("the project id that provide different from the dpk project id")
 
         if local_path is None:
-            if manifest_filepath=='dataloop.json':
+            if manifest_filepath == 'dataloop.json':
                 local_path = os.getcwd()
             else:
                 local_path = os.path.dirname(manifest_filepath)
@@ -407,3 +407,28 @@ class Dpks:
         dpk = self.__get_by_name(dpk_name=dpk_name, dpk_version=dpk_version)
 
         return dpk
+
+    def get_previews(self, dpk: entities.Dpk):
+        """
+        Get the preview of a specific dpk.
+
+        :param entities.Dpk dpk: the dpk entity to get the preview for.
+        :return the preview of the dpk's templates
+        :rtype dict
+
+        ** Example **
+        ..coed-block:: python
+            res = dl.dpks.get_previews(dpk=dpk)
+        """
+        url = '/app-registry/{}/previews'.format(dpk.id)
+
+        # request
+        success, response = self._client_api.gen_request(
+            req_type='get',
+            path=url
+        )
+
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        return response.json()
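A usage sketch for the new previews endpoint, following the docstring above; fetching the dpk by name is an assumption about the existing Dpks.get API:

    import dtlpy as dl

    dpk = dl.dpks.get(dpk_name='<dpk-name>')  # assumed lookup by name
    previews = dl.dpks.get_previews(dpk=dpk)  # dict of the dpk's template previews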
dtlpy/repositories/items.py
CHANGED
@@ -529,7 +529,8 @@ class Items:
                  alpha: float = 1,
                  export_version=entities.ExportVersion.V1,
                  dataset_lock: bool = False,
-                 lock_timeout_sec: int = None
+                 lock_timeout_sec: int = None,
+                 export_summary: bool = False,
                  ):
         """
         Download dataset items by filters.
@@ -550,6 +551,7 @@ class Items:
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
         :param bool dataset_lock: optional - default = False
+        :param bool export_summary: optional - default = False
         :param int lock_timeout_sec: optional
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -599,7 +601,8 @@ class Items:
             filter_output_annotations=filter_output_annotations,
             export_version=export_version,
             dataset_lock=dataset_lock,
-            lock_timeout_sec=lock_timeout_sec
+            lock_timeout_sec=lock_timeout_sec,
+            export_summary=export_summary
         )
 
     def upload(
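And the items-level download with the new flag; annotation options and paths are placeholders:

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='<dataset-id>')
    dataset.items.download(local_path='/tmp/items',
                           annotation_options=dl.ViewAnnotationOptions.JSON,
                           dataset_lock=True,
                           lock_timeout_sec=300,
                           export_summary=True)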
|