dtlpy 1.91.37__py3-none-any.whl → 1.92.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +5 -2
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +1 -1
- dtlpy/entities/command.py +3 -2
- dtlpy/entities/dataset.py +52 -2
- dtlpy/entities/feature_set.py +3 -0
- dtlpy/entities/filters.py +2 -2
- dtlpy/entities/item.py +15 -1
- dtlpy/entities/node.py +11 -1
- dtlpy/entities/ontology.py +36 -40
- dtlpy/entities/pipeline.py +20 -1
- dtlpy/entities/pipeline_execution.py +23 -0
- dtlpy/entities/prompt_item.py +240 -37
- dtlpy/entities/service.py +5 -5
- dtlpy/ml/base_model_adapter.py +101 -41
- dtlpy/new_instance.py +80 -9
- dtlpy/repositories/apps.py +56 -10
- dtlpy/repositories/commands.py +10 -2
- dtlpy/repositories/datasets.py +142 -12
- dtlpy/repositories/dpks.py +5 -1
- dtlpy/repositories/feature_sets.py +23 -3
- dtlpy/repositories/models.py +1 -1
- dtlpy/repositories/pipeline_executions.py +53 -0
- dtlpy/repositories/uploader.py +3 -0
- dtlpy/services/api_client.py +59 -3
- {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/METADATA +1 -1
- {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/RECORD +35 -38
- tests/features/environment.py +29 -0
- dtlpy/callbacks/__init__.py +0 -16
- dtlpy/callbacks/piper_progress_reporter.py +0 -29
- dtlpy/callbacks/progress_viewer.py +0 -54
- {dtlpy-1.91.37.data → dtlpy-1.92.19.data}/scripts/dlp +0 -0
- {dtlpy-1.91.37.data → dtlpy-1.92.19.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.91.37.data → dtlpy-1.92.19.data}/scripts/dlp.py +0 -0
- {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/LICENSE +0 -0
- {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/WHEEL +0 -0
- {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.91.37.dist-info → dtlpy-1.92.19.dist-info}/top_level.txt +0 -0
dtlpy/new_instance.py
CHANGED
```diff
@@ -1,14 +1,61 @@
 class Dtlpy:
+    from .services.api_client import client as client_api
+    from .services.api_client import VerboseLoggingLevel, ApiClient
+    from .services import DataloopLogger, DtlpyFilter, check_sdk, Reporter, service_defaults
+    from .services.api_reference import api_reference as _api_reference
+    from .caches.cache import CacheConfig, CacheType
     from .exceptions import PlatformException
     from . import repositories, exceptions, entities, examples
-    from .
-
-
-
-
-
-
-
+    from .entities import (
+        # main entities
+        Project, Dataset, ExpirationOptions, ExportVersion, Trigger, Item, Execution, AnnotationCollection, Annotation,
+        Recipe, IndexDriver, AttributesTypes, AttributesRange, Dpk, App, AppModule, AppScope,
+        Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
+        PackageModule, PackageFunction,
+        # annotations
+        Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
+        Description,
+        Polygon, Text, FreeText, RefImage,
+        # filters
+        Filters, FiltersKnownFields, FiltersResource, FiltersOperations, FiltersMethod, FiltersOrderByDirection,
+        FiltersKnownFields as KnownFields,
+        # triggers
+        TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
+        # faas
+        FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+        InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
+        PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
+        # roberto
+        DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact,
+        EntityScopeLevel,
+        # features
+        FeatureEntityType, Feature, FeatureSet,
+        #
+        RequirementOperator, PackageRequirement,
+        Command, CommandsStatus,
+        LocalCodebase, GitCodebase, ItemCodebase, FilesystemCodebase, PackageCodebaseType,
+        MemberRole, MemberOrgRole,
+        Webhook, HttpMethod,
+        ViewAnnotationOptions, AnnotationStatus, AnnotationType,
+        ItemStatus, ExecutionStatus, ExportMetadata,
+        PromptItem, Prompt, PromptType,
+        ItemLink, UrlLink, LinkTypeEnum,
+        Modality, ModalityTypeEnum, ModalityRefTypeEnum,
+        Workload, WorkloadUnit, ItemAction,
+        PipelineExecution, CycleRerunMethod, PipelineExecutionNode, Pipeline, PipelineConnection,
+        PipelineNode, TaskNode, CodeNode, PipelineStats, PipelineSettings,
+        PipelineNodeType, PipelineNameSpace, PipelineResumeOption, Variable, CompositionStatus,
+        FunctionNode, DatasetNode, PipelineConnectionPort, PipelineNodeIO, Organization, OrganizationsPlans,
+        Integration,
+        Driver, S3Driver, GcsDriver, AzureBlobDriver, CacheAction, PodType,
+        ExternalStorage, IntegrationType, Role, PlatformEntityType, SettingsValueTypes, SettingsTypes,
+        SettingsSectionNames,
+        SettingScope, BaseSetting, UserSetting, Setting, ServiceSample, ExecutionSample, PipelineExecutionSample,
+        ResourceExecution, Message, NotificationEventContext
+    )
+    from .ml import BaseModelAdapter
+    from .utilities import Converter, BaseServiceRunner, Progress, Context, AnnotationFormat
+    from .repositories import FUNCTION_END_LINE, PackageCatalog
 
     def __init__(self, cookie_filepath=None):
         self.client_api = self.ApiClient(cookie_filepath=cookie_filepath)
@@ -24,15 +71,40 @@ class Dtlpy:
         self.triggers = self.repositories.Triggers(client_api=self.client_api)
         self.assignments = self.repositories.Assignments(client_api=self.client_api)
         self.tasks = self.repositories.Tasks(client_api=self.client_api)
+        self.dpks = self.repositories.Dpks(client_api=self.client_api)
         self.annotations = self.repositories.Annotations(client_api=self.client_api)
+        self.models = self.repositories.Models(client_api=self.client_api)
+        self.ontologies = self.repositories.Ontologies(client_api=self.client_api)
+        self.recipes = self.repositories.Recipes(client_api=self.client_api)
+        self.pipelines = self.repositories.Pipelines(client_api=self.client_api)
+        self.pipeline_executions = self.repositories.PipelineExecutions(client_api=self.client_api)
+        self.feature_sets = self.repositories.FeatureSets(client_api=self.client_api)
+        self.features = self.repositories.Features(client_api=self.client_api)
+        self.organizations = self.repositories.Organizations(client_api=self.client_api)
+        self.analytics = self.repositories.Analytics(client_api=self.client_api)
+        self.integrations = self.repositories.Integrations(client_api=self.client_api)
+        self.drivers = self.repositories.Drivers(client_api=self.client_api)
+        self.settings = self.repositories.Settings(client_api=self.client_api)
+        self.apps = self.repositories.Apps(client_api=self.client_api)
+        self.dpks = self.repositories.Dpks(client_api=self.client_api)
+        self.messages = self.repositories.Messages(client_api=self.client_api)
+        self.compositions = self.repositories.Compositions(client_api=self.client_api)
+
         self.verbose = self.client_api.verbose
         self.login = self.client_api.login
+        self.logout = self.client_api.logout
         self.login_token = self.client_api.login_token
         self.login_secret = self.client_api.login_secret
+        self.login_api_key = self.client_api.login_api_key
+        self.login_m2m = self.client_api.login_m2m
         self.add_environment = self.client_api.add_environment
         self.setenv = self.client_api.setenv
         self.token_expired = self.client_api.token_expired
         self.info = self.client_api.info
+        self.cache_state = self.client_api.cache_state
+        self.attributes_mode = self.client_api.attributes_mode
+        self.sdk_cache = self.client_api.sdk_cache
+        self.platform_settings = self.client_api.platform_settings
 
     def __del__(self):
         for name, pool in self.client_api._thread_pools.items():
@@ -176,7 +248,6 @@ class Dtlpy:
     GPU_T4_S = "gpu-t4-s"
     GPU_T4_M = "gpu-t4-m"
 
-
 class LoggingLevel:
     DEBUG = 'debug'
     WARNING = 'warning'
```
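Taken together, the class-level imports and the extra repository bindings make a standalone `Dtlpy` instance behave like the top-level `dtlpy` module. A minimal usage sketch follows; the environment name and login flow below are illustrative assumptions, not part of the diff.

```python
# Hedged sketch: Dtlpy comes from dtlpy/new_instance.py as shown above;
# the environment name and login choice are placeholders.
from dtlpy.new_instance import Dtlpy

dl = Dtlpy(cookie_filepath=None)   # isolated SDK instance with its own ApiClient
dl.setenv('prod')                  # hypothetical environment name
if dl.token_expired():
    dl.login()                     # login_api_key / login_m2m are also bound in 1.92
pipelines = dl.pipelines           # repositories newly attached in __init__
dl.logout()                        # logout is a new binding in this version
```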
dtlpy/repositories/apps.py
CHANGED
```diff
@@ -1,6 +1,6 @@
 import logging
 
-from .. import entities, exceptions, miscellaneous
+from .. import entities, exceptions, miscellaneous, repositories
 from ..services.api_client import ApiClient
 
 logger = logging.getLogger(name='dtlpy')
@@ -11,6 +11,7 @@ class Apps:
     def __init__(self, client_api: ApiClient, project: entities.Project = None):
         self._client_api = client_api
         self._project = project
+        self._commands = None
 
     @property
     def project(self) -> entities.Project:
@@ -21,6 +22,12 @@ class Apps:
         assert isinstance(self._project, entities.Project)
         return self._project
 
+    @property
+    def commands(self) -> repositories.Commands:
+        if self._commands is None:
+            self._commands = repositories.Commands(client_api=self._client_api)
+        return self._commands
+
     @project.setter
     def project(self, project: entities.Project):
         if not isinstance(project, entities.Project):
@@ -160,12 +167,13 @@ class Apps:
         paged.get_page()
         return paged
 
-    def update(self, app: entities.App = None, app_id: str = None) -> bool:
+    def update(self, app: entities.App = None, app_id: str = None, wait: bool = True) -> bool:
         """
         Update the current app to the new configuration
 
         :param entities.App app: The app to update.
         :param str app_id: The app id to update.
+        :param bool wait: wait for the operation to finish.
         :return bool whether the operation ran successfully or not
 
         **Example**
@@ -179,16 +187,30 @@ class Apps:
         success, response = self._client_api.gen_request(req_type='put',
                                                          path=f"/apps/{app.id}",
                                                          json_req=app.to_json())
-        if success:
-
-
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        app = entities.App.from_json(
+            _json=response.json(),
+            client_api=self._client_api,
+            project=self.project
+        )
+        if app.metadata:
+            command_id = app.metadata.get('system', {}).get('commands', {}).get('update', None)
+            if wait and app.status == entities.CompositionStatus.UPDATING and command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                command.wait()
+                app = self.get(app_id=app.id)
+
         return success
 
     def install(self,
                 dpk: entities.Dpk,
                 app_name: str = None,
                 organization_id: str = None,
                 custom_installation: dict = None,
-                scope: entities.AppScope = None
+                scope: entities.AppScope = None,
+                wait: bool = True
                 ) -> entities.App:
         """
         Install the specified app in the project.
@@ -199,6 +221,7 @@ class Apps:
         :param str organization_id: the organization which you want to apply on the filter.
         :param dict custom_installation: partial installation.
         :param str scope: the scope of the app. default is project.
+        :param bool wait: wait for the operation to finish.
 
         :return the installed app.
         :rtype entities.App
@@ -229,11 +252,20 @@ class Apps:
                                                          json_req=app.to_json())
         if not success:
             raise exceptions.PlatformException(response)
-
-
-
+        app = entities.App.from_json(_json=response.json(),
+                                     client_api=self._client_api,
+                                     project=self.project)
+
+        if app.metadata:
+            command_id = app.metadata.get('system', {}).get('commands', {}).get('install', None)
+            if wait and app.status == entities.CompositionStatus.INITIALIZING and command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                command.wait()
+                app = self.get(app_id=app.id)
+
         return app
 
-    def uninstall(self, app_id: str = None, app_name: str = None) -> bool:
+    def uninstall(self, app_id: str = None, app_name: str = None, wait: bool = True) -> bool:
         """
         Delete an app entity.
 
@@ -241,6 +273,7 @@ class Apps:
 
         :param str app_id: optional - the id of the app.
         :param str app_name: optional - the name of the app.
+        :param bool wait: optional - wait for the operation to finish.
         :return whether we succeed uninstalling the specified app.
         :rtype bool
 
@@ -260,6 +293,19 @@ class Apps:
         if not success:
             raise exceptions.PlatformException(response)
 
+        try:
+            app = self.get(app_id=app_id)
+        except Exception as e:
+            if e.status_code == '404':
+                return success
+            else:
+                raise e
+        if app.metadata:
+            command_id = app.metadata.get('system', {}).get('commands', {}).get('uninstall', None)
+            if wait and app.status == entities.CompositionStatus.TERMINATING and command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                command.wait()
+
         logger.debug(f"App deleted successfully (id: {app_id}, name: {app_name}")
         return success
```
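The practical effect of the new `wait` flag: install, update and uninstall now block on the backend command (tracked in `app.metadata['system']['commands']`) until the composition leaves its transitional status. A hedged usage sketch, with placeholder project and dpk names:

```python
import dtlpy as dl

project = dl.projects.get(project_name='my-project')   # placeholder name
dpk = project.dpks.get(dpk_name='my-dpk')              # placeholder name

# wait=True (the default) blocks until the INITIALIZING command completes
app = project.apps.install(dpk=dpk, wait=True)

# wait=False returns immediately; the app may still be UPDATING server-side
project.apps.update(app=app, wait=False)

# blocks while the composition is TERMINATING, then returns success
project.apps.uninstall(app_id=app.id, wait=True)
```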
dtlpy/repositories/commands.py
CHANGED
```diff
@@ -87,17 +87,25 @@ class Commands:
         :param float backoff_factor: A backoff factor to apply between attempts after the second try
         :return: Command object
         """
+
         elapsed = 0
         start = time.time()
         if timeout is None or timeout <= 0:
             timeout = np.inf
 
         command = None
-        pbar = tqdm.tqdm(total=100,
-
+        pbar = tqdm.tqdm(total=100,
+                         disable=self._client_api.verbose.disable_progress_bar,
+                         file=sys.stdout,
+                         desc='Command Progress')
         num_tries = 1
         while elapsed < timeout:
             command = self.get(command_id=command_id, url=url)
+            if command.type == 'ExportDatasetAsJson':
+                self._client_api.callbacks.run_on_event(event=self._client_api.callbacks.CallbackEvent.DATASET_EXPORT,
+                                                        context=command.spec,
+                                                        progress=command.progress)
+
             pbar.update(command.progress - pbar.n)
             if not command.in_progress():
                 break
```
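The interesting part of this change is the event fan-out: while `wait()` polls a command, `ExportDatasetAsJson` commands now forward their progress to registered callbacks via `run_on_event`. The sketch below reproduces that dispatch pattern in isolation; it is illustrative Python, not dtlpy's actual callback-registration API.

```python
from enum import Enum
from collections import defaultdict

class CallbackEvent(Enum):
    DATASET_EXPORT = 'datasetExport'   # assumed event value

class Callbacks:
    """Fan progress events out to registered handlers, as the polling loop does."""
    def __init__(self):
        self._handlers = defaultdict(list)

    def register(self, event, handler):
        self._handlers[event].append(handler)

    def run_on_event(self, event, context, progress):
        for handler in self._handlers[event]:
            handler(context=context, progress=progress)

callbacks = Callbacks()
callbacks.register(CallbackEvent.DATASET_EXPORT,
                   lambda context, progress: print(f'export progress: {progress}%'))
# Each polling iteration would then call:
callbacks.run_on_event(CallbackEvent.DATASET_EXPORT, context={'datasetId': 'xxx'}, progress=42)
```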
dtlpy/repositories/datasets.py
CHANGED
```diff
@@ -4,10 +4,12 @@ Datasets Repository
 
 import os
 import sys
+import time
 import copy
 import tqdm
 import logging
-
+import json
+from typing import Union
 
 from .. import entities, repositories, miscellaneous, exceptions, services, PlatformException, _api_reference
 from ..services.api_client import ApiClient
@@ -93,7 +95,11 @@ class Datasets:
         filters._user_query = 'false'
         if not folder_path.startswith('/'):
             folder_path = '/' + folder_path
-        filters.add(field='dir', values=folder_path)
+        filters.add(field='dir', values=folder_path, method=entities.FiltersMethod.OR)
+        if not folder_path.endswith('*'):
+            if not folder_path.endswith('/'):
+                folder_path += '/'
+            filters.add(field='dir', values=folder_path + '*', method=entities.FiltersMethod.OR)
         return filters
 
     def _get_binaries_dataset(self):
@@ -111,6 +117,70 @@ class Datasets:
         dataset = datasets[0]
         return dataset
 
+    def _resolve_dataset_id(self, dataset, dataset_name, dataset_id):
+        if dataset is None and dataset_name is None and dataset_id is None:
+            raise ValueError('Must provide dataset, dataset name or dataset id')
+        if dataset_id is None:
+            if dataset is None:
+                dataset = self.get(dataset_name=dataset_name)
+            dataset_id = dataset.id
+        return dataset_id
+
+    @staticmethod
+    def _build_payload(filters, include_feature_vectors, include_annotations, export_type, annotation_filters,
+                       feature_vector_filters):
+        valid_list = [e.value for e in entities.ExportType]
+        valid_types = ', '.join(valid_list)
+        if export_type not in ['json', 'zip']:
+            raise ValueError('export_type must be one of the following: {}'.format(valid_types))
+        payload = {'exportType': export_type}
+        if filters is None:
+            filters = entities.Filters()
+
+        if isinstance(filters, entities.Filters):
+            payload['itemsQuery'] = {'filter': filters.prepare()['filter'], 'join': filters.prepare().get("join", {})}
+        elif isinstance(filters, dict):
+            payload['itemsQuery'] = filters
+        else:
+            raise exceptions.BadRequest(message='filters must be of type dict or Filters', status_code=500)
+
+        payload['itemsVectorQuery'] = {}
+        if include_feature_vectors:
+            payload['includeItemVectors'] = True
+            payload['itemsVectorQuery']['select'] = {"datasetId": 1, 'featureSetId': 1, 'value': 1}
+
+            if feature_vector_filters is not None:
+                payload['itemsVectorQuery']['filter'] = feature_vector_filters.prepare()['filter']
+
+        payload['annotations'] = {"include": include_annotations, "convertSemantic": False}
+
+        if annotation_filters is not None:
+            payload['annotationsQuery'] = annotation_filters.prepare()['filter']
+            payload['annotations']['filter'] = True
+
+        return payload
+
+    def _download_exported_item(self, item_id, export_type, local_path=None):
+        export_item = repositories.Items(client_api=self._client_api).get(item_id=item_id)
+        export_item_path = export_item.download(local_path=local_path)
+
+        if export_type == entities.ExportType.ZIP:
+            # unzipping annotations to directory
+            if isinstance(export_item_path, list) or not os.path.isfile(export_item_path):
+                raise exceptions.PlatformException(
+                    error='404',
+                    message='error downloading annotation zip file. see above for more information. item id: {!r}'.format(
+                        export_item.id))
+            try:
+                miscellaneous.Zipping.unzip_directory(zip_filename=export_item_path,
+                                                      to_directory=local_path)
+            except Exception as e:
+                logger.warning("Failed to extract zip file error: {}".format(e))
+            finally:
+                # cleanup
+                if isinstance(export_item_path, str) and os.path.isfile(export_item_path):
+                    os.remove(export_item_path)
+
     @property
     def platform_url(self):
         return self._client_api._get_resource_url("projects/{}/datasets".format(self.project.id))
@@ -425,12 +495,7 @@ class Datasets:
         directory_tree = dataset.directory_tree
         directory_tree = project.datasets.directory_tree(dataset='dataset_entity')
         """
-        if dataset is None and dataset_name is None and dataset_id is None:
-            raise exceptions.PlatformException('400', 'Must provide dataset, dataset name or dataset id')
-        if dataset_id is None:
-            if dataset is None:
-                dataset = self.get(dataset_name=dataset_name)
-            dataset_id = dataset.id
+        dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
 
         url_path = '/datasets/{}/directoryTree'.format(dataset_id)
 
@@ -519,6 +584,72 @@ class Datasets:
                          .format(response))
         return self.get(dataset_id=command.spec['returnedModelId'])
 
+    @_api_reference.add(path='/datasets/{id}/export', method='post')
+    def export(self,
+               dataset: entities.Dataset = None,
+               dataset_name: str = None,
+               dataset_id: str = None,
+               local_path: str = None,
+               filters: Union[dict, entities.Filters] = None,
+               annotation_filters: entities.Filters = None,
+               feature_vector_filters: entities.Filters = None,
+               include_feature_vectors: bool = False,
+               include_annotations: bool = False,
+               export_type: entities.ExportType = entities.ExportType.JSON,
+               timeout: int = 0):
+        """
+        Export dataset items and annotations.
+
+        **Prerequisites**: You must be an *owner* or *developer* to use this method.
+
+        You must provide at least ONE of the following params: dataset, dataset_name, dataset_id.
+
+        :param dtlpy.entities.dataset.Dataset dataset: Dataset object
+        :param str dataset_name: The name of the dataset
+        :param str dataset_id: The ID of the dataset
+        :param str local_path: Local path to save the exported dataset
+        :param Union[dict, dtlpy.entities.filters.Filters] filters: Filters entity or a query dictionary
+        :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for export
+        :param dtlpy.entities.filters.Filters feature_vector_filters: Filters entity to filter feature vectors for export
+        :param bool include_feature_vectors: Include item feature vectors in the export
+        :param bool include_annotations: Include item annotations in the export
+        :param entities.ExportType export_type: Type of export ('json' or 'zip')
+        :param int timeout: Maximum time in seconds to wait for the export to complete
+        :return: Exported item
+        :rtype: dtlpy.entities.item.Item
+
+        **Example**:
+
+        .. code-block:: python
+
+            export_item = project.datasets.export(dataset_id='dataset_id',
+                                                  filters=filters,
+                                                  include_feature_vectors=True,
+                                                  include_annotations=True,
+                                                  export_type=dl.ExportType.JSON)
+        """
+        dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
+        payload = self._build_payload(filters, include_feature_vectors, include_annotations, export_type,
+                                      annotation_filters, feature_vector_filters)
+
+        success, response = self._client_api.gen_request(req_type='post', path=f'/datasets/{dataset_id}/export',
+                                                         json_req=payload)
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        command = entities.Command.from_json(_json=response.json(),
+                                             client_api=self._client_api)
+
+        time.sleep(2)  # as the command have wrong progress in the beginning
+        command = command.wait(timeout=timeout)
+        if 'outputItemId' not in command.spec:
+            raise exceptions.PlatformException(
+                error='400',
+                message="outputItemId key is missing in command response: {}".format(response))
+        item_id = command.spec['outputItemId']
+        self._download_exported_item(item_id=item_id, export_type=export_type, local_path=local_path)
+        return local_path
+
     @_api_reference.add(path='/datasets/merge', method='post')
     def merge(self,
               merge_name: str,
@@ -769,7 +900,7 @@ class Datasets:
         Download dataset's annotations by filters.
 
         You may filter the dataset both for items and for annotations and download annotations.
-
+
         Optional -- download annotations as: mask, instance, image mask of the item.
 
         **Prerequisites**: You must be in the role of an *owner* or *developer*.
@@ -917,9 +1048,8 @@ class Datasets:
     ):
         """
         Upload annotations to dataset.
-
-        Example for remote_root_path: If the item filepath is a/b/item and
-        remote_root_path is /a the start folder will be b instead of a
+
+        Example for remote_root_path: If the item filepath is "/a/b/item" and remote_root_path is "/a" - the start folder will be b instead of a
 
         **Prerequisites**: You must have a dataset with items that are related to the annotations. The relationship between the dataset and annotations is shown in the name. You must be in the role of an *owner* or *developer*.
```
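End to end, the new `export()` builds the payload, posts to `/datasets/{id}/export`, waits on the returned command, and downloads (and for zip, extracts) the output item. A hedged call sketch; the project, dataset, and paths are placeholders:

```python
import dtlpy as dl

project = dl.projects.get(project_name='my-project')   # placeholder
filters = dl.Filters(field='dir', values='/train')     # export only items under /train

local_path = project.datasets.export(dataset_name='my-dataset',
                                     filters=filters,
                                     include_annotations=True,
                                     include_feature_vectors=False,
                                     export_type=dl.ExportType.JSON,
                                     local_path='/tmp/my-dataset-export',
                                     timeout=0)         # <=0 waits until the command is done
```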
dtlpy/repositories/dpks.py
CHANGED
```diff
@@ -196,7 +196,8 @@ class Dpks:
         dpk = dpk_v.items[0]
         return dpk
 
-    def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False,
+    def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False,
+                manifest_filepath='dataloop.json') -> entities.Dpk:
         """
         Upload a dpk entity to the dataloop platform.
 
@@ -290,6 +291,9 @@ class Dpks:
         elif filters.resource != entities.FiltersResource.DPK:
             raise ValueError('Filters resource must to be FiltersResource.DPK. Got: {!r}'.format(filters.resource))
 
+        if self._project is not None:
+            filters.add(field='context.project', values=self._project.id)
+
         paged = entities.PagedEntities(items_repository=self,
                                        filters=filters,
                                        page_offset=filters.page,
```
|
|
|
11
11
|
"""
|
|
12
12
|
URL = '/features/sets'
|
|
13
13
|
|
|
14
|
-
def __init__(self,
|
|
14
|
+
def __init__(self,
|
|
15
|
+
client_api: ApiClient,
|
|
15
16
|
project_id: str = None,
|
|
16
|
-
project: entities.Project = None
|
|
17
|
+
project: entities.Project = None,
|
|
18
|
+
model_id: str = None,
|
|
19
|
+
model: entities.Model = None):
|
|
17
20
|
self._project = project
|
|
18
21
|
self._project_id = project_id
|
|
22
|
+
self._model = model
|
|
23
|
+
self._model_id = model_id
|
|
19
24
|
self._client_api = client_api
|
|
20
25
|
|
|
21
26
|
############
|
|
@@ -34,11 +39,23 @@ class FeatureSets:
|
|
|
34
39
|
if self._project is None:
|
|
35
40
|
raise exceptions.PlatformException(
|
|
36
41
|
error='2001',
|
|
37
|
-
message='Cannot perform action WITHOUT Project entity in
|
|
42
|
+
message='Cannot perform action WITHOUT Project entity in FeatureSets repository.'
|
|
38
43
|
' Please checkout or set a project')
|
|
39
44
|
assert isinstance(self._project, entities.Project)
|
|
40
45
|
return self._project
|
|
41
46
|
|
|
47
|
+
@property
|
|
48
|
+
def model(self) -> entities.Model:
|
|
49
|
+
if self._model is None and self._model_id is not None:
|
|
50
|
+
# get from id
|
|
51
|
+
self._model = repositories.Models(client_api=self._client_api).get(model_id=self._model_id)
|
|
52
|
+
if self._model is None:
|
|
53
|
+
raise exceptions.PlatformException(
|
|
54
|
+
error='2001',
|
|
55
|
+
message='Cannot perform action WITHOUT Model entity in FeatureSets repository.')
|
|
56
|
+
assert isinstance(self._model, entities.Model)
|
|
57
|
+
return self._model
|
|
58
|
+
|
|
42
59
|
###########
|
|
43
60
|
# methods #
|
|
44
61
|
###########
|
|
@@ -132,6 +149,7 @@ class FeatureSets:
|
|
|
132
149
|
set_type: str,
|
|
133
150
|
entity_type: entities.FeatureEntityType,
|
|
134
151
|
project_id: str = None,
|
|
152
|
+
model_id: set = None,
|
|
135
153
|
org_id: str = None):
|
|
136
154
|
"""
|
|
137
155
|
Create a new Feature Set
|
|
@@ -141,6 +159,7 @@ class FeatureSets:
|
|
|
141
159
|
:param str set_type: string of the feature type: 2d, 3d, modelFC, TSNE,PCA,FFT
|
|
142
160
|
:param entity_type: the entity that feature vector is linked to. Use the enum dl.FeatureEntityType
|
|
143
161
|
:param str project_id: the ID of the project where feature set will be created
|
|
162
|
+
:param str model_id: the ID of the model that creates the vectors
|
|
144
163
|
:param str org_id: the ID of the org where feature set will be created
|
|
145
164
|
:return: Feature Set object
|
|
146
165
|
"""
|
|
@@ -154,6 +173,7 @@ class FeatureSets:
|
|
|
154
173
|
'size': size,
|
|
155
174
|
'type': set_type,
|
|
156
175
|
'project': project_id,
|
|
176
|
+
'modelId': model_id,
|
|
157
177
|
'entityType': entity_type}
|
|
158
178
|
if org_id is not None:
|
|
159
179
|
payload['org'] = org_id
|
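With the repository now model-aware, a feature set can record which model produced its vectors via `model_id` (note the parameter is annotated `set` in this release, though it carries a string ID). A hedged creation sketch with placeholder values:

```python
import dtlpy as dl

project = dl.projects.get(project_name='my-project')   # placeholder
feature_set = project.feature_sets.create(name='clip-embeddings',
                                          size=512,
                                          set_type='modelFC',
                                          entity_type=dl.FeatureEntityType.ITEM,
                                          model_id='my-model-id')  # new in 1.92
```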
dtlpy/repositories/models.py
CHANGED
```diff
@@ -668,7 +668,7 @@ class Models:
         :param item_ids: a list of item id to run the prediction.
         :return:
         """
-        if len(model.metadata['system']
+        if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
             # no services for model
             raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
         payload = {'input': {'itemIds': item_ids},
```
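The fix replaces the broken length check with a safe `.get()` chain, so `predict()` raises a clear `ValueError` instead of a `KeyError` when the model has no deployed services. A hedged sketch of the guarded flow; the ids are placeholders:

```python
import dtlpy as dl

model = dl.models.get(model_id='my-model-id')            # placeholder id
# mirror the guard that predict() now applies internally
services = model.metadata.get('system', {}).get('deploy', {}).get('services', [])
if len(services) == 0:
    model.deploy()                                       # deploy before predicting
execution = model.predict(item_ids=['item-id'])          # placeholder item id
```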
dtlpy/repositories/pipeline_executions.py
CHANGED
```diff
@@ -1,8 +1,12 @@
 import logging
+import time
+import numpy as np
+
 from .. import entities, repositories, exceptions, miscellaneous, services, _api_reference
 from ..services.api_client import ApiClient
 
 logger = logging.getLogger(name='dtlpy')
+MAX_SLEEP_TIME = 30
 
 
 class PipelineExecutions:
@@ -379,3 +383,52 @@ class PipelineExecutions:
                 message="cycleOptions key is missing in command response: {!r}"
                 .format(response))
         return True
+
+    def wait(self,
+             pipeline_execution_id: str = None,
+             pipeline_execution: entities.PipelineExecution = None,
+             timeout: int = None,
+             backoff_factor=1):
+        """
+        Get Service execution object.
+
+        **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a service.
+
+        :param str pipeline_execution_id: pipeline execution id
+        :param str pipeline_execution: dl.PipelineExecution, optional. must input one of pipeline execution or pipeline_execution_id
+        :param int timeout: seconds to wait until TimeoutError is raised. if <=0 - wait until done - by default wait take the service timeout
+        :param float backoff_factor: A backoff factor to apply between attempts after the second try
+        :return: Service execution object
+        :rtype: dtlpy.entities.pipeline_execution.PipelineExecution
+
+        **Example**:
+
+        .. code-block:: python
+
+            pipeline.pipeline_executions.wait(pipeline_execution_id='pipeline_execution_id')
+        """
+        if pipeline_execution is None:
+            if pipeline_execution_id is None:
+                raise ValueError('Must input at least one: [pipeline_execution, pipeline_execution_id]')
+        else:
+            pipeline_execution_id = pipeline_execution.id
+        elapsed = 0
+        start = time.time()
+        if timeout is None or timeout <= 0:
+            timeout = np.inf
+
+        num_tries = 1
+        while elapsed < timeout:
+            pipeline_execution = self.get(pipeline_execution_id=pipeline_execution_id)
+            if not pipeline_execution.in_progress():
+                break
+            elapsed = time.time() - start
+            if elapsed >= timeout:
+                raise TimeoutError(
+                    f"Pipeline execution wait() function timed out. id: {pipeline_execution.id!r}, status: {pipeline_execution.status}.")
+            sleep_time = np.min([timeout - elapsed, backoff_factor * (2 ** num_tries), MAX_SLEEP_TIME])
+            num_tries += 1
+            logger.debug(
+                f"Pipeline execution {pipeline_execution.id!r} has been running for {elapsed:.2f}[s]. Sleeping for {sleep_time:.2f}[s]")
+            time.sleep(sleep_time)
+        return pipeline_execution
```
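Usage-wise, the new `wait()` mirrors `Commands.wait()`: poll the cycle, back off exponentially (`backoff_factor * 2**num_tries`, capped at `MAX_SLEEP_TIME` = 30s), and raise `TimeoutError` on expiry. A hedged sketch with placeholder names:

```python
import dtlpy as dl

project = dl.projects.get(project_name='my-project')            # placeholder
pipeline = project.pipelines.get(pipeline_name='my-pipeline')   # placeholder

execution = pipeline.execute(execution_input={'item': 'item-id'})   # placeholder input
execution = pipeline.pipeline_executions.wait(pipeline_execution=execution,
                                              timeout=600,          # raise after 10 minutes
                                              backoff_factor=1)
print(execution.status)
```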
dtlpy/repositories/uploader.py
CHANGED
```diff
@@ -522,6 +522,9 @@ class Uploader:
                                 ref=item.id)
             if pbar is not None:
                 pbar.update()
+                self.items_repository._client_api.callbacks.run_on_event(event=self.items_repository._client_api.callbacks.CallbackEvent.ITEMS_UPLOAD,
+                                                                         context={'item_id': item.id, 'dataset_id': item.dataset_id},
+                                                                         progress=round(pbar.n / pbar.total * 100, 0))
         else:
             if isinstance(element.buffer, str):
                 ref = element.buffer
```
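The upload hook reports whole-number percentages derived from the tqdm bar state; the snippet below evaluates the same rounding expression the diff adds, standalone:

```python
# Same progress arithmetic as the new ITEMS_UPLOAD callback, outside the uploader.
pbar_n, pbar_total = 37, 120                     # e.g. 37 of 120 items uploaded
progress = round(pbar_n / pbar_total * 100, 0)   # -> 31.0 (percent, whole number)
print(progress)
```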