dtlpy 1.94.5__py3-none-any.whl → 1.95.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +4 -4
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +1 -1
- dtlpy/entities/compute.py +14 -2
- dtlpy/entities/dataset.py +5 -2
- dtlpy/entities/service.py +66 -10
- dtlpy/entities/setting.py +3 -2
- dtlpy/new_instance.py +1 -1
- dtlpy/repositories/commands.py +1 -1
- dtlpy/repositories/computes.py +74 -2
- dtlpy/repositories/datasets.py +8 -2
- dtlpy/repositories/dpks.py +1 -1
- {dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/METADATA +2 -1
- {dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/RECORD +21 -21
- {dtlpy-1.94.5.data → dtlpy-1.95.6.data}/scripts/dlp +0 -0
- {dtlpy-1.94.5.data → dtlpy-1.95.6.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.94.5.data → dtlpy-1.95.6.data}/scripts/dlp.py +0 -0
- {dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/LICENSE +0 -0
- {dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/WHEEL +0 -0
- {dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -77,8 +77,8 @@ from .entities import (
     # triggers
     TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
     # faas
-    FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
-    InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
+    FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+    InstanceCatalog, PackageInputType, ServiceType, ServiceModeType, KubernetesRPSAutoscaler,
     PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
     # roberto
     DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact,
@@ -316,8 +316,8 @@ EXECUTION_STATUS_FAILED = ExecutionStatus.FAILED
 LINK_TYPE_ID = LinkTypeEnum.ID
 LINK_TYPE_URL = LinkTypeEnum.URL
 
-KUBERNETES_AUTUSCALER_TYPE_CPU =
-KUBERNETES_AUTUSCALER_TYPE_RABBITMQ =
+KUBERNETES_AUTUSCALER_TYPE_CPU = KubernetesAutoscalerType.CPU
+KUBERNETES_AUTUSCALER_TYPE_RABBITMQ = KubernetesAutoscalerType.RABBITMQ
 
 INSTANCE_CATALOG_REGULAR_XS = InstanceCatalog.REGULAR_XS
 INSTANCE_CATALOG_REGULAR_S = InstanceCatalog.REGULAR_S
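The spelling fix is additive: 1.95.x exports both the corrected KubernetesAutoscalerType and the old misspelled KubernetesAutuscalerType, with the latter now emitting a DeprecationWarning on access (removal announced for 1.97.0 in the service.py change below). A minimal migration sketch, assuming dtlpy is imported as dl:

import dtlpy as dl

# preferred spelling, added in this release
autoscaler_type = dl.KubernetesAutoscalerType.CPU

# legacy spelling still resolves to the same enum members, but warns on access
legacy_type = dl.KubernetesAutuscalerType.CPU
assert autoscaler_type == legacy_type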
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.94.5'
+version = '1.95.6'
dtlpy/entities/__init__.py
CHANGED
@@ -43,7 +43,7 @@ from .package_slot import PackageSlot, SlotPostAction, SlotPostActionType, SlotD
     UiBindingPanel
 from .package_function import PackageFunction, FunctionIO, PackageInputType
 from .time_series import TimeSeries
-from .service import Service, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, \
+from .service import Service, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
     InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType
 from .execution import Execution, ExecutionStatus
 from .command import Command, CommandsStatus
dtlpy/entities/compute.py
CHANGED
@@ -221,6 +221,18 @@ class ComputeCluster:
             'authentication': self.authentication.to_json()
         }
 
+    @classmethod
+    def from_setup_json(cls, devops_output, integration):
+        node_pools = [NodePool.from_json(n) for n in devops_output['config']['nodePools']]
+        return cls(
+            devops_output['config']['name'],
+            devops_output['config']['endpoint'],
+            devops_output['config']['kubernetesVersion'],
+            ClusterProvider(devops_output['config']['provider']),
+            node_pools,
+            {},
+            Authentication(AuthenticationIntegration(integration.id, integration.type))
+        )
 
 class ComputeContext:
     def __init__(self, labels: List[str], org: str, project: Optional[str] = None):
@@ -284,10 +296,10 @@ class Compute:
         return self._serviceDrivers
 
     def delete(self):
-        return self.
+        return self.computes.delete(compute_id=self.id)
 
     def update(self):
-        return self.
+        return self.computes.update(compute=self)
 
     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
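Judging from the keys that from_setup_json reads, the setup JSON it expects looks roughly like the sketch below. Every value is a hypothetical placeholder; the node-pool and authentication schemas follow NodePool.from_json and the integration payload rather than this example:

# hypothetical shape of the devops_output consumed by ComputeCluster.from_setup_json
devops_output = {
    'config': {
        'name': 'my-cluster',                # cluster name (placeholder)
        'endpoint': 'https://1.2.3.4:6443',  # Kubernetes API endpoint (placeholder)
        'kubernetesVersion': '1.28',         # placeholder version string
        'provider': 'gcp',                   # must be a value accepted by ClusterProvider(...)
        'nodePools': [],                     # each entry is parsed with NodePool.from_json
    },
    'authentication': {},                    # used by Computes.create_from_config_file to create the key-value integration
}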
dtlpy/entities/dataset.py
CHANGED
@@ -535,7 +535,8 @@ class Dataset(entities.BaseEntity):
               with_items_annotations=True,
               with_metadata=True,
               with_task_annotations_status=True,
-              dst_dataset_id=None
+              dst_dataset_id=None,
+              target_directory=None,
               ):
         """
         Clone dataset
@@ -548,6 +549,7 @@ class Dataset(entities.BaseEntity):
         :param bool with_metadata: clone metadata
         :param bool with_task_annotations_status: clone task annotations status
         :param str dst_dataset_id: destination dataset id
+        :param str target_directory: target directory
         :return: dataset object
         :rtype: dtlpy.entities.dataset.Dataset
 
@@ -567,7 +569,8 @@ class Dataset(entities.BaseEntity):
             with_metadata=with_metadata,
             with_items_annotations=with_items_annotations,
             with_task_annotations_status=with_task_annotations_status,
-            dst_dataset_id=dst_dataset_id
+            dst_dataset_id=dst_dataset_id,
+            target_directory=target_directory)
 
     def sync(self, wait=True):
         """
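The entity-level clone now accepts a target_directory, which is forwarded to the repository clone call shown in the last hunk. A minimal usage sketch; the dataset id, clone name and folder are placeholders, and clone_name is assumed from the usual clone signature:

import dtlpy as dl

dataset = dl.datasets.get(dataset_id='my-dataset-id')     # placeholder id
cloned = dataset.clone(clone_name='my-dataset-clone',
                       target_directory='/cloned-items')  # new in this release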
dtlpy/entities/service.py
CHANGED
@@ -156,8 +156,10 @@ class KubernetesRuntime(ServiceRuntime):
 
         self.autoscaler = kwargs.get('autoscaler', autoscaler)
         if self.autoscaler is not None and isinstance(self.autoscaler, dict):
-            if self.autoscaler['type'] ==
+            if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
                 self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
+            elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
+                self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
             else:
                 raise NotImplementedError(
                     'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
@@ -228,6 +230,7 @@ class Service(entities.BaseEntity):
     archive = attr.ib(repr=False)
     config = attr.ib(repr=False)
     settings = attr.ib(repr=False)
+    panels = attr.ib(repr=False)
 
     # SDK
     _package = attr.ib(repr=False)
@@ -340,7 +343,8 @@ class Service(entities.BaseEntity):
             settings=_json.get('settings', None),
             app=_json.get('app', None),
             integrations=_json.get('integrations', None),
-            org_id=_json.get('orgId', None)
+            org_id=_json.get('orgId', None),
+            panels=_json.get('panels', None)
         )
         inst.is_fetched = is_fetched
         return inst
@@ -484,7 +488,8 @@ class Service(entities.BaseEntity):
                 attr.fields(Service).settings,
                 attr.fields(Service).app,
                 attr.fields(Service).integrations,
-                attr.fields(Service).org_id
+                attr.fields(Service).org_id,
+                attr.fields(Service).panels
             )
         )
 
@@ -508,6 +513,9 @@ class Service(entities.BaseEntity):
         if self.updated_by is not None:
             _json['updatedBy'] = self.updated_by
 
+        if self.panels is not None:
+            _json['panels'] = self.panels
+
         if self.max_attempts is not None:
             _json['maxAttempts'] = self.max_attempts
 
@@ -806,8 +814,8 @@ class Service(entities.BaseEntity):
         )
 
 
-class KubernetesAutuscalerType(str, Enum):
-    """ The Service
+class KubernetesAutoscalerType(str, Enum):
+    """ The Service Autoscaler Type (RABBITMQ, CPU).
 
     .. list-table::
        :widths: 15 150
@@ -816,21 +824,42 @@ class KubernetesAutuscalerType(str, Enum):
        * - State
          - Description
        * - RABBITMQ
-         - Service
+         - Service Autoscaler based on service queue length
        * - CPU
-         - Service
+         - Service Autoscaler based on service CPU usage
+       * - RPS
+         - Service Autoscaler based on service RPS
     """
     RABBITMQ = 'rabbitmq'
     CPU = 'cpu'
+    RPS = 'rps'
+
+
+# added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
+class KubernetesAutuscalerTypeMeta(type):
+    def __getattribute__(cls, item):
+        if hasattr(KubernetesAutoscalerType, item):
+            warnings.warn(
+                'KubernetesAutuscalerType is deprecated and will be removed in version 1.97.0, '
+                'use KubernetesAutoscalerType instead',
+                DeprecationWarning
+            )
+            return getattr(KubernetesAutoscalerType, item)
+        else:
+            raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
+
+
+class KubernetesAutuscalerType(metaclass=KubernetesAutuscalerTypeMeta):
+    pass
 
 
 class KubernetesAutoscaler(entities.BaseEntity):
     MIN_REPLICA_DEFAULT = 0
     MAX_REPLICA_DEFAULT = 1
-    AUTOSCALER_TYPE_DEFAULT =
+    AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ
 
     def __init__(self,
-                 autoscaler_type:
+                 autoscaler_type: KubernetesAutoscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
                  min_replicas=MIN_REPLICA_DEFAULT,
                  max_replicas=MAX_REPLICA_DEFAULT,
                  cooldown_period=None,
@@ -870,7 +899,7 @@ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
                  **kwargs):
        super().__init__(min_replicas=min_replicas,
                         max_replicas=max_replicas,
-                        autoscaler_type=
+                        autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
                         cooldown_period=cooldown_period,
                         polling_interval=polling_interval, **kwargs)
        self.queue_length = kwargs.get('queueLength', queue_length)
@@ -879,3 +908,30 @@ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
         _json = super().to_json()
         _json['queueLength'] = self.queue_length
         return _json
+
+
+class KubernetesRPSAutoscaler(KubernetesAutoscaler):
+    THRESHOLD_DEFAULT = 10
+    RATE_SECONDS_DEFAULT = 30
+
+    def __init__(self,
+                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+                 threshold=THRESHOLD_DEFAULT,
+                 rate_seconds=RATE_SECONDS_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        super().__init__(min_replicas=min_replicas,
+                         max_replicas=max_replicas,
+                         autoscaler_type=KubernetesAutoscalerType.RPS,
+                         cooldown_period=cooldown_period,
+                         polling_interval=polling_interval, **kwargs)
+        self.threshold = kwargs.get('threshold', threshold)
+        self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
+
+    def to_json(self):
+        _json = super().to_json()
+        _json['rateSeconds'] = self.rate_seconds
+        _json['threshold'] = self.threshold
+        return _json
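KubernetesRPSAutoscaler mirrors the RabbitMQ autoscaler and pins autoscaler_type to RPS for you. A minimal sketch of building one and handing it to a service runtime; the replica bounds are illustrative, and the threshold/rate_seconds comments are an interpretation of the docstring's "based on service RPS" description rather than documented semantics:

import dtlpy as dl

autoscaler = dl.KubernetesRPSAutoscaler(min_replicas=0,
                                        max_replicas=5,
                                        threshold=10,     # presumably the requests-per-second level that triggers scale-out
                                        rate_seconds=30)  # presumably the window the rate is measured over
runtime = dl.KubernetesRuntime(autoscaler=autoscaler)     # other KubernetesRuntime arguments omitted for brevity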
dtlpy/entities/setting.py
CHANGED
@@ -190,7 +190,8 @@ class Setting(BaseSetting):
                  hint=None,
                  client_api=None,
                  project=None,
-                 org=None
+                 org=None,
+                 setting_type=SettingsTypes.USER_SETTINGS
                  ):
         super().__init__(
             default_value=default_value,
@@ -199,7 +200,7 @@ class Setting(BaseSetting):
             value_type=value_type,
             scope=scope,
             metadata=metadata,
-            setting_type=
+            setting_type=setting_type,
             client_api=client_api,
             project=project,
             org=org,
dtlpy/new_instance.py
CHANGED
@@ -22,7 +22,7 @@ class Dtlpy:
         # triggers
         TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
         # faas
-        FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+        FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
         InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
         PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
         # roberto
dtlpy/repositories/commands.py
CHANGED
dtlpy/repositories/computes.py
CHANGED
@@ -1,7 +1,12 @@
+import base64
+import datetime
+import json
+
 from ..services.api_client import ApiClient
 from .. import exceptions, entities, repositories
 from typing import List, Optional, Dict
-
+from ..entities import ComputeCluster, ComputeContext, ComputeType, Project
+from ..entities.integration import IntegrationType
 
 class Computes:
 
@@ -9,6 +14,8 @@ class Computes:
         self._client_api = client_api
         self._base_url = '/compute'
         self._commands = None
+        self._projects = None
+        self._organizations = None
 
     @property
     def commands(self) -> repositories.Commands:
@@ -16,6 +23,18 @@ class Computes:
             self._commands = repositories.Commands(client_api=self._client_api)
         return self._commands
 
+    @property
+    def projects(self):
+        if self._projects is None:
+            self._projects = repositories.Projects(client_api=self._client_api)
+        return self._projects
+
+    @property
+    def organizations(self):
+        if self._organizations is None:
+            self._organizations = repositories.Organizations(client_api=self._client_api)
+        return self._organizations
+
     def create(
             self,
             name: str,
@@ -142,6 +161,59 @@ class Computes:
 
         return True
 
+    @staticmethod
+    def read_file(file_path):
+        try:
+            with open(file_path, 'r') as file:
+                content = file.read()
+            return content
+        except FileNotFoundError:
+            print(f"The file at {file_path} was not found.")
+        except IOError:
+            print(f"An error occurred while reading the file at {file_path}.")
+
+    def decode_and_parse_input(self, file_path):
+        """Decode a base64 encoded string from file a and parse it as JSON."""
+        decoded_bytes = base64.b64decode(self.read_file(file_path))
+        return json.loads(decoded_bytes)
+
+    @staticmethod
+    def create_integration(org, name, auth_data):
+        """Create a new key-value integration within the specified project."""
+        return org.integrations.create(
+            integrations_type=IntegrationType.KEY_VALUE,
+            name=name,
+            options={
+                'key': name,
+                'value': json.dumps(auth_data)
+            }
+        )
+
+    def setup_compute_cluster(self, config, integration, org_id, project=None):
+        """Set up a compute cluster using the provided configuration and integration."""
+        cluster = ComputeCluster.from_setup_json(config, integration)
+        project_id = None
+        if project is not None:
+            project_id = project.id
+        compute = self.create(
+            config['config']['name'],
+            ComputeContext([], org_id, project_id),
+            [],
+            cluster,
+            ComputeType.KUBERNETES)
+        return compute
+
+    def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
+        config = self.decode_and_parse_input(config_file_path)
+        project = None
+        if project_name is not None:
+            project = self.projects.get(project_name=project_name)
+        org = self.organizations.get(organization_id=org_id)
+        integration_name = ('cluster_integration_test_' + datetime.datetime.now().isoformat().split('.')[0]
+                            .replace(':', '_'))
+        integration = self.create_integration(org, integration_name, config['authentication'])
+        compute = self.setup_compute_cluster(config, integration, org_id, project)
+        return compute
 
 class ServiceDrivers:
 
@@ -234,7 +306,7 @@ class ServiceDrivers:
         """
         Set a service driver as default
 
-        :param service_driver_id:
+        :param service_driver_id: Compute name
        :param org_id: Organization ID
        :param update_existing_services: Update existing services
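create_from_config_file ties these helpers together: it decodes the base64 setup file, creates a key-value integration on the organization, and registers the Kubernetes compute, optionally scoped to a project. A minimal usage sketch; the path, org id and project name are placeholders, and how the Computes repository is obtained is an assumption:

import dtlpy as dl

computes = dl.computes  # assumption: the Computes repository is exposed at package level; adjust to however your client exposes it
compute = computes.create_from_config_file(
    config_file_path='/path/to/cluster_setup.b64',  # base64-encoded JSON produced by the cluster setup step
    org_id='my-org-id',                             # placeholder organization id
    project_name='my-project'                       # optional; omit for an org-scoped compute
)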
dtlpy/repositories/datasets.py
CHANGED
@@ -515,7 +515,8 @@ class Datasets:
               with_items_annotations: bool = True,
               with_metadata: bool = True,
               with_task_annotations_status: bool = True,
-              dst_dataset_id: str = None
+              dst_dataset_id: str = None,
+              target_directory: str = None):
         """
         Clone a dataset. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.
 
@@ -528,6 +529,7 @@ class Datasets:
         :param bool with_metadata: true to clone with metadata
         :param bool with_task_annotations_status: true to clone with task annotations' status
         :param str dst_dataset_id: destination dataset id
+        :param str target_directory: target directory
         :return: dataset object
         :rtype: dtlpy.entities.dataset.Dataset
 
@@ -555,13 +557,17 @@ class Datasets:
         if copy_filters.has_field('hidden'):
             copy_filters.pop('hidden')
 
+        if target_directory is not None and not target_directory.startswith('/'):
+            target_directory = '/' + target_directory
+
         payload = {
             "name": clone_name,
             "filter": copy_filters.prepare(),
             "cloneDatasetParams": {
                 "withItemsAnnotations": with_items_annotations,
                 "withMetadata": with_metadata,
-                "withTaskAnnotationsStatus": with_task_annotations_status
+                "withTaskAnnotationsStatus": with_task_annotations_status,
+                "targetDirectory": target_directory
             }
         }
         if dst_dataset_id is not None:
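Because the repository prefixes a missing leading slash, relative and absolute forms of target_directory end up identical in the clone payload. A short sketch; the project, dataset id and clone names are placeholders, and the dataset_id/clone_name keywords are assumed from the usual Datasets.clone signature:

import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # placeholder project
# both calls send "targetDirectory": "/clones/batch-1" to the platform
project.datasets.clone(dataset_id='my-dataset-id', clone_name='clone-a', target_directory='clones/batch-1')
project.datasets.clone(dataset_id='my-dataset-id', clone_name='clone-b', target_directory='/clones/batch-1')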
dtlpy/repositories/dpks.py
CHANGED
@@ -287,7 +287,7 @@ class Dpks:
         """
         success, response = self._client_api.gen_request(req_type='delete', path=f'/app-registry/{dpk_id}')
         if success:
-            logger.info('Deleted dpk successfully')
+            logger.info(f'Deleted dpk: {dpk_id} successfully')
         else:
             raise exceptions.PlatformException(response)
         return success
{dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dtlpy
-Version: 1.94.5
+Version: 1.95.6
 Summary: SDK and CLI for Dataloop platform
 Home-page: https://github.com/dataloop-ai/dtlpy
 Author: Dataloop Team
@@ -42,6 +42,7 @@ Requires-Dist: diskcache (>=5.4)
 Requires-Dist: redis (>=3.5)
 Requires-Dist: inquirer
 Requires-Dist: dtlpymetrics
+Requires-Dist: dataclasses
 
 
 [](https://sdk-docs.dataloop.ai/en/latest/?badge=latest)
{dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
-dtlpy/__init__.py,sha256=
-dtlpy/__version__.py,sha256=
+dtlpy/__init__.py,sha256=nE2SN0AD2rZ_ekF_kD7OzZbSE32H8zV5UM6t_E0LzTw,20647
+dtlpy/__version__.py,sha256=3OtkFGDOCL5-ZRo52dswwfcY5vYhh114MOzr2lJhEAA,19
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
-dtlpy/new_instance.py,sha256=
+dtlpy/new_instance.py,sha256=ORhXmIsc8Kut2M1jekKL3dG_adRp7axK-25B4zJNqMU,10091
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
 dtlpy/assets/lock_open.png,sha256=BH9uyf5uYvgZrDpDw9qCUnT3UbkXG8XbeRmWDpWlV4M,18215
 dtlpy/assets/main.py,sha256=N1JUsx79qnXI7Hx22C8JOzHJdGHxvrXeTx5UZAxvJfE,1380
@@ -44,7 +44,7 @@ dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
 dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
 dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
 dtlpy/dlp/parser.py,sha256=p-TFaiAU2c3QkI97TXzL2LDR3Eq0hGDFrTc9J2jWLh4,30551
-dtlpy/entities/__init__.py,sha256=
+dtlpy/entities/__init__.py,sha256=R2kDC9VHOeRSTgXXqNowbf_yZwy7tbAkukvIlPZmPVE,4856
 dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
 dtlpy/entities/annotation.py,sha256=yk-JQzgzXvnDLFrOkmcHQfEtsiPqZeIisv80ksNB-f8,66912
 dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
@@ -56,8 +56,8 @@ dtlpy/entities/base_entity.py,sha256=i83KrtAz6dX4t8JEiUimLI5ZRrN0VnoUWKG2Zz49N5w
 dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
 dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
 dtlpy/entities/command.py,sha256=ARu8ttk-C7_Ice7chRyTtyOtakBTF09FC04mEk73SO8,5010
-dtlpy/entities/compute.py,sha256=
-dtlpy/entities/dataset.py,sha256=
+dtlpy/entities/compute.py,sha256=4FEpahPFFGHxye_fLh_p_kP6iEQ3QJK7S5hAdd6Afos,12744
+dtlpy/entities/dataset.py,sha256=tNCl7nNCx-DrZ3z96APhRdvllfQA1-9y8DpL6Ma2l0I,47516
 dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
 dtlpy/entities/dpk.py,sha256=a5C1UG_cvDnXSee650WHH43QflxbJCo_g0V17-GRb24,17639
 dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
@@ -87,8 +87,8 @@ dtlpy/entities/prompt_item.py,sha256=Kmvguz3f0sGtkKZS9OEA_-Yi4aQRCgdg1GBkaLQyyTg
 dtlpy/entities/recipe.py,sha256=Q1HtYgind3bEe-vnDZWhw6H-rcIAGhkGHPRWtLIkPSE,11917
 dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y4,3273
 dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
-dtlpy/entities/service.py,sha256=
-dtlpy/entities/setting.py,sha256=
+dtlpy/entities/service.py,sha256=ZV3HhBbafs0N_lSIWxu4CNJ39WThd7z5GAd0fCvSnFg,32462
+dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
 dtlpy/entities/task.py,sha256=XHiEqZYFlrDCtmw1MXsysjoBLdIzAk7coMrVk8bNIiE,19534
 dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
 dtlpy/entities/trigger.py,sha256=zh3wYUY2-zATh_7ous0Ck87Yojo9r9PAVQrkcESxoko,14266
@@ -159,12 +159,12 @@ dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzM
 dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
 dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
 dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zhGA,24668
-dtlpy/repositories/commands.py,sha256=
+dtlpy/repositories/commands.py,sha256=kXhmyBpLZNs-6vKBo4iXaommpjcGBDXs287IICUnQMw,5593
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
-dtlpy/repositories/computes.py,sha256
-dtlpy/repositories/datasets.py,sha256=
+dtlpy/repositories/computes.py,sha256=EtfE_3JhTdNlSYDPkKXBFkq-DBl4sgQqIm50ajvFdWM,9976
+dtlpy/repositories/datasets.py,sha256=rDpJXNyxOlJwDQB-wNkM-JIqOGH10q9nujnAl6y8_xU,52077
 dtlpy/repositories/downloader.py,sha256=pNwL7Nid8xmOyYNiv4DB_WY4RoKlxQ-U9nG2V99Gyr8,41342
-dtlpy/repositories/dpks.py,sha256=
+dtlpy/repositories/dpks.py,sha256=mj3QPvfzj_jZAscwIgpKUfa7fLxptc3OJQ_RrSfgYxo,17487
 dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
 dtlpy/repositories/executions.py,sha256=M84nhpFPPZq4fQeJ2m_sv6JT4NE2WDRMOXWr451J0bU,30403
 dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
@@ -221,9 +221,9 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.95.6.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.95.6.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.95.6.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
@@ -231,9 +231,9 @@ tests/assets/models_flow/main.py,sha256=87O3-JaWcC6m_kA39sqPhX70_VCBzzbLWmX2YQFi
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/features/environment.py,sha256=V23cUx_p4VpNk9kc2I0BDZJHO_xcJBFJq8m3JlYCooc,16736
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.95.6.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.95.6.dist-info/METADATA,sha256=gwEWuQCr9AOla7PZkit6MzkeFa6pW626gXqni3GPKVM,3002
+dtlpy-1.95.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.95.6.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.95.6.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.95.6.dist-info/RECORD,,
{dtlpy-1.94.5.data → dtlpy-1.95.6.data}/scripts/dlp
File without changes
{dtlpy-1.94.5.data → dtlpy-1.95.6.data}/scripts/dlp.bat
File without changes
{dtlpy-1.94.5.data → dtlpy-1.95.6.data}/scripts/dlp.py
File without changes
{dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/LICENSE
File without changes
{dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/WHEEL
File without changes
{dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/entry_points.txt
File without changes
{dtlpy-1.94.5.dist-info → dtlpy-1.95.6.dist-info}/top_level.txt
File without changes