dtlpy 1.93.11__py3-none-any.whl → 1.95.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -77,8 +77,8 @@ from .entities import (
     # triggers
     TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
     # faas
-    FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
-    InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
+    FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+    InstanceCatalog, PackageInputType, ServiceType, ServiceModeType, KubernetesRPSAutoscaler,
     PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
     # roberto
     DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact,
@@ -316,8 +316,8 @@ EXECUTION_STATUS_FAILED = ExecutionStatus.FAILED
 LINK_TYPE_ID = LinkTypeEnum.ID
 LINK_TYPE_URL = LinkTypeEnum.URL

-KUBERNETES_AUTUSCALER_TYPE_CPU = KubernetesAutuscalerType.CPU
-KUBERNETES_AUTUSCALER_TYPE_RABBITMQ = KubernetesAutuscalerType.RABBITMQ
+KUBERNETES_AUTUSCALER_TYPE_CPU = KubernetesAutoscalerType.CPU
+KUBERNETES_AUTUSCALER_TYPE_RABBITMQ = KubernetesAutoscalerType.RABBITMQ

 INSTANCE_CATALOG_REGULAR_XS = InstanceCatalog.REGULAR_XS
 INSTANCE_CATALOG_REGULAR_S = InstanceCatalog.REGULAR_S
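The rename keeps the misspelled KubernetesAutuscalerType importable as a deprecated alias (via the metaclass shim in the dtlpy/entities/service.py hunks below), so existing code keeps working while emitting a warning. A minimal sketch of what callers see; all names and enum values come from this diff:

    import warnings
    import dtlpy as dl

    # New, correctly spelled enum
    print(dl.KubernetesAutoscalerType.CPU.value)  # 'cpu'

    # The old misspelled name still resolves, but raises a DeprecationWarning
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        value = dl.KubernetesAutuscalerType.RABBITMQ
        assert value == dl.KubernetesAutoscalerType.RABBITMQ
        print(caught[0].category)  # <class 'DeprecationWarning'>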
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
-version = '1.93.11'
+version = '1.95.6'
dtlpy/entities/__init__.py CHANGED
@@ -43,7 +43,7 @@ from .package_slot import PackageSlot, SlotPostAction, SlotPostActionType, SlotD
     UiBindingPanel
 from .package_function import PackageFunction, FunctionIO, PackageInputType
 from .time_series import TimeSeries
-from .service import Service, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, \
+from .service import Service, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
     InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType
 from .execution import Execution, ExecutionStatus
 from .command import Command, CommandsStatus
dtlpy/entities/compute.py CHANGED
@@ -9,6 +9,7 @@ class ClusterProvider(str, Enum):
     AWS = 'aws'
     AZURE = 'azure'
     HPC = 'hpc'
+    LOCAL = 'local'


 class ComputeType(str, Enum):
@@ -22,18 +23,27 @@ class ComputeStatus(str, Enum):


 class Toleration:
-    def __init__(self, name: str):
-        self.name = name
+    def __init__(self, effect: str, key: str, operator: str, value: str):
+        self.effect = effect
+        self.key = key
+        self.operator = operator
+        self.value = value

     @classmethod
     def from_json(cls, _json):
         return cls(
-            name=_json.get('name')
+            effect=_json.get('effect'),
+            key=_json.get('key'),
+            operator=_json.get('operator'),
+            value=_json.get('value')
         )

     def to_json(self):
         return {
-            'name': self.name
+            'effect': self.effect,
+            'key': self.key,
+            'operator': self.operator,
+            'value': self.value
         }

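Toleration now mirrors the Kubernetes toleration schema (effect/key/operator/value) instead of a bare name. A hedged round-trip sketch; the field values are illustrative:

    from dtlpy.entities.compute import Toleration

    # Express a toleration the way Kubernetes specs do
    toleration = Toleration(effect='NoSchedule', key='nvidia.com/gpu',
                            operator='Exists', value='')
    as_json = toleration.to_json()
    # {'effect': 'NoSchedule', 'key': 'nvidia.com/gpu', 'operator': 'Exists', 'value': ''}
    assert Toleration.from_json(as_json).key == toleration.key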
@@ -85,32 +95,32 @@ class NodePool:
             self,
             name: str,
             is_dl_type_default: bool,
-            dl_type: Optional[str] = None,
+            dl_types: Optional[List[str]] = None,
             tolerations: Optional[List[Toleration]] = None,
             description: str = "",
             node_selector: str = "",
             preemtible: bool = False,
-            deployment_resources_request: DeploymentResources = None
+            deployment_resources: DeploymentResources = None
     ):
         self.name = name
         self.is_dl_type_default = is_dl_type_default
-        self.dl_type = dl_type
+        self.dl_types = dl_types
         self.tolerations = tolerations if tolerations is not None else []
         self.description = description
         self.node_selector = node_selector
         self.preemtible = preemtible
-        self.deployment_resources_request = deployment_resources_request
+        self.deployment_resources = deployment_resources

     @classmethod
     def from_json(cls, _json):
         node_pool = cls(
             name=_json.get('name'),
             is_dl_type_default=_json.get('isDlTypeDefault'),
-            dl_type=_json.get('dlType'),
+            dl_types=_json.get('dlTypes'),
             description=_json.get('description'),
             node_selector=_json.get('nodeSelector'),
             preemtible=_json.get('preemtible'),
-            deployment_resources_request=DeploymentResources.from_json(_json.get('deploymentResourcesRequest', dict())),
+            deployment_resources=DeploymentResources.from_json(_json.get('deploymentResources', dict())),
             tolerations=[Toleration.from_json(t) for t in _json.get('tolerations', list())]
         )

@@ -123,12 +133,12 @@ class NodePool:
             'description': self.description,
             'nodeSelector': self.node_selector,
             'preemtible': self.preemtible,
-            'deploymentResourcesRequest': self.deployment_resources_request.to_json(),
+            'deploymentResources': self.deployment_resources.to_json(),
             'tolerations': [t.to_json() for t in self.tolerations]
         }

-        if self.dl_type is not None:
-            _json['dlType'] = self.dl_type
+        if self.dl_types is not None:
+            _json['dlTypes'] = self.dl_types

         return _json

@@ -211,6 +221,18 @@ class ComputeCluster:
             'authentication': self.authentication.to_json()
         }

+    @classmethod
+    def from_setup_json(cls, devops_output, integration):
+        node_pools = [NodePool.from_json(n) for n in devops_output['config']['nodePools']]
+        return cls(
+            devops_output['config']['name'],
+            devops_output['config']['endpoint'],
+            devops_output['config']['kubernetesVersion'],
+            ClusterProvider(devops_output['config']['provider']),
+            node_pools,
+            {},
+            Authentication(AuthenticationIntegration(integration.id, integration.type))
+        )

 class ComputeContext:
     def __init__(self, labels: List[str], org: str, project: Optional[str] = None):
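The accessors in from_setup_json imply the shape of the setup payload it consumes. A hedged sketch of that input, inferred only from the keys read above; every value is a placeholder:

    devops_output = {
        'config': {
            'name': 'my-cluster',
            'endpoint': 'https://1.2.3.4:6443',
            'kubernetesVersion': '1.29',
            'provider': 'aws',  # must be a valid ClusterProvider value
            'nodePools': [{
                'name': 'default-pool',
                'isDlTypeDefault': True,
                'dlTypes': ['regular'],  # illustrative dl types
                'tolerations': []
            }]
        }
    }
    # cluster = ComputeCluster.from_setup_json(devops_output, integration)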
@@ -238,6 +260,7 @@ class Compute:
     def __init__(
             self,
             id: str,
+            name: str,
             context: ComputeContext,
             client_api: ApiClient,
             shared_contexts: Optional[List[ComputeContext]] = None,
@@ -248,6 +271,7 @@ class Compute:
             metadata: Optional[Dict] = None,
     ):
         self.id = id
+        self.name = name
         self.context = context
         self.shared_contexts = shared_contexts if shared_contexts is not None else []
         self.global_ = global_
@@ -272,15 +296,16 @@ class Compute:
         return self._serviceDrivers

     def delete(self):
-        return self._computes.delete(compute_id=self.id)
+        return self.computes.delete(compute_id=self.id)

     def update(self):
-        return self._computes.update(compute=self)
+        return self.computes.update(compute=self)

     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
         return cls(
             id=_json.get('id'),
+            name=_json.get('name'),
             context=ComputeContext.from_json(_json.get('context', dict())),
             shared_contexts=[ComputeContext.from_json(sc) for sc in _json.get('sharedContexts', list())],
             global_=_json.get('global'),
dtlpy/entities/dataset.py CHANGED
@@ -535,7 +535,8 @@ class Dataset(entities.BaseEntity):
               with_items_annotations=True,
               with_metadata=True,
               with_task_annotations_status=True,
-              dst_dataset_id=None
+              dst_dataset_id=None,
+              target_directory=None,
               ):
        """
        Clone dataset
@@ -548,6 +549,7 @@ class Dataset(entities.BaseEntity):
        :param bool with_metadata: clone metadata
        :param bool with_task_annotations_status: clone task annotations status
        :param str dst_dataset_id: destination dataset id
+       :param str target_directory: target directory
        :return: dataset object
        :rtype: dtlpy.entities.dataset.Dataset

@@ -567,7 +569,8 @@ class Dataset(entities.BaseEntity):
            with_metadata=with_metadata,
            with_items_annotations=with_items_annotations,
            with_task_annotations_status=with_task_annotations_status,
-           dst_dataset_id=dst_dataset_id)
+           dst_dataset_id=dst_dataset_id,
+           target_directory=target_directory)

     def sync(self, wait=True):
        """
dtlpy/entities/dpk.py CHANGED
@@ -311,6 +311,16 @@ class Dpk(entities.DlEntity):
         """
         return self.dpks.publish(dpk=self)

+    def update(self):
+        """
+        Update the dpk attributes to Dataloop platform.
+
+        **Example**
+        .. code-block:: python
+            updated_dpk = dpk.update()
+        """
+        return self.dpks.update(dpk=self)
+
     def pull(self, local_path):
         """
         Pulls the app from the platform as dpk file.
dtlpy/entities/filters.py CHANGED
@@ -90,7 +90,9 @@ class Filters:
                  page_size=None,
                  ):
         if page_size is None:
-            if resource in [FiltersResource.EXECUTION, FiltersResource.PIPELINE_EXECUTION]:
+            if resource in [FiltersResource.EXECUTION,
+                            FiltersResource.PIPELINE_EXECUTION,
+                            FiltersResource.DPK]:
                 page_size = 100
             else:
                 page_size = 1000
dtlpy/entities/paged_entities.py CHANGED
@@ -1,5 +1,6 @@
 import logging
 import math
+import time
 import tqdm
 import copy
 import sys
@@ -169,6 +170,7 @@ class PagedEntities:
         jobs = list()
         pool = self._client_api.thread_pools('item.page')
         while True:
+            time.sleep(0.01)  # to flush the results
             if page_offset <= total_pages:
                 jobs.append(pool.submit(self.return_page, **{'page_offset': page_offset,
                                                              'page_size': page_size}))
dtlpy/entities/service.py CHANGED
@@ -156,8 +156,10 @@ class KubernetesRuntime(ServiceRuntime):

         self.autoscaler = kwargs.get('autoscaler', autoscaler)
         if self.autoscaler is not None and isinstance(self.autoscaler, dict):
-            if self.autoscaler['type'] == KubernetesAutuscalerType.RABBITMQ:
+            if self.autoscaler['type'] == KubernetesAutoscalerType.RABBITMQ:
                 self.autoscaler = KubernetesRabbitmqAutoscaler(**self.autoscaler)
+            elif self.autoscaler['type'] == KubernetesAutoscalerType.RPS:
+                self.autoscaler = KubernetesRPSAutoscaler(**self.autoscaler)
             else:
                 raise NotImplementedError(
                     'Unknown kubernetes autoscaler type: {}'.format(self.autoscaler['type']))
@@ -228,6 +230,7 @@ class Service(entities.BaseEntity):
     archive = attr.ib(repr=False)
     config = attr.ib(repr=False)
     settings = attr.ib(repr=False)
+    panels = attr.ib(repr=False)

     # SDK
     _package = attr.ib(repr=False)
@@ -340,7 +343,8 @@ class Service(entities.BaseEntity):
             settings=_json.get('settings', None),
             app=_json.get('app', None),
             integrations=_json.get('integrations', None),
-            org_id=_json.get('orgId', None)
+            org_id=_json.get('orgId', None),
+            panels=_json.get('panels', None)
         )
         inst.is_fetched = is_fetched
         return inst
@@ -484,7 +488,8 @@ class Service(entities.BaseEntity):
                 attr.fields(Service).settings,
                 attr.fields(Service).app,
                 attr.fields(Service).integrations,
-                attr.fields(Service).org_id
+                attr.fields(Service).org_id,
+                attr.fields(Service).panels
             )
         )

@@ -508,6 +513,9 @@ class Service(entities.BaseEntity):
         if self.updated_by is not None:
             _json['updatedBy'] = self.updated_by

+        if self.panels is not None:
+            _json['panels'] = self.panels
+
         if self.max_attempts is not None:
             _json['maxAttempts'] = self.max_attempts

@@ -806,8 +814,8 @@ class Service(entities.BaseEntity):
         )


-class KubernetesAutuscalerType(str, Enum):
-    """ The Service Autuscaler Type (RABBITMQ, CPU).
+class KubernetesAutoscalerType(str, Enum):
+    """ The Service Autoscaler Type (RABBITMQ, CPU).

     .. list-table::
        :widths: 15 150
@@ -816,21 +824,42 @@ class KubernetesAutuscalerType(str, Enum):
        * - State
          - Description
        * - RABBITMQ
-         - Service Autuscaler will be in RABBITMQ
+         - Service Autoscaler based on service queue length
        * - CPU
-         - Service Autuscaler will be in in local CPU
+         - Service Autoscaler based on service CPU usage
+       * - RPS
+         - Service Autoscaler based on service RPS
     """
     RABBITMQ = 'rabbitmq'
     CPU = 'cpu'
+    RPS = 'rps'
+
+
+# added this class to avoid breaking changes after fixing a spelling mistake in KubernetesAutoscalerType
+class KubernetesAutuscalerTypeMeta(type):
+    def __getattribute__(cls, item):
+        if hasattr(KubernetesAutoscalerType, item):
+            warnings.warn(
+                'KubernetesAutuscalerType is deprecated and will be removed in version 1.97.0, '
+                'use KubernetesAutoscalerType instead',
+                DeprecationWarning
+            )
+            return getattr(KubernetesAutoscalerType, item)
+        else:
+            raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")
+
+
+class KubernetesAutuscalerType(metaclass=KubernetesAutuscalerTypeMeta):
+    pass


 class KubernetesAutoscaler(entities.BaseEntity):
     MIN_REPLICA_DEFAULT = 0
     MAX_REPLICA_DEFAULT = 1
-    AUTOSCALER_TYPE_DEFAULT = KubernetesAutuscalerType.RABBITMQ
+    AUTOSCALER_TYPE_DEFAULT = KubernetesAutoscalerType.RABBITMQ

     def __init__(self,
-                 autoscaler_type: KubernetesAutuscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
+                 autoscaler_type: KubernetesAutoscalerType.RABBITMQ = AUTOSCALER_TYPE_DEFAULT,
                  min_replicas=MIN_REPLICA_DEFAULT,
                  max_replicas=MAX_REPLICA_DEFAULT,
                  cooldown_period=None,
@@ -870,7 +899,7 @@ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
                  **kwargs):
         super().__init__(min_replicas=min_replicas,
                          max_replicas=max_replicas,
-                         autoscaler_type=KubernetesAutuscalerType.RABBITMQ,
+                         autoscaler_type=KubernetesAutoscalerType.RABBITMQ,
                          cooldown_period=cooldown_period,
                          polling_interval=polling_interval, **kwargs)
         self.queue_length = kwargs.get('queueLength', queue_length)
@@ -879,3 +908,30 @@ class KubernetesRabbitmqAutoscaler(KubernetesAutoscaler):
         _json = super().to_json()
         _json['queueLength'] = self.queue_length
         return _json
+
+
+class KubernetesRPSAutoscaler(KubernetesAutoscaler):
+    THRESHOLD_DEFAULT = 10
+    RATE_SECONDS_DEFAULT = 30
+
+    def __init__(self,
+                 min_replicas=KubernetesAutoscaler.MIN_REPLICA_DEFAULT,
+                 max_replicas=KubernetesAutoscaler.MAX_REPLICA_DEFAULT,
+                 threshold=THRESHOLD_DEFAULT,
+                 rate_seconds=RATE_SECONDS_DEFAULT,
+                 cooldown_period=None,
+                 polling_interval=None,
+                 **kwargs):
+        super().__init__(min_replicas=min_replicas,
+                         max_replicas=max_replicas,
+                         autoscaler_type=KubernetesAutoscalerType.RPS,
+                         cooldown_period=cooldown_period,
+                         polling_interval=polling_interval, **kwargs)
+        self.threshold = kwargs.get('threshold', threshold)
+        self.rate_seconds = kwargs.get('rateSeconds', rate_seconds)
+
+    def to_json(self):
+        _json = super().to_json()
+        _json['rateSeconds'] = self.rate_seconds
+        _json['threshold'] = self.threshold
+        return _json
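The RPS autoscaler scales a service on requests per second instead of queue length. A hedged sketch of wiring one into a runtime; the replica bounds are illustrative, threshold/rate_seconds are the class defaults shown above, and the KubernetesRuntime arguments are assumed from the SDK's usual signature:

    import dtlpy as dl

    # Scale 0..5 replicas when sustained load exceeds 10 requests/sec
    # measured over a 30-second window
    autoscaler = dl.KubernetesRPSAutoscaler(min_replicas=0,
                                            max_replicas=5,
                                            threshold=10,
                                            rate_seconds=30)
    runtime = dl.KubernetesRuntime(pod_type=dl.InstanceCatalog.REGULAR_S,
                                   concurrency=10,
                                   autoscaler=autoscaler)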
dtlpy/entities/setting.py CHANGED
@@ -190,7 +190,8 @@ class Setting(BaseSetting):
             hint=None,
             client_api=None,
             project=None,
-            org=None
+            org=None,
+            setting_type=SettingsTypes.USER_SETTINGS
     ):
         super().__init__(
             default_value=default_value,
@@ -199,7 +200,7 @@ class Setting(BaseSetting):
             value_type=value_type,
             scope=scope,
             metadata=metadata,
-            setting_type=SettingsTypes.USER_SETTINGS,
+            setting_type=setting_type,
             client_api=client_api,
             project=project,
             org=org,
dtlpy/ml/base_model_adapter.py CHANGED
@@ -234,10 +234,20 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         :return: preprocessed: the var with the loaded item information (e.g. ndarray for image, dict for json files etc)
         """
         # Item to batch func
-        if self.model_entity.input_type in self.item_to_batch_mapping:
+        if isinstance(self.model_entity.input_type, list):
+            if 'text' in self.model_entity.input_type and 'text' in item.mimetype:
+                processed = self._item_to_text(item)
+            elif 'image' in self.model_entity.input_type and 'image' in item.mimetype:
+                processed = self._item_to_image(item)
+            else:
+                processed = self._item_to_item(item)
+
+        elif self.model_entity.input_type in self.item_to_batch_mapping:
             processed = self.item_to_batch_mapping[self.model_entity.input_type](item)
+
         else:
             processed = self._item_to_item(item)
+
         return processed

     def prepare_data(self,
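With input_type now allowed to be a list, preprocessing is routed per item by mimetype. A hedged illustration of that dispatch in isolation; this standalone helper is hypothetical and only mirrors the branch above:

    def route_item(input_type, mimetype):
        # List input types: match against the item's mimetype,
        # falling back to the generic item loader.
        if isinstance(input_type, list):
            if 'text' in input_type and 'text' in mimetype:
                return '_item_to_text'
            if 'image' in input_type and 'image' in mimetype:
                return '_item_to_image'
            return '_item_to_item'
        # Scalar input types keep the old mapping-based behavior
        return 'item_to_batch_mapping'

    assert route_item(['text', 'image'], 'image/jpeg') == '_item_to_image'
    assert route_item(['text', 'image'], 'text/plain') == '_item_to_text'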
@@ -546,8 +556,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         upload_features = self.adapter_defaults.resolve("upload_features", upload_features)

         self.logger.debug("Creating embeddings for dataset (name:{}, id:{}, using batch size {}".format(dataset.name,
-                                                                                                        dataset.id,
-                                                                                                        batch_size))
+                                                                                                         dataset.id,
+                                                                                                         batch_size))
         if not filters:
             filters = entities.Filters()
         if filters is not None and isinstance(filters, dict):
@@ -806,8 +816,17 @@ class BaseModelAdapter(utilities.BaseServiceRunner):

     @staticmethod
     def _item_to_text(item):
-        buffer = item.download(save_locally=False)
-        text = buffer.read().decode()
+        filename = item.download(overwrite=True)
+        text = None
+        if item.mimetype == 'text/plain' or item.mimetype == 'text/markdown':
+            with open(filename, 'r') as f:
+                text = f.read()
+            text = text.replace('\n', ' ')
+        else:
+            logger.warning('Item is not text file. mimetype: {}'.format(item.mimetype))
+            text = item
+        if os.path.exists(filename):
+            os.remove(filename)
         return text

     @staticmethod
dtlpy/new_instance.py CHANGED
@@ -22,7 +22,7 @@ class Dtlpy:
         # triggers
         TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
         # faas
-        FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+        FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
         InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
         PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
         # roberto
dtlpy/repositories/commands.py CHANGED
@@ -57,7 +57,7 @@ class Commands:
         if url is None:
             url_path = "/commands/{}".format(command_id)
         else:
-            url_path = url.split('v1')[1]
+            url_path = url.split('api/v1')[1]

         success, response = self._client_api.gen_request(req_type="get",
                                                          path=url_path)
dtlpy/repositories/computes.py CHANGED
@@ -1,7 +1,12 @@
+import base64
+import datetime
+import json
+
 from ..services.api_client import ApiClient
 from .. import exceptions, entities, repositories
 from typing import List, Optional, Dict
-
+from ..entities import ComputeCluster, ComputeContext, ComputeType, Project
+from ..entities.integration import IntegrationType

 class Computes:

@@ -9,6 +14,8 @@ class Computes:
         self._client_api = client_api
         self._base_url = '/compute'
         self._commands = None
+        self._projects = None
+        self._organizations = None

     @property
     def commands(self) -> repositories.Commands:
@@ -16,8 +23,21 @@ class Computes:
             self._commands = repositories.Commands(client_api=self._client_api)
         return self._commands

+    @property
+    def projects(self):
+        if self._projects is None:
+            self._projects = repositories.Projects(client_api=self._client_api)
+        return self._projects
+
+    @property
+    def organizations(self):
+        if self._organizations is None:
+            self._organizations = repositories.Organizations(client_api=self._client_api)
+        return self._organizations
+
     def create(
             self,
+            name: str,
             context: entities.ComputeContext,
             shared_contexts: Optional[List[entities.ComputeContext]],
             cluster: entities.ComputeCluster,
@@ -29,6 +49,7 @@ class Computes:
         """
         Create a new compute

+        :param name: Compute name
         :param context: Compute context
         :param shared_contexts: Shared contexts
         :param cluster: Compute cluster
@@ -40,11 +61,12 @@ class Computes:
         """

         payload = {
+            'name': name,
             'context': context.to_json(),
             'type': type.value,
             'global': is_global,
             'features': features,
-            shared_contexts: [sc.to_json() for sc in shared_contexts],
+            'shared_contexts': [sc.to_json() for sc in shared_contexts],
             'cluster': cluster.to_json()
         }

@@ -64,7 +86,7 @@ class Computes:
         )

         if wait:
-            command_id = compute.metadata.get('system', {}).get('create', {}).get('commandId', None)
+            command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', {})
             if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                 command.wait()
@@ -139,12 +161,65 @@ class Computes:

         return True

+    @staticmethod
+    def read_file(file_path):
+        try:
+            with open(file_path, 'r') as file:
+                content = file.read()
+            return content
+        except FileNotFoundError:
+            print(f"The file at {file_path} was not found.")
+        except IOError:
+            print(f"An error occurred while reading the file at {file_path}.")
+
+    def decode_and_parse_input(self, file_path):
+        """Decode a base64 encoded string from file a and parse it as JSON."""
+        decoded_bytes = base64.b64decode(self.read_file(file_path))
+        return json.loads(decoded_bytes)
+
+    @staticmethod
+    def create_integration(org, name, auth_data):
+        """Create a new key-value integration within the specified project."""
+        return org.integrations.create(
+            integrations_type=IntegrationType.KEY_VALUE,
+            name=name,
+            options={
+                'key': name,
+                'value': json.dumps(auth_data)
+            }
+        )
+
+    def setup_compute_cluster(self, config, integration, org_id, project=None):
+        """Set up a compute cluster using the provided configuration and integration."""
+        cluster = ComputeCluster.from_setup_json(config, integration)
+        project_id = None
+        if project is not None:
+            project_id = project.id
+        compute = self.create(
+            config['config']['name'],
+            ComputeContext([], org_id, project_id),
+            [],
+            cluster,
+            ComputeType.KUBERNETES)
+        return compute
+
+    def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
+        config = self.decode_and_parse_input(config_file_path)
+        project = None
+        if project_name is not None:
+            project = self.projects.get(project_name=project_name)
+        org = self.organizations.get(organization_id=org_id)
+        integration_name = ('cluster_integration_test_' + datetime.datetime.now().isoformat().split('.')[0]
+                            .replace(':', '_'))
+        integration = self.create_integration(org, integration_name, config['authentication'])
+        compute = self.setup_compute_cluster(config, integration, org_id, project)
+        return compute

 class ServiceDrivers:

     def __init__(self, client_api: ApiClient):
         self._client_api = client_api
-        self._base_url = '/serviceDriver'
+        self._base_url = '/serviceDrivers'

     def create(
             self,
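create_from_config_file ties these helpers together: it decodes the base64 setup file, stores the cluster credentials as a key-value integration on the organization, and registers the compute. A hedged usage sketch; the path and ids are placeholders, and dl.computes is assumed to be a Computes repository instance as used in tests/features/environment.py below:

    import dtlpy as dl

    # The file must hold base64-encoded JSON with 'config' (cluster
    # definition) and 'authentication' sections
    compute = dl.computes.create_from_config_file(
        config_file_path='/path/to/cluster_setup.b64',  # placeholder
        org_id='my-org-id',                             # placeholder
        project_name='my-project'                       # optional
    )
    print(compute.id, compute.name)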
@@ -226,3 +301,35 @@ class ServiceDrivers:
             raise exceptions.PlatformException(response)

         return True
+
+    def set_default(self, service_driver_id: str, org_id: str, update_existing_services=False):
+        """
+        Set a service driver as default
+
+        :param service_driver_id: Compute name
+        :param org_id: Organization ID
+        :param update_existing_services: Update existing services
+
+        :return: Service driver
+        """
+
+        # request
+        success, response = self._client_api.gen_request(
+            req_type='post',
+            path=self._base_url + '/default',
+            json_req={
+                'organizationId': org_id,
+                'updateExistingServices': update_existing_services,
+                'driverName': service_driver_id
+            }
+        )
+
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        service_driver = entities.ServiceDriver.from_json(
+            _json=response.json(),
+            client_api=self._client_api
+        )
+
+        return service_driver
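A hedged sketch of promoting a driver to the organization default; the ids are placeholders and service_drivers is assumed to be a ServiceDrivers repository instance. Note the endpoint receives the value as 'driverName' even though the parameter is named service_driver_id:

    # 'service_drivers' is assumed to be a ServiceDrivers repository instance
    driver = service_drivers.set_default(
        service_driver_id='my-driver',   # sent as 'driverName'
        org_id='my-org-id',
        update_existing_services=True    # also migrate running services
    )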
dtlpy/repositories/datasets.py CHANGED
@@ -515,7 +515,8 @@ class Datasets:
               with_items_annotations: bool = True,
               with_metadata: bool = True,
               with_task_annotations_status: bool = True,
-              dst_dataset_id: str = None):
+              dst_dataset_id: str = None,
+              target_directory: str = None):
         """
         Clone a dataset. Read more about cloning datatsets and items in our `documentation <https://dataloop.ai/docs/clone-merge-dataset#cloned-dataset>`_ and `SDK documentation <https://developers.dataloop.ai/tutorials/data_management/data_versioning/chapter/>`_.

@@ -528,6 +529,7 @@ class Datasets:
         :param bool with_metadata: true to clone with metadata
         :param bool with_task_annotations_status: true to clone with task annotations' status
         :param str dst_dataset_id: destination dataset id
+        :param str target_directory: target directory
         :return: dataset object
         :rtype: dtlpy.entities.dataset.Dataset

@@ -555,13 +557,17 @@ class Datasets:
         if copy_filters.has_field('hidden'):
             copy_filters.pop('hidden')

+        if target_directory is not None and not target_directory.startswith('/'):
+            target_directory = '/' + target_directory
+
         payload = {
             "name": clone_name,
             "filter": copy_filters.prepare(),
             "cloneDatasetParams": {
                 "withItemsAnnotations": with_items_annotations,
                 "withMetadata": with_metadata,
-                "withTaskAnnotationsStatus": with_task_annotations_status
+                "withTaskAnnotationsStatus": with_task_annotations_status,
+                "targetDirectory": target_directory
             }
         }
         if dst_dataset_id is not None:
dtlpy/repositories/dpks.py CHANGED
@@ -256,6 +256,25 @@ class Dpks:

         return entities.Dpk.from_json(response_pack.json(), self._client_api, dpk.project)

+    def update(self, dpk: entities.Dpk) -> entities.Dpk:
+        """
+        Update the dpk entity in the platform.
+
+        Note: the update will update only attributes, displayName, Icon, description and pipeline template preview
+
+        :param entities.Dpk dpk: the dpk entity to update.
+        :return the updated dpk entity.
+        :rtype entities.Dpk
+        """
+        success, response = self._client_api.gen_request(req_type='patch',
+                                                         path='/app-registry/{}'.format(dpk.id),
+                                                         json_req=dpk.to_json())
+        if not success:
+            raise exceptions.PlatformException(response)
+        res = response.json()
+        logger.info(res.get('message'))
+        return entities.Dpk.from_json(res.get('dpk'), self._client_api, self._project)
+
     def delete(self, dpk_id: str) -> bool:
         """
         Delete the dpk from the app store.
@@ -268,7 +287,7 @@ class Dpks:
         """
         success, response = self._client_api.gen_request(req_type='delete', path=f'/app-registry/{dpk_id}')
         if success:
-            logger.info('Deleted dpk successfully')
+            logger.info(f'Deleted dpk: {dpk_id} successfully')
         else:
             raise exceptions.PlatformException(response)
         return success
dtlpy/repositories/integrations.py CHANGED
@@ -99,7 +99,8 @@ class Integrations:
     def create(self,
                integrations_type: entities.IntegrationType,
                name: str,
-               options: dict):
+               options: dict,
+               metadata: dict = None):
         """
         Create an integration between an external storage and the organization.

@@ -118,6 +119,7 @@ class Integrations:
         :param IntegrationType integrations_type: integrations type dl.IntegrationType
         :param str name: integrations name
         :param dict options: dict of storage secrets
+        :param dict metadata: metadata
         :return: success
         :rtype: bool

@@ -142,6 +144,8 @@ class Integrations:

         url_path = '/orgs/{}/integrations'.format(organization_id)
         payload = {"type": integrations_type.value if isinstance(integrations_type, entities.IntegrationType) else integrations_type, 'name': name, 'options': options}
+        if metadata is not None:
+            payload['metadata'] = metadata
         success, response = self._client_api.gen_request(req_type='post',
                                                          path=url_path,
                                                          json_req=payload)
@@ -150,9 +154,9 @@ class Integrations:
         else:
             integration = entities.Integration.from_json(_json=response.json(), client_api=self._client_api)
             if integration.metadata and isinstance(integration.metadata, list) and len(integration.metadata) > 0:
-                for metadata in integration.metadata:
-                    if metadata['name'] == 'status':
-                        integration_status = metadata['value']
+                for m in integration.metadata:
+                    if m['name'] == 'status':
+                        integration_status = m['value']
                 logger.info('Integration status: {}'.format(integration_status))
         return integration

dtlpy/services/api_client.py CHANGED
@@ -1465,7 +1465,11 @@ class ApiClient:
             custom_env = os.environ.get('DTLPY_CUSTOM_ENV', None)
             environment = json.loads(base64.b64decode(custom_env.encode()).decode())
             env = environment.pop('url')
+            token = None
+            if self.environments.get(env):
+                token = self.environments[env].get('token', None)
             self.environments[env] = environment.get(env, environment)
+            self.environments[env]['token'] = token
             verify_ssl = self.environments[env].get('verify_ssl', None)
             if verify_ssl is not None and isinstance(verify_ssl, str):
                 self.environments[env]['verify_ssl'] = True if verify_ssl.lower() == 'true' else False
dtlpy-1.93.11.dist-info/METADATA → dtlpy-1.95.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dtlpy
-Version: 1.93.11
+Version: 1.95.6
 Summary: SDK and CLI for Dataloop platform
 Home-page: https://github.com/dataloop-ai/dtlpy
 Author: Dataloop Team
@@ -42,6 +42,7 @@ Requires-Dist: diskcache (>=5.4)
 Requires-Dist: redis (>=3.5)
 Requires-Dist: inquirer
 Requires-Dist: dtlpymetrics
+Requires-Dist: dataclasses

 ![logo.svg](docs%2F_static%2Flogo.svg)
 [![Documentation Status](https://readthedocs.org/projects/dtlpy/badge/?version=latest)](https://sdk-docs.dataloop.ai/en/latest/?badge=latest)
dtlpy-1.93.11.dist-info/RECORD → dtlpy-1.95.6.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
-dtlpy/__init__.py,sha256=6-Ioishmg5KdDJ3ZtouCDp-UYLzgysq7BI94IpiLl9Y,20596
-dtlpy/__version__.py,sha256=flCI96G7rktCrmZ4eEkrFS3Nw88MtB-SGsLZGEPtddc,20
+dtlpy/__init__.py,sha256=nE2SN0AD2rZ_ekF_kD7OzZbSE32H8zV5UM6t_E0LzTw,20647
+dtlpy/__version__.py,sha256=3OtkFGDOCL5-ZRo52dswwfcY5vYhh114MOzr2lJhEAA,19
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
-dtlpy/new_instance.py,sha256=I4Gc658s-yUD0-gEiC2pRDKaADZPdr1dm67K4mkx5xw,10065
+dtlpy/new_instance.py,sha256=ORhXmIsc8Kut2M1jekKL3dG_adRp7axK-25B4zJNqMU,10091
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
 dtlpy/assets/lock_open.png,sha256=BH9uyf5uYvgZrDpDw9qCUnT3UbkXG8XbeRmWDpWlV4M,18215
 dtlpy/assets/main.py,sha256=N1JUsx79qnXI7Hx22C8JOzHJdGHxvrXeTx5UZAxvJfE,1380
@@ -44,7 +44,7 @@ dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
 dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
 dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
 dtlpy/dlp/parser.py,sha256=p-TFaiAU2c3QkI97TXzL2LDR3Eq0hGDFrTc9J2jWLh4,30551
-dtlpy/entities/__init__.py,sha256=eD0ON6MAmzvc0NQmTWzxLPOdHbl5bu4Np3a1JMBfR4k,4805
+dtlpy/entities/__init__.py,sha256=R2kDC9VHOeRSTgXXqNowbf_yZwy7tbAkukvIlPZmPVE,4856
 dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
 dtlpy/entities/annotation.py,sha256=yk-JQzgzXvnDLFrOkmcHQfEtsiPqZeIisv80ksNB-f8,66912
 dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
@@ -56,15 +56,15 @@ dtlpy/entities/base_entity.py,sha256=i83KrtAz6dX4t8JEiUimLI5ZRrN0VnoUWKG2Zz49N5w
 dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
 dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
 dtlpy/entities/command.py,sha256=ARu8ttk-C7_Ice7chRyTtyOtakBTF09FC04mEk73SO8,5010
-dtlpy/entities/compute.py,sha256=b7R8K7Ay2g1_Mm0bqrpyuf8IMLlZC3yMYtWkZIL9DdA,11799
-dtlpy/entities/dataset.py,sha256=87o6FA9MYCIc0KBCUqQr_VsX-W2mGbJn64JvD-zp-EA,47354
+dtlpy/entities/compute.py,sha256=4FEpahPFFGHxye_fLh_p_kP6iEQ3QJK7S5hAdd6Afos,12744
+dtlpy/entities/dataset.py,sha256=tNCl7nNCx-DrZ3z96APhRdvllfQA1-9y8DpL6Ma2l0I,47516
 dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
-dtlpy/entities/dpk.py,sha256=WVEWplWRU7KM8YPY6BlaxSLVFw29elLsE32QOQgCgLo,17403
+dtlpy/entities/dpk.py,sha256=a5C1UG_cvDnXSee650WHH43QflxbJCo_g0V17-GRb24,17639
 dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
 dtlpy/entities/execution.py,sha256=WBiAws-6wZnQQ3y9wyvOeexA3OjxfaRdwDu5dSFYL1g,13420
 dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
 dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
-dtlpy/entities/filters.py,sha256=axgneXylIUIM-uABG5Uk6hFtTTMmLWoZ5B5NfthcAw8,22364
+dtlpy/entities/filters.py,sha256=tA-A0dS8nhMbnkHIo-INK6UuKzEPMyCdTs51K1-Vl9Y,22441
 dtlpy/entities/integration.py,sha256=CA5F1eQCGE_4c_Kry4nWRdeyjHctNnvexcDXg_M5HLU,5734
 dtlpy/entities/item.py,sha256=G6VVcVCudqeShWigZmNIuKD4OkvTRJ05CeXFXNe3Jk8,29691
 dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
@@ -79,7 +79,7 @@ dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiU
 dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKDugudc,6314
 dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
 dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
-dtlpy/entities/paged_entities.py,sha256=A6_D0CUJsN52dBG6yn-oHHzjuVDkBNejTG5r-KxWOxI,5848
+dtlpy/entities/paged_entities.py,sha256=6y44H3FSclQvhB1KLI4zuIs317hWOhdHUynldRrUJkE,5913
 dtlpy/entities/pipeline.py,sha256=OrRybxEa29S4sKtl7RTdf6kRgnQi90n4wlN4OsMJJLk,20671
 dtlpy/entities/pipeline_execution.py,sha256=XCXlBAHFYVL2HajE71hK-bPxI4gTwZvg5SKri4BgyRA,9928
 dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
@@ -87,8 +87,8 @@ dtlpy/entities/prompt_item.py,sha256=Kmvguz3f0sGtkKZS9OEA_-Yi4aQRCgdg1GBkaLQyyTg
 dtlpy/entities/recipe.py,sha256=Q1HtYgind3bEe-vnDZWhw6H-rcIAGhkGHPRWtLIkPSE,11917
 dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y4,3273
 dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
-dtlpy/entities/service.py,sha256=OaEcKsGgapwWRIzBUU8wvJqd0h_mpY7ICugVjzV7pDA,30211
-dtlpy/entities/setting.py,sha256=koydO8b0_bWVNklR2vpsXswxzBo8q83XtGk3wkma0MI,8522
+dtlpy/entities/service.py,sha256=ZV3HhBbafs0N_lSIWxu4CNJ39WThd7z5GAd0fCvSnFg,32462
+dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
 dtlpy/entities/task.py,sha256=XHiEqZYFlrDCtmw1MXsysjoBLdIzAk7coMrVk8bNIiE,19534
 dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
 dtlpy/entities/trigger.py,sha256=zh3wYUY2-zATh_7ous0Ck87Yojo9r9PAVQrkcESxoko,14266
@@ -146,7 +146,7 @@ dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRStt
 dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
 dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
 dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
-dtlpy/ml/base_model_adapter.py,sha256=-Y29Yze9TEMlE3bio_nvw05EiIfjn_H47WcbEIOtXcg,50112
+dtlpy/ml/base_model_adapter.py,sha256=mcq_1ELAcJ6xzqYg_U0E3rOD-rJumgSu8YeSL9R7czc,50901
 dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
 dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
 dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
@@ -159,17 +159,17 @@ dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzM
 dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
 dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
 dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zhGA,24668
-dtlpy/repositories/commands.py,sha256=8GJU2OQTH0grHFQE30l0UVqaPAwio4psk4VpiYklkFk,5589
+dtlpy/repositories/commands.py,sha256=kXhmyBpLZNs-6vKBo4iXaommpjcGBDXs287IICUnQMw,5593
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
-dtlpy/repositories/computes.py,sha256=eudVoUhEjTrkHm72BiAWwIEeaXq7PZJpqh3E6oCjJW8,6044
-dtlpy/repositories/datasets.py,sha256=Rauh-apKSKP7cWS99uhiZYZ-679qNpPm7HoMkMzyJ-s,51789
+dtlpy/repositories/computes.py,sha256=EtfE_3JhTdNlSYDPkKXBFkq-DBl4sgQqIm50ajvFdWM,9976
+dtlpy/repositories/datasets.py,sha256=rDpJXNyxOlJwDQB-wNkM-JIqOGH10q9nujnAl6y8_xU,52077
 dtlpy/repositories/downloader.py,sha256=pNwL7Nid8xmOyYNiv4DB_WY4RoKlxQ-U9nG2V99Gyr8,41342
-dtlpy/repositories/dpks.py,sha256=b9i-K4HHBA-7T7AZdICFfMUWtqr9--igVwOq0TKyq7Y,16612
+dtlpy/repositories/dpks.py,sha256=mj3QPvfzj_jZAscwIgpKUfa7fLxptc3OJQ_RrSfgYxo,17487
 dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
 dtlpy/repositories/executions.py,sha256=M84nhpFPPZq4fQeJ2m_sv6JT4NE2WDRMOXWr451J0bU,30403
 dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
 dtlpy/repositories/features.py,sha256=7xA2ihEuNgZD7HBQMMGLWpsS2V_3PgieKW2YAk1OeUU,9712
-dtlpy/repositories/integrations.py,sha256=gNQmw5ykFtBaimdxUkzCXQqefZaM8yQPnxWZkIJK7ww,11666
+dtlpy/repositories/integrations.py,sha256=Wi-CpT2PH36GFu3znWP5Uf2CmkqWBUYyOdwvatGD_eM,11798
 dtlpy/repositories/items.py,sha256=DqJ3g9bc4OLMm9KqI-OebXbr-zcEiohO1wGZJ1uE2Lg,37874
 dtlpy/repositories/messages.py,sha256=zYcoz8Us6j8Tb5Z7luJuvtO9xSRTuOCS7pl-ztt97Ac,3082
 dtlpy/repositories/models.py,sha256=GdVWHJ6kOIxM01wH7RVQ3CVaR4OmGurWJdQVHZezLDM,34789
@@ -193,7 +193,7 @@ dtlpy/repositories/uploader.py,sha256=iOlDYWIMy_h1Rbd7Mfug1I0e93dBJ0SxqP_BOwqYQP
 dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
 dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
 dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
-dtlpy/services/api_client.py,sha256=EG4Spm163N7Ig99tkubSYqEGQQBElK2jFtJGAek96OY,68145
+dtlpy/services/api_client.py,sha256=DBelaW5qpZoX7vQXjgLL2xTcTwUqJodZ901g0C3Panw,68331
 dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
 dtlpy/services/async_utils.py,sha256=lfpkTkRUvQoMTxaRZBHbPt5e43qdvpCGDe_-KcY2Jps,2810
 dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
@@ -221,19 +221,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.93.11.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
-dtlpy-1.93.11.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
-dtlpy-1.93.11.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
+dtlpy-1.95.6.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.95.6.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.95.6.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
 tests/assets/models_flow/main.py,sha256=87O3-JaWcC6m_kA39sqPhX70_VCBzzbLWmX2YQFilJw,1873
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-tests/features/environment.py,sha256=dyYLrhyaKFnobrz7jD-vgmmxjpL5HDwjQCbzOZa37dM,16261
-dtlpy-1.93.11.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-dtlpy-1.93.11.dist-info/METADATA,sha256=ZCyo2qfXprK4zhWfrWgyxt_KO90x0T36CoVfhgpN95o,2976
-dtlpy-1.93.11.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-dtlpy-1.93.11.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
-dtlpy-1.93.11.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
-dtlpy-1.93.11.dist-info/RECORD,,
+tests/features/environment.py,sha256=V23cUx_p4VpNk9kc2I0BDZJHO_xcJBFJq8m3JlYCooc,16736
+dtlpy-1.95.6.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.95.6.dist-info/METADATA,sha256=gwEWuQCr9AOla7PZkit6MzkeFa6pW626gXqni3GPKVM,3002
+dtlpy-1.95.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.95.6.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.95.6.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.95.6.dist-info/RECORD,,
tests/features/environment.py CHANGED
@@ -135,6 +135,13 @@ def after_feature(context, feature):
         except Exception:
             logging.exception('Failed to update api calls')

+    if hasattr(feature, 'dataloop_feature_compute'):
+        try:
+            compute = context.feature.dataloop_feature_compute
+            dl.computes.delete(compute_id=compute.id)
+        except Exception:
+            logging.exception('Failed to delete compute')
+

 @fixture
 def before_scenario(context, scenario):
@@ -202,6 +209,9 @@ def before_tag(context, tag):
         dat = tag.split("_")[-1] if "DAT" in tag else ""
         if hasattr(context, "scenario"):
             context.scenario.skip(f"Test mark as SKIPPED, Should be merged after {dat}")
+    if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME"):
+        if hasattr(context, "scenario"):
+            context.scenario.skip(f"Test mark as SKIPPED, Should be run only on RC")


 @fixture