dtlpy 1.92.18__py3-none-any.whl → 1.93.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -97,14 +97,18 @@ from .entities import (
  ItemLink, UrlLink, LinkTypeEnum,
  Modality, ModalityTypeEnum, ModalityRefTypeEnum,
  Workload, WorkloadUnit, ItemAction,
- PipelineExecution, CycleRerunMethod, PipelineExecutionNode, Pipeline, PipelineConnection,
+ PipelineExecution, PipelineExecutionStatus, CycleRerunMethod, PipelineExecutionNode, Pipeline, PipelineConnection,
  PipelineNode, TaskNode, CodeNode, PipelineStats, PipelineSettings,
  PipelineNodeType, PipelineNameSpace, PipelineResumeOption, Variable, CompositionStatus,
  FunctionNode, DatasetNode, PipelineConnectionPort, PipelineNodeIO, Organization, OrganizationsPlans, Integration,
  Driver, S3Driver, GcsDriver, AzureBlobDriver, CacheAction, PodType,
  ExternalStorage, IntegrationType, Role, PlatformEntityType, SettingsValueTypes, SettingsTypes, SettingsSectionNames,
  SettingScope, BaseSetting, UserSetting, Setting, ServiceSample, ExecutionSample, PipelineExecutionSample,
- ResourceExecution, Message, NotificationEventContext
+ ResourceExecution, Message, NotificationEventContext,
+ # compute
+ ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources,
+ NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute,
+ ServiceDriver
  )
  from .ml import BaseModelAdapter
  from .utilities import Converter, BaseServiceRunner, Progress, Context, AnnotationFormat
@@ -163,6 +167,8 @@ apps = repositories.Apps(client_api=client_api)
  dpks = repositories.Dpks(client_api=client_api)
  messages = repositories.Messages(client_api=client_api)
  compositions = repositories.Compositions(client_api=client_api)
+ computes = repositories.Computes(client_api=client_api)
+ service_drivers = repositories.ServiceDrivers(client_api=client_api)

  try:
      check_sdk.check(version=__version__, client_api=client_api)
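
Taken together, the `__init__.py` changes above expose the new compute entities and two new top-level repository handles. A minimal sketch of what becomes importable after upgrading; the org and project IDs are placeholders, and the entity fields follow the compute module added later in this diff:

import dtlpy as dl  # assumes dtlpy >= 1.93.11

# New compute entity classes are now exported at package level.
context = dl.ComputeContext(labels=[], org='<org-id>', project='<project-id>')
print(context.to_json())  # {'labels': [], 'org': '<org-id>', 'project': '<project-id>'}

# New top-level repository handles created on import.
print(type(dl.computes))         # repositories.Computes
print(type(dl.service_drivers))  # repositories.ServiceDrivers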
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.92.18'
+ version = '1.93.11'
@@ -58,7 +58,7 @@ from .driver import Driver, S3Driver, GcsDriver, AzureBlobDriver
  from .pipeline import Pipeline, PipelineStats, PipelineResumeOption, PipelineSettings, Variable, CompositionStatus
  from .node import PipelineConnection, PipelineNode, PipelineConnectionPort, PipelineNodeIO, TaskNode, \
      CodeNode, FunctionNode, PipelineNodeType, PipelineNameSpace, DatasetNode
- from .pipeline_execution import PipelineExecution, PipelineExecutionNode, CycleRerunMethod
+ from .pipeline_execution import PipelineExecution, PipelineExecutionNode, PipelineExecutionStatus, CycleRerunMethod
  from .feature import Feature
  from .feature_set import FeatureSet, FeatureEntityType
  from .organization import Organization, OrganizationsPlans, MemberOrgRole, CacheAction, PodType
@@ -75,3 +75,6 @@ from .app_module import AppModule
  from .resource_execution import ResourceExecution
  from .message import Message, NotificationEventContext
  from .prompt_item import Prompt, PromptItem, PromptType
+ from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources, \
+     NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute, \
+     ServiceDriver
dtlpy/entities/app.py CHANGED
@@ -53,6 +53,7 @@ class App(entities.BaseEntity):
  _project = attr.ib(type=entities.Project, repr=False)
  _client_api = attr.ib(type=ApiClient, repr=False)
  _repositories = attr.ib(repr=False)
+ integrations = attr.ib(type=list, default=None)

  @_repositories.default
  def set_repositories(self):
@@ -186,6 +187,8 @@ class App(entities.BaseEntity):
  _json['status'] = self.status
  if self.settings != {}:
      _json['settings'] = self.settings
+ if self.integrations is not None:
+     _json['integrations'] = self.integrations

  return _json
@@ -210,7 +213,8 @@ class App(entities.BaseEntity):
  project=project,
  metadata=_json.get('metadata', None),
  status=_json.get('status', None),
- settings=_json.get('settings', {})
+ settings=_json.get('settings', {}),
+ integrations=_json.get('integrations', None)
  )
  app.is_fetched = is_fetched
  return app
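
A hedged sketch of how the new optional `integrations` field behaves on the `App` entity. The retrieval calls and the shape of each integration entry are illustrative assumptions, not taken from this diff:

import dtlpy as dl

project = dl.projects.get(project_name='<project-name>')
app = project.apps.get(app_name='<app-name>')

print(app.integrations)  # None unless the platform returned an integrations list
app_json = app.to_json()
# Per the to_json change above, the key is emitted only when the attribute is not None.
print('integrations' in app_json)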
@@ -0,0 +1,374 @@
+ from enum import Enum
+ from typing import List, Optional, Dict
+ from ..services.api_client import ApiClient
+ from .. import repositories
+
+
+ class ClusterProvider(str, Enum):
+     GCP = 'gcp'
+     AWS = 'aws'
+     AZURE = 'azure'
+     HPC = 'hpc'
+
+
+ class ComputeType(str, Enum):
+     KUBERNETES = "kubernetes"
+
+
+ class ComputeStatus(str, Enum):
+     READY = "ready"
+     INITIALIZING = "initializing"
+     PAUSE = "pause"
+
+
+ class Toleration:
+     def __init__(self, name: str):
+         self.name = name
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             name=_json.get('name')
+         )
+
+     def to_json(self):
+         return {
+             'name': self.name
+         }
+
+
+ class DeploymentResource:
+     def __init__(self, gpu: int, cpu: int, memory: str):
+         self.gpu = gpu
+         self.cpu = cpu
+         self.memory = memory
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             gpu=_json.get('gpu', None),
+             cpu=_json.get('cpu', None),
+             memory=_json.get('memory', None)
+         )
+
+     def to_json(self):
+         _json = {}
+         if self.gpu is not None:
+             _json['gpu'] = self.gpu
+         if self.cpu is not None:
+             _json['cpu'] = self.cpu
+         if self.memory is not None:
+             _json['memory'] = self.memory
+
+
+ class DeploymentResources:
+     def __init__(self, request: DeploymentResource, limit: DeploymentResource):
+         self.request = request
+         self.limit = limit
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             request=DeploymentResource.from_json(_json.get('request', dict())),
+             limit=DeploymentResource.from_json(_json.get('limit', dict()))
+         )
+
+     def to_json(self):
+         return {
+             'request': self.request.to_json(),
+             'limit': self.limit.to_json()
+         }
+
+
+ class NodePool:
+     def __init__(
+             self,
+             name: str,
+             is_dl_type_default: bool,
+             dl_type: Optional[str] = None,
+             tolerations: Optional[List[Toleration]] = None,
+             description: str = "",
+             node_selector: str = "",
+             preemtible: bool = False,
+             deployment_resources_request: DeploymentResources = None
+     ):
+         self.name = name
+         self.is_dl_type_default = is_dl_type_default
+         self.dl_type = dl_type
+         self.tolerations = tolerations if tolerations is not None else []
+         self.description = description
+         self.node_selector = node_selector
+         self.preemtible = preemtible
+         self.deployment_resources_request = deployment_resources_request
+
+     @classmethod
+     def from_json(cls, _json):
+         node_pool = cls(
+             name=_json.get('name'),
+             is_dl_type_default=_json.get('isDlTypeDefault'),
+             dl_type=_json.get('dlType'),
+             description=_json.get('description'),
+             node_selector=_json.get('nodeSelector'),
+             preemtible=_json.get('preemtible'),
+             deployment_resources_request=DeploymentResources.from_json(_json.get('deploymentResourcesRequest', dict())),
+             tolerations=[Toleration.from_json(t) for t in _json.get('tolerations', list())]
+         )
+
+         return node_pool
+
+     def to_json(self):
+         _json = {
+             'name': self.name,
+             'isDlTypeDefault': self.is_dl_type_default,
+             'description': self.description,
+             'nodeSelector': self.node_selector,
+             'preemtible': self.preemtible,
+             'deploymentResourcesRequest': self.deployment_resources_request.to_json(),
+             'tolerations': [t.to_json() for t in self.tolerations]
+         }
+
+         if self.dl_type is not None:
+             _json['dlType'] = self.dl_type
+
+         return _json
+
+
+ class AuthenticationIntegration:
+     def __init__(self, id: str, type: str):
+         self.id = id
+         self.type = type
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             id=_json.get('id'),
+             type=_json.get('type')
+         )
+
+     def to_json(self):
+         return {
+             'id': self.id,
+             'type': self.type
+         }
+
+
+ class Authentication:
+     def __init__(self, integration: AuthenticationIntegration):
+         self.integration = integration
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             integration=AuthenticationIntegration.from_json(_json.get('integration', dict()))
+         )
+
+     def to_json(self):
+         return {
+             'integration': self.integration.to_json()
+         }
+
+
+ class ComputeCluster:
+     def __init__(
+             self,
+             name: str,
+             endpoint: str,
+             kubernetes_version: str,
+             provider: ClusterProvider,
+             node_pools: Optional[List[NodePool]] = None,
+             metadata: Optional[Dict] = None,
+             authentication: Optional[Authentication] = None,
+     ):
+         self.name = name
+         self.endpoint = endpoint
+         self.kubernetes_version = kubernetes_version
+         self.provider = provider
+         self.node_pools = node_pools if node_pools is not None else []
+         self.metadata = metadata if metadata is not None else {}
+         self.authentication = authentication if authentication is not None else Authentication(
+             AuthenticationIntegration("", ""))
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             name=_json.get('name'),
+             endpoint=_json.get('endpoint'),
+             kubernetes_version=_json.get('kubernetesVersion'),
+             provider=ClusterProvider(_json.get('provider')),
+             node_pools=[NodePool.from_json(np) for np in _json.get('nodePools', list())],
+             metadata=_json.get('metadata'),
+             authentication=Authentication.from_json(_json.get('authentication', dict()))
+         )
+
+     def to_json(self):
+         return {
+             'name': self.name,
+             'endpoint': self.endpoint,
+             'kubernetesVersion': self.kubernetes_version,
+             'provider': self.provider.value,
+             'nodePools': [np.to_json() for np in self.node_pools],
+             'metadata': self.metadata,
+             'authentication': self.authentication.to_json()
+         }
+
+
+ class ComputeContext:
+     def __init__(self, labels: List[str], org: str, project: Optional[str] = None):
+         self.labels = labels
+         self.org = org
+         self.project = project
+
+     @classmethod
+     def from_json(cls, _json):
+         return cls(
+             labels=_json.get('labels', list()),
+             org=_json.get('org'),
+             project=_json.get('project')
+         )
+
+     def to_json(self):
+         return {
+             'labels': self.labels,
+             'org': self.org,
+             'project': self.project
+         }
+
+
+ class Compute:
+     def __init__(
+             self,
+             id: str,
+             context: ComputeContext,
+             client_api: ApiClient,
+             shared_contexts: Optional[List[ComputeContext]] = None,
+             global_: Optional[bool] = None,
+             status: ComputeStatus = ComputeStatus.INITIALIZING,
+             type: ComputeType = ComputeType.KUBERNETES,
+             features: Optional[Dict] = None,
+             metadata: Optional[Dict] = None,
+     ):
+         self.id = id
+         self.context = context
+         self.shared_contexts = shared_contexts if shared_contexts is not None else []
+         self.global_ = global_
+         self.status = status
+         self.type = type
+         self.features = features if features is not None else dict()
+         self.metadata = metadata if metadata is not None else dict()
+         self._client_api = client_api
+         self._computes = None
+         self._serviceDrivers = None
+
+     @property
+     def computes(self):
+         if self._computes is None:
+             self._computes = repositories.Computes(client_api=self._client_api)
+         return self._computes
+
+     @property
+     def service_drivers(self):
+         if self._serviceDrivers is None:
+             self._serviceDrivers = repositories.ServiceDrivers(client_api=self._client_api)
+         return self._serviceDrivers
+
+     def delete(self):
+         return self._computes.delete(compute_id=self.id)
+
+     def update(self):
+         return self._computes.update(compute=self)
+
+     @classmethod
+     def from_json(cls, _json, client_api: ApiClient):
+         return cls(
+             id=_json.get('id'),
+             context=ComputeContext.from_json(_json.get('context', dict())),
+             shared_contexts=[ComputeContext.from_json(sc) for sc in _json.get('sharedContexts', list())],
+             global_=_json.get('global'),
+             status=ComputeStatus(_json.get('status')),
+             type=ComputeType(_json.get('type')),
+             features=_json.get('features'),
+             client_api=client_api,
+             metadata=_json.get('metadata')
+         )
+
+     def to_json(self):
+         return {
+             'id': self.id,
+             'context': self.context.to_json(),
+             'sharedContexts': [sc.to_json() for sc in self.shared_contexts],
+             'global': self.global_,
+             'status': self.status.value,
+             'type': self.type.value,
+             'features': self.features,
+             'metadata': self.metadata
+         }
+
+
+ class KubernetesCompute(Compute):
+     def __init__(
+             self,
+             id: str,
+             context: ComputeContext,
+             cluster: ComputeCluster,
+             shared_contexts: Optional[List[ComputeContext]] = None,
+             global_: Optional[bool] = None,
+             status: ComputeStatus = ComputeStatus.INITIALIZING,
+             type: ComputeType = ComputeType.KUBERNETES,
+             features: Optional[Dict] = None,
+             metadata: Optional[Dict] = None,
+             client_api: ApiClient = None
+     ):
+         super().__init__(id=id, context=context, shared_contexts=shared_contexts, global_=global_, status=status,
+                          type=type, features=features, metadata=metadata, client_api=client_api)
+         self.cluster = cluster
+
+     @classmethod
+     def from_json(cls, _json, client_api: ApiClient):
+         return cls(
+             id=_json.get('id'),
+             context=ComputeContext.from_json(_json.get('context', dict())),
+             cluster=ComputeCluster.from_json(_json.get('cluster', dict())),
+             shared_contexts=[ComputeContext.from_json(sc) for sc in _json.get('sharedContexts', list())],
+             global_=_json.get('global'),
+             status=ComputeStatus(_json.get('status')),
+             type=ComputeType(_json.get('type')),
+             features=_json.get('features'),
+             metadata=_json.get('metadata'),
+             client_api=client_api
+         )
+
+     def to_json(self):
+         return {
+             'id': self.id,
+             'context': self.context.to_json(),
+             'cluster': self.cluster.to_json(),
+             'sharedContexts': [sc.to_json() for sc in self.shared_contexts],
+             'global': self.global_,
+             'status': self.status.value,
+             'type': self.type.value,
+             'features': self.features
+         }
+
+
+ class ServiceDriver:
+     def __init__(self, name: str, context: ComputeContext, compute_id: str, client_api: ApiClient):
+         self.name = name
+         self.context = context
+         self.compute_id = compute_id
+         self.client_api = client_api
+
+     @classmethod
+     def from_json(cls, _json, client_api: ApiClient):
+         return cls(
+             name=_json.get('name'),
+             context=ComputeContext.from_json(_json.get('context', dict())),
+             compute_id=_json.get('computeId'),
+             client_api=client_api
+         )
+
+     def to_json(self):
+         return {
+             'name': self.name,
+             'context': self.context.to_json(),
+             'computeId': self.compute_id
+         }
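
The new module above is plain `from_json`/`to_json` entity code, so it can be exercised without a live backend. A minimal round-trip sketch, using a hypothetical payload whose keys follow the `from_json` methods shown above; passing `client_api=None` is enough here because the client is only needed for the repository-backed properties:

from dtlpy import entities

payload = {
    'id': '<compute-id>',
    'context': {'labels': [], 'org': '<org-id>', 'project': '<project-id>'},
    'cluster': {
        'name': 'demo-cluster',
        'endpoint': 'https://10.0.0.1',
        'kubernetesVersion': '1.28',
        'provider': 'gcp',
        'nodePools': [],
        'authentication': {'integration': {'id': '<integration-id>', 'type': '<integration-type>'}},
    },
    'status': 'ready',
    'type': 'kubernetes',
}

compute = entities.KubernetesCompute.from_json(_json=payload, client_api=None)
print(compute.cluster.provider)   # ClusterProvider.GCP
print(compute.status)             # ComputeStatus.READY
print(compute.to_json()['cluster']['kubernetesVersion'])  # '1.28'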
dtlpy/entities/dpk.py CHANGED
@@ -164,6 +164,7 @@ class Components(entities.DlEntity):
  compute_configs: List[DpkComputeConfig] = entities.DlProperty(location=['computeConfigs'], _kls='DpkComputeConfig')
  channels: List[DpkComponentChannel] = entities.DlProperty(location=['channels'], _kls='DpkComponentChannel')
  pipeline_templates: List[dict] = entities.DlProperty(location=['pipelineTemplates'])
+ integrations: List[dict] = entities.DlProperty(location=['integrations'])

  @panels.default
  def default_panels(self):
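
A short sketch of reading the new `integrations` component list from a published DPK; the retrieval call is an assumption, and a manifest may simply not declare the section:

import dtlpy as dl

dpk = dl.dpks.get(dpk_name='<dpk-name>')
integrations = dpk.components.integrations  # list of dicts from the manifest, if declared
for integration in integrations or []:
    print(integration)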
dtlpy/entities/filters.py CHANGED
@@ -59,6 +59,7 @@ class FiltersOperations(str, Enum):
  LESS_THAN = "lt"
  EXISTS = "exists"
  MATCH = "match"
+ NIN = 'nin'


  class FiltersMethod(str, Enum):
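
A hedged usage sketch for the new `nin` (not-in) operator; the field and values are illustrative, and the query is built with the existing `Filters.add` API:

import dtlpy as dl

filters = dl.Filters(resource=dl.FiltersResource.ITEM)
# Keep only items whose mimetype is NOT one of the listed values.
filters.add(field='metadata.system.mimetype', values=['image/jpeg', 'image/png'],
            operator=dl.FiltersOperations.NIN)
# pages = dataset.items.list(filters=filters)  # assuming an existing `dataset` entity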
dtlpy/entities/item.py CHANGED
@@ -189,12 +189,11 @@ class Item(entities.BaseEntity):
  binary.write(json.dumps(_json).encode())
  binary.seek(0)
  binary.name = self.name
- resp = requests.post(url=client_api.environment + f'/items/{self.id}/revisions',
-                      headers=client_api.auth,
-                      files={'file': (binary.name, binary)}
-                      )
- if not resp.ok:
-     raise ValueError(resp.text)
+ success, resp = client_api.gen_request(req_type='post',
+                                        path=f'/items/{self.id}/revisions',
+                                        files={'file': (binary.name, binary)})
+ if not success:
+     raise exceptions.PlatformException(resp)

  @property
  def project(self):
dtlpy/entities/model.py CHANGED
@@ -158,13 +158,13 @@ class Model(entities.BaseEntity):
  """
  if project is not None:
      if project.id != _json.get('context', {}).get('project', None):
-         logger.warning('Model has been fetched from a project that is not in it projects list')
+         logger.warning("Model's project is different then the input project")
          project = None

  if package is not None:
      if package.id != _json.get('packageId', None):
-         logger.warning('Model has been fetched from a model that is not in it projects list')
-         model = None
+         logger.warning("Model's package is different then the input package")
+         package = None

  model_artifacts = [entities.Artifact.from_json(_json=artifact,
                                                 client_api=client_api,