dtlpy 1.104.14__py3-none-any.whl → 1.105.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__version__.py +1 -1
- dtlpy/entities/ontology.py +4 -1
- dtlpy/repositories/computes.py +10 -4
- dtlpy/repositories/integrations.py +13 -18
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/METADATA +1 -1
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/RECORD +14 -14
- tests/features/environment.py +34 -0
- {dtlpy-1.104.14.data → dtlpy-1.105.6.data}/scripts/dlp +0 -0
- {dtlpy-1.104.14.data → dtlpy-1.105.6.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.104.14.data → dtlpy-1.105.6.data}/scripts/dlp.py +0 -0
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/LICENSE +0 -0
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/WHEEL +0 -0
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/top_level.txt +0 -0
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.104.14'
+version = '1.105.6'
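For a quick check that the upgrade took effect, the installed version can be read back at runtime. A minimal sketch; it assumes dtlpy re-exports the value from dtlpy/__version__.py as dl.__version__, which is the usual pattern but is not shown in this diff:

    import dtlpy as dl

    # Should print 1.105.6 after upgrading; the value originates in dtlpy/__version__.py.
    print(dl.__version__)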
dtlpy/entities/ontology.py
CHANGED
@@ -766,7 +766,7 @@ class Ontology(entities.BaseEntity):
         # TODO: Add support for import from ontology entity in the Future
         if not self._use_attributes_2:
             raise ValueError("This method is only supported for attributes 2 mode!")
-        new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api
+        new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api)

         # Update 'labels' and 'attributes'
         self.labels = new_ontology.labels

@@ -794,6 +794,9 @@ class Ontology(entities.BaseEntity):
             attribute_range=attribute_range
         )

+        # Get remote updated 'attributes'
+        self.metadata["attributes"] = self.ontologies.get(ontology_id=self.id).attributes
+
         # Update 'instance map' and 'color map'
         self._instance_map = new_ontology.instance_map
         self._color_map = new_ontology.color_map
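The added lines above make the attribute-update flow re-read the ontology from the platform and store the fresh attributes in self.metadata["attributes"]. A minimal usage sketch; it assumes the surrounding method is the SDK's Ontology.update_attributes (signature not shown in this diff), and the project and ontology identifiers are placeholders:

    import dtlpy as dl

    # Placeholders: replace with a real project name and ontology id.
    project = dl.projects.get(project_name='my-project')
    ontology = project.ontologies.get(ontology_id='<ontology-id>')

    # Update (or create) an attribute; parameter names follow the SDK's
    # update_attributes signature, assumed here rather than taken from the diff.
    ontology.update_attributes(key='color',
                               attribute_type=dl.AttributesTypes.CHECKBOX,
                               values=['red', 'green', 'blue'])

    # With this release the local copy is refreshed from the platform right
    # after the update, so metadata["attributes"] reflects the remote state.
    print(ontology.metadata.get('attributes'))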
dtlpy/repositories/computes.py
CHANGED
@@ -8,6 +8,7 @@ from typing import List, Optional, Dict
 from ..entities import ComputeCluster, ComputeContext, ComputeType, Project
 from ..entities.integration import IntegrationType

+
 class Computes:

     def __init__(self, client_api: ApiClient):

@@ -44,7 +45,8 @@ class Computes:
             type: entities.ComputeType = entities.ComputeType.KUBERNETES,
             is_global: Optional[bool] = False,
             features: Optional[Dict] = None,
-            wait=True
+            wait=True,
+            status: entities.ComputeStatus = None
     ):
         """
         Create a new compute

@@ -57,6 +59,7 @@ class Computes:
         :param is_global: Is global
         :param features: Features
         :param wait: Wait for compute creation
+        :param status: Compute status
         :return: Compute
         """

@@ -67,7 +70,8 @@ class Computes:
             'global': is_global,
             'features': features,
             'shared_contexts': [sc.to_json() for sc in shared_contexts],
-            'cluster': cluster.to_json()
+            'cluster': cluster.to_json(),
+            'status': status
         }

         # request

@@ -86,7 +90,7 @@ class Computes:
         )

         if wait:
-            command_id = compute.metadata.get('system', {}).get('commands', {}).get('create',
+            command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
             if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                 command.wait()

@@ -200,7 +204,8 @@ class Computes:
             ComputeContext([], org_id, project_id),
             [],
             cluster,
-            ComputeType.KUBERNETES
+            ComputeType.KUBERNETES,
+            status=config['config'].get('status', None))
         return compute

     def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):

@@ -215,6 +220,7 @@ class Computes:
         compute = self.setup_compute_cluster(config, integration, org_id, project)
         return compute

+
 class ServiceDrivers:

     def __init__(self, client_api: ApiClient):
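The new status value can also come from a compute configuration file (see the config['config'].get('status', None) line above). A brief sketch of that path, with placeholder identifiers; it assumes the repository is exposed as dl.computes, and only the signature create_from_config_file(config_file_path, org_id, project_name=None) is taken from the diff:

    import dtlpy as dl

    # Placeholders: point to a compute config file in whatever format the SDK
    # expects and to a real organization id. A 'status' key inside the config's
    # 'config' section is what gets threaded into the new status parameter.
    compute = dl.computes.create_from_config_file(
        config_file_path='compute_config.json',
        org_id='<org-id>',
    )
    print(compute.id)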
dtlpy/repositories/integrations.py
CHANGED

@@ -114,6 +114,8 @@ class Integrations:
         aws-cross - {}
         gcp-cross - {}
         gcp-workload-identity-federation - {"secret": "", "content": "{}", "clientId": ""}
+        private-registry (ECR) - {"name": "", "spec": {"accessKeyId": "", "secretAccessKey": "", "account": "", "region": ""}}
+        private-registry (GAR) - {"name": "", "spec": {"password": ""}} (can use generate_gar_options to generate the options)

         **Prerequisites**: You must be an *owner* in the organization.

@@ -129,7 +131,7 @@ class Integrations:
         .. code-block:: python

             project.integrations.create(integrations_type=dl.IntegrationType.S3,
-                                        name='
+                                        name='S3Integration',
                                         options={key: "Access key ID", secret: "Secret access key"})
         """

@@ -144,7 +146,9 @@ class Integrations:
             organization_id = self.org.id

         url_path = '/orgs/{}/integrations'.format(organization_id)
-        payload = {"type": integrations_type.value if isinstance(integrations_type,
+        payload = {"type": integrations_type.value if isinstance(integrations_type,
+                                                                 entities.IntegrationType) else integrations_type,
+                   'name': name, 'options': options}
         if metadata is not None:
             payload['metadata'] = metadata
         success, response = self._client_api.gen_request(req_type='post',

@@ -300,21 +304,7 @@ class Integrations:
         available_integrations = miscellaneous.List(response.json())
         return available_integrations

-    def
-        password = self.__create_gar_password(service_account, location)
-        return self.create(
-            integrations_type='private-registry',
-            name='gar-1',
-            metadata={"provider": "gcp"},
-            options={
-                "name": "_json_key",
-                "spec": {
-                    "password": password
-                }
-            }
-        )
-
-    def __create_gar_password(self, service_account: str, location: str) -> str:
+    def generate_gar_options(self, service_account: str, location: str) -> dict:
         """
         Generates a Google Artifact Registry JSON configuration and returns it as a base64-encoded string.

@@ -348,4 +338,9 @@ class Integrations:
             }
         }

-        return
+        return {
+            "name": "_json_key",
+            "spec": {
+                "password": str(base64.b64encode(bytes(json.dumps(encoded_pass), 'utf-8')))[2:-1]
+            }
+        }
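The removed helper above used to create a hard-coded 'gar-1' integration internally; after this change callers build the options with generate_gar_options and pass them to create themselves. A sketch of that flow, with placeholder names, assuming the integrations repository is reached the same way as in the docstring example above; generate_gar_options(service_account, location) and the option shape are taken from the diff:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')  # placeholder project

    # The service-account JSON string and location are placeholders.
    options = project.integrations.generate_gar_options(
        service_account='<service-account-json>',
        location='us-central1',
    )

    integration = project.integrations.create(
        integrations_type='private-registry',
        name='gar-registry',              # placeholder; the removed helper hard-coded 'gar-1'
        metadata={"provider": "gcp"},
        options=options,
    )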
{dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
 dtlpy/__init__.py,sha256=GjtFPFltVerHF1m6ePaVp5oUWcg7yavd3aNhknTip9U,20961
-dtlpy/__version__.py,sha256=
+dtlpy/__version__.py,sha256=RkZdRrLtjnqMBrfIPP4HgsEaxdXnqqxv1qVlCI1mifo,20
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
 dtlpy/new_instance.py,sha256=u_c6JtgqsKCr7TU24-g7_CaST9ghqamMhM4Z0Zxt50w,10121
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976

@@ -73,7 +73,7 @@ dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
 dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
 dtlpy/entities/model.py,sha256=YwjIi3MxAZoyartTvqx_qhtDKQe6zVsQuwZbYLygMxU,26898
 dtlpy/entities/node.py,sha256=yPPYDLtNMc6vZbbf4FIffY86y7tkaTvYm42Jb7k3Ofk,39617
-dtlpy/entities/ontology.py,sha256=
+dtlpy/entities/ontology.py,sha256=924g9c2ZTfr69fWd_ejrVU0C-MAUR8UAhhz6GY-IQME,32100
 dtlpy/entities/organization.py,sha256=Zm-tTHV82PvYyTNetRRXqvmvzBCbXEwS-gAENf7Zny4,9874
 dtlpy/entities/package.py,sha256=QSDePHlp4ik19aUE3dAUC7edh0oUUVjzSmMG867avc4,26363
 dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiUBKNZo,211

@@ -164,7 +164,7 @@ dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zh
 dtlpy/repositories/collections.py,sha256=C_BPMg128Sl9AG3U4PxgI_2aaehQ2NuehMmzoTaXbPQ,11459
 dtlpy/repositories/commands.py,sha256=i6gQgOmRDG8ixqKU7672H3CvGt8VLT3ihDVfri1eWWc,5610
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
-dtlpy/repositories/computes.py,sha256=
+dtlpy/repositories/computes.py,sha256=l0-FS3_8WEGG5tbtIR3ltsZc6MyHVkiYajHTCaeUugk,10156
 dtlpy/repositories/datasets.py,sha256=SpG86uToq-E5nVHMwHgWx6VwwwkgfYo8x5vZ0WA3Ouw,56546
 dtlpy/repositories/downloader.py,sha256=CiT8KIjJ8l52Ng003f2_bmolIpe64fi8A_GGEl39M1Y,44254
 dtlpy/repositories/dpks.py,sha256=dglvaiSFBvEithhlQ0RAXwzTxoZaICONs-owx3e2nfU,17848

@@ -172,7 +172,7 @@ dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY
 dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
 dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
 dtlpy/repositories/features.py,sha256=A_RqTJxzjTh-Wbm0uXaoTNyHSfCLbeiH38iB11p2ifY,9915
-dtlpy/repositories/integrations.py,sha256=
+dtlpy/repositories/integrations.py,sha256=sWij_MbxeAlCs3uDRGGKPX__T-h_mVppe4bErkCGIyM,14102
 dtlpy/repositories/items.py,sha256=AF8h7-Yje1p16nXyofNLiC92bRVZtZjtHRPvHwbW62w,38423
 dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
 dtlpy/repositories/models.py,sha256=IekNMcnuKVaAVTJf2AJv6YvX5qCd9kkSl4ETPMWP4Zc,38213

@@ -224,19 +224,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.105.6.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.105.6.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.105.6.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
 tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT8_g,2123
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-tests/features/environment.py,sha256=
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+tests/features/environment.py,sha256=TMeUzSZkksHqbxNBDLk-LYBMD4G5dMo4ZLZXPwQImVE,18751
+dtlpy-1.105.6.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.105.6.dist-info/METADATA,sha256=cB65EU8nW0Ju7hoUi4qtuoZ37dQcofrvBcf1rs0D-ww,3019
+dtlpy-1.105.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.105.6.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.105.6.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.105.6.dist-info/RECORD,,
tests/features/environment.py
CHANGED
@@ -282,6 +282,11 @@ def after_tag(context, tag):
             use_fixture(restore_json_file, context)
         except Exception:
             logging.exception('Failed to restore json file')
+    elif tag == 'compute_serviceDriver.delete':
+        try:
+            use_fixture(delete_compute_servicedriver, context)
+        except Exception:
+            logging.exception('Failed to delete service')
     elif tag == 'frozen_dataset':
         pass
     elif 'testrail-C' in tag:

@@ -504,7 +509,36 @@ def models_delete(context):
     assert all_deleted


+def delete_compute_servicedriver(context):
+    if not hasattr(context, 'to_delete_computes_ids') and not hasattr(context, 'to_delete_service_drivers_ids'):
+        return
+
+    all_deleted = True
+    for service_driver_id in context.to_delete_service_drivers_ids:
+        try:
+            context.dl.service_drivers.delete(service_driver_id=service_driver_id)
+        except context.dl.exceptions.NotFound:
+            pass
+        except:
+            all_deleted = False
+            logging.exception('Failed deleting serviceDriver: {}'.format(service_driver_id))
+    assert all_deleted
+
+    all_deleted = True
+    for compute_id in context.to_delete_computes_ids:
+        try:
+            context.dl.computes.delete(compute_id=compute_id)
+        except context.dl.exceptions.NotFound:
+            pass
+        except:
+            all_deleted = False
+            logging.exception('Failed deleting compute: {}'.format(compute_id))
+    assert all_deleted
+
+
 def restore_json_file(context):
+    if not hasattr(context.feature, 'dataloop_feature_project'):
+        return
     if not hasattr(context, 'backup_path') or not hasattr(context, 'original_path'):
         assert False, 'Please make sure to set the original_path and backup_path in the context'
     # Restore the file from the backup
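The new cleanup fixture reads ID lists off the behave context. A short sketch of how a test step would register resources for it; only the attribute names come from delete_compute_servicedriver above, the helper itself is hypothetical:

    # Inside a behave step implementation (sketch):
    def register_for_cleanup(context, compute, service_driver):
        # Attribute names match what delete_compute_servicedriver reads.
        context.to_delete_computes_ids = getattr(context, 'to_delete_computes_ids', [])
        context.to_delete_service_drivers_ids = getattr(context, 'to_delete_service_drivers_ids', [])
        context.to_delete_computes_ids.append(compute.id)
        context.to_delete_service_drivers_ids.append(service_driver.id)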
Files without changes:
- {dtlpy-1.104.14.data → dtlpy-1.105.6.data}/scripts/dlp
- {dtlpy-1.104.14.data → dtlpy-1.105.6.data}/scripts/dlp.bat
- {dtlpy-1.104.14.data → dtlpy-1.105.6.data}/scripts/dlp.py
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/LICENSE
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/WHEEL
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/entry_points.txt
- {dtlpy-1.104.14.dist-info → dtlpy-1.105.6.dist-info}/top_level.txt