dtlpy 1.116.6__py3-none-any.whl → 1.118.12__py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- dtlpy/__init__.py +1 -1
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +1 -1
- dtlpy/entities/annotation.py +1 -1
- dtlpy/entities/app.py +1 -1
- dtlpy/entities/compute.py +1 -0
- dtlpy/entities/dataset.py +17 -2
- dtlpy/entities/feature_set.py +7 -0
- dtlpy/entities/item.py +16 -0
- dtlpy/entities/model.py +1 -1
- dtlpy/entities/ontology.py +1 -1
- dtlpy/entities/paged_entities.py +7 -3
- dtlpy/entities/service.py +11 -0
- dtlpy/ml/base_model_adapter.py +68 -37
- dtlpy/repositories/apps.py +12 -13
- dtlpy/repositories/datasets.py +165 -84
- dtlpy/repositories/downloader.py +299 -118
- dtlpy/repositories/feature_sets.py +159 -70
- dtlpy/repositories/recipes.py +15 -5
- dtlpy/services/api_client.py +5 -4
- {dtlpy-1.116.6.dist-info → dtlpy-1.118.12.dist-info}/METADATA +14 -15
- {dtlpy-1.116.6.dist-info → dtlpy-1.118.12.dist-info}/RECORD +29 -31
- {dtlpy-1.116.6.dist-info → dtlpy-1.118.12.dist-info}/WHEEL +1 -1
- {dtlpy-1.116.6.dist-info → dtlpy-1.118.12.dist-info}/top_level.txt +0 -1
- tests/features/__init__.py +0 -0
- tests/features/environment.py +0 -551
- {dtlpy-1.116.6.data → dtlpy-1.118.12.data}/scripts/dlp +0 -0
- {dtlpy-1.116.6.data → dtlpy-1.118.12.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.116.6.data → dtlpy-1.118.12.data}/scripts/dlp.py +0 -0
- {dtlpy-1.116.6.dist-info → dtlpy-1.118.12.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.116.6.dist-info → dtlpy-1.118.12.dist-info}/licenses/LICENSE +0 -0
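The largest change in this diff is the removal of tests/features/environment.py, shown below. That file is a standard behave environment module: behave imports features/environment.py automatically and invokes its hook functions (before_all, after_feature, before_tag, and so on) by name at fixed points in the test lifecycle. A minimal sketch of that mechanism follows, for context only; the hook names and signatures are behave's documented API, while the bodies and the cleanup_ids attribute are hypothetical placeholders, not taken from the package.

```python
# Minimal sketch of a behave environment module (hypothetical, for context only).
# behave loads features/environment.py on startup and calls these functions by
# name; the signatures are behave's hook API, the bodies are placeholders.
import logging


def before_all(context):
    # Runs once before the entire run; shared state is attached to `context`.
    context.cleanup_ids = []  # hypothetical attribute, mirroring the to_delete_* lists in the deleted file


def after_feature(context, feature):
    # Runs after every feature file; the deleted module performed its platform
    # cleanup (bots, apps, dpks, projects, computes) from hooks like this one.
    for resource_id in context.cleanup_ids:
        logging.info('cleaning up %s', resource_id)
```

Because behave resolves these hooks dynamically from the file path rather than via imports, deleting the module removes the suite's setup and cleanup behavior without breaking imports elsewhere.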
tests/features/environment.py
DELETED

@@ -1,551 +0,0 @@

```python
import time

from behave import fixture, use_fixture
import os
import json
import logging
from filelock import FileLock
from dotenv import load_dotenv
import subprocess

from behave.reporter.summary import SummaryReporter
from behave.formatter.base import StreamOpener
import sys

import dtlpy as dl
import shutil

try:
    # for local import
    from tests.env_from_git_branch import get_env_from_git_branch
except ImportError:
    # for remote import
    from ..env_from_git_branch import get_env_from_git_branch


def before_all(context):
    load_dotenv('.test.env')
    # Get index driver from env var
    context.index_driver_var = os.environ.get("INDEX_DRIVER_VAR", None)


@fixture
def after_feature(context, feature):
    print_feature_filename(context, feature)

    if hasattr(feature, 'bot'):
        try:
            feature.bot.delete()
        except Exception:
            logging.exception('Failed to delete bot')

    if hasattr(feature, 'apps'):
        for app in context.feature.apps:
            try:
                app.uninstall()
            except Exception:
                logging.exception('Failed to uninstall app')

    if hasattr(feature, 'dpks'):
        for dpk in context.feature.dpks:
            try:
                dpk.delete()
            except Exception:
                try:
                    apps = dl.apps.list(
                        filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.APP,
                                           field="dpkName",
                                           values=dpk.name))
                    for page in apps:
                        for app in page:
                            app.uninstall()
                    models = dl.models.list(
                        filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.MODEL,
                                           field="app.dpkName",
                                           values=dpk.name))
                    for page in models:
                        for model in page:
                            model.delete()
                    dpk.delete()
                except:
                    logging.exception('Failed to delete dpk')

    if hasattr(feature, 'dataloop_feature_integration'):
        all_deleted = True
        time.sleep(7) # Wait for drivers to delete
        for integration_id in feature.to_delete_integrations_ids:
            try:
                feature.dataloop_feature_project.integrations.delete(integrations_id=integration_id, sure=True,
                                                                     really=True)
            except feature.dataloop_feature_dl.exceptions.NotFound:
                pass
            except:
                all_deleted = False
                logging.exception('Failed deleting integration: {}'.format(integration_id))
        assert all_deleted

    if hasattr(feature, 'dataloop_feature_project'):
        try:
            if 'frozen_dataset' in feature.tags:
                fix_project_with_frozen_datasets(project=feature.dataloop_feature_project)
            feature.dataloop_feature_project.delete(True, True)
        except Exception:
            logging.exception('Failed to delete project')

    if hasattr(context.feature, 'dataloop_feature_org'):
        try:
            username = os.environ["TEST_SU_USERNAME"]
            password = os.environ["TEST_SU_PASSWORD"]
            login = dl.login_m2m(
                email=username,
                password=password
            )
            assert login, "TEST FAILED: User login failed"
            context.dl = dl
            success, response = dl.client_api.gen_request(req_type='delete',
                                                          path=f'/orgs/{feature.dataloop_feature_org.id}')
            if not success:
                raise dl.exceptions.PlatformException(response)
            logging.info(f'Organization id {feature.dataloop_feature_org.id} deleted successfully')
            username = os.environ["TEST_USERNAME"]
            password = os.environ["TEST_PASSWORD"]
            login = dl.login_m2m(
                email=username,
                password=password
            )
            assert login, "TEST FAILED: User login failed"
            context.dl = dl
            return True
        except Exception:
            logging.exception('Failed to delete organization')

    # update api call json
    if hasattr(feature, 'dataloop_feature_dl'):
        if not os.environ.get('IGNORE_API_CALLS', 'false') == 'true':
            try:
                api_calls_path = os.path.join(os.environ['DATALOOP_TEST_ASSETS'], 'api_calls.json')
                with open(api_calls_path, 'r') as f:
                    api_calls = json.load(f)
                if context.feature.name in api_calls:
                    api_calls[context.feature.name] += feature.dataloop_feature_dl.client_api.calls_counter.number
                else:
                    api_calls[context.feature.name] = feature.dataloop_feature_dl.client_api.calls_counter.number
                # lock the file for multi processes needs
                with FileLock("api_calls.json.lock"):
                    with open(api_calls_path, 'w') as f:
                        json.dump(api_calls, f)
            except Exception:
                logging.exception('Failed to update api calls')

    if hasattr(feature, 'dataloop_feature_compute'):
        try:
            compute = context.feature.dataloop_feature_compute
            dl.computes.delete(compute_id=compute.id)
        except Exception:
            logging.exception('Failed to delete compute')


@fixture
def before_scenario(context, scenario):
    context.scenario.return_to_user = False


@fixture
def after_scenario(context, scenario):
    if context.scenario.return_to_user == True:
        username = os.environ["TEST_USERNAME"]
        password = os.environ["TEST_PASSWORD"]
        login = dl.login_m2m(
            email=username,
            password=password,
        )
        assert login, "TEST FAILED: User login failed"
        print("----------Changed to a Regular user----------")
        context.scenario.return_to_user = False
        context.dl = dl


def get_step_key(step):
    return '{}: line {}. {}'.format(step.location.filename, step.location.line, step.name)


@fixture
def before_step(context, step):
    context.step = step
    key = get_step_key(step)
    setattr(context, key, time.time())


@fixture
def after_step(context, step):
    key = get_step_key(step)
    start_time = getattr(context, key, None)
    total_time = time.time() - start_time
    if total_time > 3:
        print("######## {}\nStep Duration: {}".format(key, total_time))
    delattr(context, key)


@fixture
def before_feature(context, feature):
    if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME"):
        feature.skip("Marked with @rc_only")
        return
    if 'skip_test' in context.tags:
        feature.skip("Marked with @skip_test")
        return


def fix_project_with_frozen_datasets(project):
    datasets = project.datasets.list()
    for dataset in datasets:
        if dataset.readonly:
            dataset.set_readonly(False)


@fixture
def before_tag(context, tag):
    if "skip_test" in tag:
        """
        For example: @skip_test_DAT-99999
        """
        dat = tag.split("_")[-1] if "DAT" in tag else ""
        if hasattr(context, "scenario"):
            context.scenario.skip(f"Test mark as SKIPPED, Should be merged after {dat}")
    if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME"):
        if hasattr(context, "scenario"):
            context.scenario.skip(f"Test mark as SKIPPED, Should be run only on RC")


@fixture
def after_tag(context, tag):
    if tag == 'services.delete':
        try:
            use_fixture(delete_services, context)
        except Exception:
            logging.exception('Failed to delete service')
    elif tag == 'packages.delete':
        try:
            use_fixture(delete_packages, context)
        except Exception:
            logging.exception('Failed to delete package')
    elif tag == 'pipelines.delete':
        try:
            use_fixture(delete_pipeline, context)
        except Exception:
            logging.exception('Failed to delete package')
    elif tag == 'feature_set.delete':
        try:
            use_fixture(delete_feature_set, context)
        except Exception:
            logging.exception('Failed to delete feature set')
    elif tag == 'feature.delete':
        try:
            use_fixture(delete_feature, context)
        except Exception:
            logging.exception('Failed to delete feature set')
    elif tag == 'bot.create':
        try:
            use_fixture(delete_bots, context)
        except Exception:
            logging.exception('Failed to delete bots')
    elif tag == 'second_project.delete':
        try:
            use_fixture(delete_second_project, context)
        except Exception:
            logging.exception('Failed to delete second project')
    elif tag == 'converter.platform_dataset.delete':
        try:
            use_fixture(delete_converter_dataset, context)
        except Exception:
            logging.exception('Failed to delete converter dataset')
    elif tag == 'datasets.delete':
        try:
            use_fixture(datasets_delete, context)
        except Exception:
            logging.exception('Failed to delete dataset')
    elif tag == 'drivers.delete':
        try:
            use_fixture(drivers_delete, context)
        except Exception:
            logging.exception('Failed to delete driver')
    elif tag == 'models.delete':
        try:
            use_fixture(models_delete, context)
        except Exception:
            logging.exception('Failed to delete model')
    elif tag == 'setenv.reset':
        try:
            use_fixture(reset_setenv, context)
        except Exception:
            logging.exception('Failed to reset env')
    elif tag == 'restore_json_file':
        try:
            use_fixture(restore_json_file, context)
        except Exception:
            logging.exception('Failed to restore json file')
    elif tag == 'compute_serviceDriver.delete':
        try:
            use_fixture(delete_compute_servicedriver, context)
        except Exception:
            logging.exception('Failed to delete service')
    elif tag == 'frozen_dataset':
        pass
    elif 'testrail-C' in tag:
        pass
    elif tag == 'wip':
        pass
    elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED', 'DM-cache']):
        pass
    else:
        raise ValueError('Unknown tag: {}'.format(tag))


@fixture
def delete_second_project(context):
    if hasattr(context, 'second_project'):
        context.second_project.delete(True, True)


@fixture
def delete_bots(context):
    if not hasattr(context, 'to_delete_projects_ids'):
        return

    all_deleted = True
    while context.to_delete_projects_ids:
        project_id = context.to_delete_projects_ids.pop(0)
        try:
            project = context.dl.projects.get(project_id=project_id)
            for bot in project.bots.list():
                try:
                    bot.delete()
                except:
                    logging.exception('Failed deleting bots: ')
                    all_deleted = False
                    pass
        except context.dl.exceptions.NotFound:
            pass
        except:
            logging.exception('Failed deleting bots: ')
    assert all_deleted


@fixture
def delete_packages(context):
    if not hasattr(context, 'to_delete_packages_ids'):
        return

    all_deleted = True
    while context.to_delete_packages_ids:
        package_id = context.to_delete_packages_ids.pop(0)
        try:
            context.dl.packages.delete(package_id=package_id)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting package: ')
    assert all_deleted


@fixture
def delete_feature_set(context):
    if not hasattr(context, 'to_delete_feature_set_ids'):
        return

    all_deleted = True
    while context.to_delete_feature_set_ids:
        feature_set = context.to_delete_feature_set_ids.pop(0)
        try:
            context.dl.feature_sets.delete(feature_set_id=feature_set)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting feature_set: ')
    assert all_deleted


@fixture
def delete_feature(context):
    if not hasattr(context, 'to_delete_feature_ids'):
        return

    all_deleted = True
    while context.to_delete_feature_ids:
        feature = context.to_delete_feature_ids.pop(0)
        try:
            context.dl.feature.delete(feature_id=feature)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting feature: ')
    assert all_deleted


@fixture
def delete_pipeline(context):
    if not hasattr(context, 'to_delete_pipelines_ids'):
        return

    all_deleted = True
    while context.to_delete_pipelines_ids:
        pipeline_id = context.to_delete_pipelines_ids.pop(0)
        try:
            filters = context.dl.Filters(resource=context.dl.FiltersResource.EXECUTION, field='latestStatus.status',
                                         values=['created', 'in-progress'], operator='in')
            filters.add(field='pipeline.id', values=pipeline_id)
            executions = context.dl.executions.list(filters=filters)
            for execution in executions.items:
                execution.terminate()
            context.dl.pipelines.delete(pipeline_id=pipeline_id)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting pipeline: ')
    assert all_deleted


@fixture
def delete_converter_dataset(context):
    if hasattr(context, 'platform_dataset'):
        context.platform_dataset.delete(True, True)


@fixture
def delete_services(context):
    if not hasattr(context, 'to_delete_services_ids'):
        return

    all_deleted = True
    while context.to_delete_services_ids:
        service_id = context.to_delete_services_ids.pop(0)
        try:
            context.dl.services.delete(service_id=service_id)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting service: ')
    assert all_deleted


@fixture
def drivers_delete(context):
    if not hasattr(context, 'to_delete_drivers_ids'):
        return

    all_deleted = True
    time.sleep(25) # Wait for datasets to delete
    for driver_id in context.to_delete_drivers_ids:
        try:
            context.project.drivers.delete(driver_id=driver_id, sure=True, really=True)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting driver: {}'.format(driver_id))
    assert all_deleted


@fixture
def datasets_delete(context):
    if not hasattr(context, 'to_delete_datasets_ids'):
        return

    all_deleted = True
    for dataset_id in context.to_delete_datasets_ids:
        try:
            context.project.datasets.delete(dataset_id=dataset_id, sure=True, really=True)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting dataset: {}'.format(dataset_id))
    assert all_deleted


@fixture
def reset_setenv(context):
    _, base_env = get_env_from_git_branch()
    cmds = ["dlp", "api", "setenv", "-e", "{}".format(base_env)]
    p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    context.out, context.err = p.communicate()
    # save return code
    context.return_code = p.returncode
    assert context.return_code == 0, "AFTER TEST FAILED: {}".format(context.err)


def print_feature_filename(context, feature):
    s_r = SummaryReporter(context.config)
    stream = getattr(sys, s_r.output_stream_name, sys.stderr)
    p_stream = StreamOpener.ensure_stream_with_encoder(stream)
    p_stream.write(f"Feature Finished : {feature.filename.split('/')[-1]}\n")
    p_stream.write(f"Status: {str(feature.status).split('.')[-1]} - Duration: {feature.duration:.2f} seconds\n")


@fixture
def models_delete(context):
    all_deleted = True
    if hasattr(context, 'to_delete_model_ids'):
        for model_id in context.to_delete_model_ids:
            try:
                context.project.models.delete(model_id=model_id)
            except context.dl.exceptions.NotFound:
                pass
            except:
                all_deleted = False
                logging.exception('Failed deleting model: {}'.format(model_id))

    for model in context.project.models.list().all():
        try:
            model.delete()
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting model: {}'.format(model.id))
    assert all_deleted


def delete_compute_servicedriver(context):
    if not hasattr(context, 'to_delete_computes_ids') and not hasattr(context, 'to_delete_service_drivers_ids'):
        return

    all_deleted = True
    for service_driver_id in context.to_delete_service_drivers_ids:
        try:
            context.dl.service_drivers.delete(service_driver_id=service_driver_id)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting serviceDriver: {}'.format(service_driver_id))
    assert all_deleted

    all_deleted = True
    for compute_id in context.to_delete_computes_ids:
        try:
            context.dl.computes.delete(compute_id=compute_id)
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting compute: {}'.format(compute_id))
    assert all_deleted


def restore_json_file(context):
    if not hasattr(context.feature, 'dataloop_feature_project'):
        return
    if not hasattr(context, 'backup_path') or not hasattr(context, 'original_path'):
        assert False, 'Please make sure to set the original_path and backup_path in the context'
    # Restore the file from the backup
    if os.path.exists(context.backup_path):
        shutil.copy(context.backup_path, context.original_path)
        os.remove(context.backup_path) # Clean up the backup
    else:
        raise FileNotFoundError(f"Backup file not found for {context.original_path}")
```