dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
@@ -1,550 +1,551 @@
1
- import time
2
-
3
- from behave import fixture, use_fixture
4
- import os
5
- import json
6
- import logging
7
- from filelock import FileLock
8
- from dotenv import load_dotenv
9
- import subprocess
10
-
11
- from behave.reporter.summary import SummaryReporter
12
- from behave.formatter.base import StreamOpener
13
- import sys
14
-
15
- import dtlpy as dl
16
- import shutil
17
-
18
- try:
19
- # for local import
20
- from tests.env_from_git_branch import get_env_from_git_branch
21
- except ImportError:
22
- # for remote import
23
- from ..env_from_git_branch import get_env_from_git_branch
24
-
25
-
26
- def before_all(context):
27
- load_dotenv('.test.env')
28
- # Get index driver from env var
29
- context.index_driver_var = os.environ.get("INDEX_DRIVER_VAR", None)
30
-
31
-
32
- @fixture
33
- def after_feature(context, feature):
34
- print_feature_filename(context, feature)
35
-
36
- if hasattr(feature, 'bot'):
37
- try:
38
- feature.bot.delete()
39
- except Exception:
40
- logging.exception('Failed to delete bot')
41
-
42
- if hasattr(feature, 'apps'):
43
- for app in context.feature.apps:
44
- try:
45
- app.uninstall()
46
- except Exception:
47
- logging.exception('Failed to uninstall app')
48
-
49
- if hasattr(feature, 'dpks'):
50
- for dpk in context.feature.dpks:
51
- try:
52
- dpk.delete()
53
- except Exception:
54
- try:
55
- apps = dl.apps.list(
56
- filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.APP,
57
- field="dpkName",
58
- values=dpk.name))
59
- for page in apps:
60
- for app in page:
61
- app.uninstall()
62
- models = dl.models.list(
63
- filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.MODEL,
64
- field="app.dpkName",
65
- values=dpk.name))
66
- for page in models:
67
- for model in page:
68
- model.delete()
69
- dpk.delete()
70
- except:
71
- logging.exception('Failed to delete dpk')
72
-
73
- if hasattr(feature, 'dataloop_feature_integration'):
74
- all_deleted = True
75
- time.sleep(7) # Wait for drivers to delete
76
- for integration_id in feature.to_delete_integrations_ids:
77
- try:
78
- feature.dataloop_feature_project.integrations.delete(integrations_id=integration_id, sure=True,
79
- really=True)
80
- except feature.dataloop_feature_dl.exceptions.NotFound:
81
- pass
82
- except:
83
- all_deleted = False
84
- logging.exception('Failed deleting integration: {}'.format(integration_id))
85
- assert all_deleted
86
-
87
- if hasattr(feature, 'dataloop_feature_project'):
88
- try:
89
- if 'frozen_dataset' in feature.tags:
90
- fix_project_with_frozen_datasets(project=feature.dataloop_feature_project)
91
- feature.dataloop_feature_project.delete(True, True)
92
- except Exception:
93
- logging.exception('Failed to delete project')
94
-
95
- if hasattr(context.feature, 'dataloop_feature_org'):
96
- try:
97
- username = os.environ["TEST_SU_USERNAME"]
98
- password = os.environ["TEST_SU_PASSWORD"]
99
- login = dl.login_m2m(
100
- email=username,
101
- password=password
102
- )
103
- assert login, "TEST FAILED: User login failed"
104
- context.dl = dl
105
- success, response = dl.client_api.gen_request(req_type='delete',
106
- path=f'/orgs/{feature.dataloop_feature_org.id}')
107
- if not success:
108
- raise dl.exceptions.PlatformException(response)
109
- logging.info(f'Organization id {feature.dataloop_feature_org.id} deleted successfully')
110
- username = os.environ["TEST_USERNAME"]
111
- password = os.environ["TEST_PASSWORD"]
112
- login = dl.login_m2m(
113
- email=username,
114
- password=password
115
- )
116
- assert login, "TEST FAILED: User login failed"
117
- context.dl = dl
118
- return True
119
- except Exception:
120
- logging.exception('Failed to delete organization')
121
-
122
- # update api call json
123
- if hasattr(feature, 'dataloop_feature_dl'):
124
- if not os.environ.get('IGNORE_API_CALLS', 'false') == 'true':
125
- try:
126
- api_calls_path = os.path.join(os.environ['DATALOOP_TEST_ASSETS'], 'api_calls.json')
127
- with open(api_calls_path, 'r') as f:
128
- api_calls = json.load(f)
129
- if context.feature.name in api_calls:
130
- api_calls[context.feature.name] += feature.dataloop_feature_dl.client_api.calls_counter.number
131
- else:
132
- api_calls[context.feature.name] = feature.dataloop_feature_dl.client_api.calls_counter.number
133
- # lock the file for multi processes needs
134
- with FileLock("api_calls.json.lock"):
135
- with open(api_calls_path, 'w') as f:
136
- json.dump(api_calls, f)
137
- except Exception:
138
- logging.exception('Failed to update api calls')
139
-
140
- if hasattr(feature, 'dataloop_feature_compute'):
141
- try:
142
- compute = context.feature.dataloop_feature_compute
143
- dl.computes.delete(compute_id=compute.id)
144
- except Exception:
145
- logging.exception('Failed to delete compute')
146
-
147
-
148
- @fixture
149
- def before_scenario(context, scenario):
150
- context.scenario.return_to_user = False
151
-
152
-
153
- @fixture
154
- def after_scenario(context, scenario):
155
- if context.scenario.return_to_user == True:
156
- username = os.environ["TEST_USERNAME"]
157
- password = os.environ["TEST_PASSWORD"]
158
- login = dl.login_m2m(
159
- email=username,
160
- password=password,
161
- )
162
- assert login, "TEST FAILED: User login failed"
163
- print("----------Changed to a Regular user----------")
164
- context.scenario.return_to_user = False
165
- context.dl = dl
166
-
167
-
168
- def get_step_key(step):
169
- return '{}: line {}. {}'.format(step.location.filename, step.location.line, step.name)
170
-
171
-
172
- @fixture
173
- def before_step(context, step):
174
- key = get_step_key(step)
175
- setattr(context, key, time.time())
176
-
177
-
178
- @fixture
179
- def after_step(context, step):
180
- key = get_step_key(step)
181
- start_time = getattr(context, key, None)
182
- total_time = time.time() - start_time
183
- if total_time > 3:
184
- print("######## {}\nStep Duration: {}".format(key, total_time))
185
- delattr(context, key)
186
-
187
-
188
- @fixture
189
- def before_feature(context, feature):
190
- if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME"):
191
- feature.skip("Marked with @rc_only")
192
- return
193
- if 'skip_test' in context.tags:
194
- feature.skip("Marked with @skip_test")
195
- return
196
-
197
-
198
- def fix_project_with_frozen_datasets(project):
199
- datasets = project.datasets.list()
200
- for dataset in datasets:
201
- if dataset.readonly:
202
- dataset.set_readonly(False)
203
-
204
-
205
- @fixture
206
- def before_tag(context, tag):
207
- if "skip_test" in tag:
208
- """
209
- For example: @skip_test_DAT-99999
210
- """
211
- dat = tag.split("_")[-1] if "DAT" in tag else ""
212
- if hasattr(context, "scenario"):
213
- context.scenario.skip(f"Test mark as SKIPPED, Should be merged after {dat}")
214
- if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME"):
215
- if hasattr(context, "scenario"):
216
- context.scenario.skip(f"Test mark as SKIPPED, Should be run only on RC")
217
-
218
-
219
- @fixture
220
- def after_tag(context, tag):
221
- if tag == 'services.delete':
222
- try:
223
- use_fixture(delete_services, context)
224
- except Exception:
225
- logging.exception('Failed to delete service')
226
- elif tag == 'packages.delete':
227
- try:
228
- use_fixture(delete_packages, context)
229
- except Exception:
230
- logging.exception('Failed to delete package')
231
- elif tag == 'pipelines.delete':
232
- try:
233
- use_fixture(delete_pipeline, context)
234
- except Exception:
235
- logging.exception('Failed to delete package')
236
- elif tag == 'feature_set.delete':
237
- try:
238
- use_fixture(delete_feature_set, context)
239
- except Exception:
240
- logging.exception('Failed to delete feature set')
241
- elif tag == 'feature.delete':
242
- try:
243
- use_fixture(delete_feature, context)
244
- except Exception:
245
- logging.exception('Failed to delete feature set')
246
- elif tag == 'bot.create':
247
- try:
248
- use_fixture(delete_bots, context)
249
- except Exception:
250
- logging.exception('Failed to delete bots')
251
- elif tag == 'second_project.delete':
252
- try:
253
- use_fixture(delete_second_project, context)
254
- except Exception:
255
- logging.exception('Failed to delete second project')
256
- elif tag == 'converter.platform_dataset.delete':
257
- try:
258
- use_fixture(delete_converter_dataset, context)
259
- except Exception:
260
- logging.exception('Failed to delete converter dataset')
261
- elif tag == 'datasets.delete':
262
- try:
263
- use_fixture(datasets_delete, context)
264
- except Exception:
265
- logging.exception('Failed to delete dataset')
266
- elif tag == 'drivers.delete':
267
- try:
268
- use_fixture(drivers_delete, context)
269
- except Exception:
270
- logging.exception('Failed to delete driver')
271
- elif tag == 'models.delete':
272
- try:
273
- use_fixture(models_delete, context)
274
- except Exception:
275
- logging.exception('Failed to delete model')
276
- elif tag == 'setenv.reset':
277
- try:
278
- use_fixture(reset_setenv, context)
279
- except Exception:
280
- logging.exception('Failed to reset env')
281
- elif tag == 'restore_json_file':
282
- try:
283
- use_fixture(restore_json_file, context)
284
- except Exception:
285
- logging.exception('Failed to restore json file')
286
- elif tag == 'compute_serviceDriver.delete':
287
- try:
288
- use_fixture(delete_compute_servicedriver, context)
289
- except Exception:
290
- logging.exception('Failed to delete service')
291
- elif tag == 'frozen_dataset':
292
- pass
293
- elif 'testrail-C' in tag:
294
- pass
295
- elif tag == 'wip':
296
- pass
297
- elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED', 'DM-cache']):
298
- pass
299
- else:
300
- raise ValueError('Unknown tag: {}'.format(tag))
301
-
302
-
303
- @fixture
304
- def delete_second_project(context):
305
- if hasattr(context, 'second_project'):
306
- context.second_project.delete(True, True)
307
-
308
-
309
- @fixture
310
- def delete_bots(context):
311
- if not hasattr(context, 'to_delete_projects_ids'):
312
- return
313
-
314
- all_deleted = True
315
- while context.to_delete_projects_ids:
316
- project_id = context.to_delete_projects_ids.pop(0)
317
- try:
318
- project = context.dl.projects.get(project_id=project_id)
319
- for bot in project.bots.list():
320
- try:
321
- bot.delete()
322
- except:
323
- logging.exception('Failed deleting bots: ')
324
- all_deleted = False
325
- pass
326
- except context.dl.exceptions.NotFound:
327
- pass
328
- except:
329
- logging.exception('Failed deleting bots: ')
330
- assert all_deleted
331
-
332
-
333
- @fixture
334
- def delete_packages(context):
335
- if not hasattr(context, 'to_delete_packages_ids'):
336
- return
337
-
338
- all_deleted = True
339
- while context.to_delete_packages_ids:
340
- package_id = context.to_delete_packages_ids.pop(0)
341
- try:
342
- context.dl.packages.delete(package_id=package_id)
343
- except context.dl.exceptions.NotFound:
344
- pass
345
- except:
346
- all_deleted = False
347
- logging.exception('Failed deleting package: ')
348
- assert all_deleted
349
-
350
-
351
- @fixture
352
- def delete_feature_set(context):
353
- if not hasattr(context, 'to_delete_feature_set_ids'):
354
- return
355
-
356
- all_deleted = True
357
- while context.to_delete_feature_set_ids:
358
- feature_set = context.to_delete_feature_set_ids.pop(0)
359
- try:
360
- context.dl.feature_sets.delete(feature_set_id=feature_set)
361
- except context.dl.exceptions.NotFound:
362
- pass
363
- except:
364
- all_deleted = False
365
- logging.exception('Failed deleting feature_set: ')
366
- assert all_deleted
367
-
368
-
369
- @fixture
370
- def delete_feature(context):
371
- if not hasattr(context, 'to_delete_feature_ids'):
372
- return
373
-
374
- all_deleted = True
375
- while context.to_delete_feature_ids:
376
- feature = context.to_delete_feature_ids.pop(0)
377
- try:
378
- context.dl.feature.delete(feature_id=feature)
379
- except context.dl.exceptions.NotFound:
380
- pass
381
- except:
382
- all_deleted = False
383
- logging.exception('Failed deleting feature: ')
384
- assert all_deleted
385
-
386
-
387
- @fixture
388
- def delete_pipeline(context):
389
- if not hasattr(context, 'to_delete_pipelines_ids'):
390
- return
391
-
392
- all_deleted = True
393
- while context.to_delete_pipelines_ids:
394
- pipeline_id = context.to_delete_pipelines_ids.pop(0)
395
- try:
396
- filters = context.dl.Filters(resource=context.dl.FiltersResource.EXECUTION, field='latestStatus.status',
397
- values=['created', 'in-progress'], operator='in')
398
- filters.add(field='pipeline.id', values=pipeline_id)
399
- executions = context.dl.executions.list(filters=filters)
400
- for execution in executions.items:
401
- execution.terminate()
402
- context.dl.pipelines.delete(pipeline_id=pipeline_id)
403
- except context.dl.exceptions.NotFound:
404
- pass
405
- except:
406
- all_deleted = False
407
- logging.exception('Failed deleting pipeline: ')
408
- assert all_deleted
409
-
410
-
411
- @fixture
412
- def delete_converter_dataset(context):
413
- if hasattr(context, 'platform_dataset'):
414
- context.platform_dataset.delete(True, True)
415
-
416
-
417
- @fixture
418
- def delete_services(context):
419
- if not hasattr(context, 'to_delete_services_ids'):
420
- return
421
-
422
- all_deleted = True
423
- while context.to_delete_services_ids:
424
- service_id = context.to_delete_services_ids.pop(0)
425
- try:
426
- context.dl.services.delete(service_id=service_id)
427
- except context.dl.exceptions.NotFound:
428
- pass
429
- except:
430
- all_deleted = False
431
- logging.exception('Failed deleting service: ')
432
- assert all_deleted
433
-
434
-
435
- @fixture
436
- def drivers_delete(context):
437
- if not hasattr(context, 'to_delete_drivers_ids'):
438
- return
439
-
440
- all_deleted = True
441
- time.sleep(25) # Wait for datasets to delete
442
- for driver_id in context.to_delete_drivers_ids:
443
- try:
444
- context.project.drivers.delete(driver_id=driver_id, sure=True, really=True)
445
- except context.dl.exceptions.NotFound:
446
- pass
447
- except:
448
- all_deleted = False
449
- logging.exception('Failed deleting driver: {}'.format(driver_id))
450
- assert all_deleted
451
-
452
-
453
- @fixture
454
- def datasets_delete(context):
455
- if not hasattr(context, 'to_delete_datasets_ids'):
456
- return
457
-
458
- all_deleted = True
459
- for dataset_id in context.to_delete_datasets_ids:
460
- try:
461
- context.project.datasets.delete(dataset_id=dataset_id, sure=True, really=True)
462
- except context.dl.exceptions.NotFound:
463
- pass
464
- except:
465
- all_deleted = False
466
- logging.exception('Failed deleting dataset: {}'.format(dataset_id))
467
- assert all_deleted
468
-
469
-
470
- @fixture
471
- def reset_setenv(context):
472
- _, base_env = get_env_from_git_branch()
473
- cmds = ["dlp", "api", "setenv", "-e", "{}".format(base_env)]
474
- p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
475
- context.out, context.err = p.communicate()
476
- # save return code
477
- context.return_code = p.returncode
478
- assert context.return_code == 0, "AFTER TEST FAILED: {}".format(context.err)
479
-
480
-
481
- def print_feature_filename(context, feature):
482
- s_r = SummaryReporter(context.config)
483
- stream = getattr(sys, s_r.output_stream_name, sys.stderr)
484
- p_stream = StreamOpener.ensure_stream_with_encoder(stream)
485
- p_stream.write(f"Feature Finished : {feature.filename.split('/')[-1]}\n")
486
- p_stream.write(f"Status: {str(feature.status).split('.')[-1]} - Duration: {feature.duration:.2f} seconds\n")
487
-
488
-
489
- @fixture
490
- def models_delete(context):
491
- all_deleted = True
492
- if hasattr(context, 'to_delete_model_ids'):
493
- for model_id in context.to_delete_model_ids:
494
- try:
495
- context.project.models.delete(model_id=model_id)
496
- except context.dl.exceptions.NotFound:
497
- pass
498
- except:
499
- all_deleted = False
500
- logging.exception('Failed deleting model: {}'.format(model_id))
501
-
502
- for model in context.project.models.list().all():
503
- try:
504
- model.delete()
505
- except context.dl.exceptions.NotFound:
506
- pass
507
- except:
508
- all_deleted = False
509
- logging.exception('Failed deleting model: {}'.format(model.id))
510
- assert all_deleted
511
-
512
-
513
- def delete_compute_servicedriver(context):
514
- if not hasattr(context, 'to_delete_computes_ids') and not hasattr(context, 'to_delete_service_drivers_ids'):
515
- return
516
-
517
- all_deleted = True
518
- for service_driver_id in context.to_delete_service_drivers_ids:
519
- try:
520
- context.dl.service_drivers.delete(service_driver_id=service_driver_id)
521
- except context.dl.exceptions.NotFound:
522
- pass
523
- except:
524
- all_deleted = False
525
- logging.exception('Failed deleting serviceDriver: {}'.format(service_driver_id))
526
- assert all_deleted
527
-
528
- all_deleted = True
529
- for compute_id in context.to_delete_computes_ids:
530
- try:
531
- context.dl.computes.delete(compute_id=compute_id)
532
- except context.dl.exceptions.NotFound:
533
- pass
534
- except:
535
- all_deleted = False
536
- logging.exception('Failed deleting compute: {}'.format(compute_id))
537
- assert all_deleted
538
-
539
-
540
- def restore_json_file(context):
541
- if not hasattr(context.feature, 'dataloop_feature_project'):
542
- return
543
- if not hasattr(context, 'backup_path') or not hasattr(context, 'original_path'):
544
- assert False, 'Please make sure to set the original_path and backup_path in the context'
545
- # Restore the file from the backup
546
- if os.path.exists(context.backup_path):
547
- shutil.copy(context.backup_path, context.original_path)
548
- os.remove(context.backup_path) # Clean up the backup
549
- else:
550
- raise FileNotFoundError(f"Backup file not found for {context.original_path}")
1
+ import time
2
+
3
+ from behave import fixture, use_fixture
4
+ import os
5
+ import json
6
+ import logging
7
+ from filelock import FileLock
8
+ from dotenv import load_dotenv
9
+ import subprocess
10
+
11
+ from behave.reporter.summary import SummaryReporter
12
+ from behave.formatter.base import StreamOpener
13
+ import sys
14
+
15
+ import dtlpy as dl
16
+ import shutil
17
+
18
+ try:
19
+ # for local import
20
+ from tests.env_from_git_branch import get_env_from_git_branch
21
+ except ImportError:
22
+ # for remote import
23
+ from ..env_from_git_branch import get_env_from_git_branch
24
+
25
+
26
+ def before_all(context):
27
+ load_dotenv('.test.env')
28
+ # Get index driver from env var
29
+ context.index_driver_var = os.environ.get("INDEX_DRIVER_VAR", None)
30
+
31
+
32
def _delete_feature_bot(feature):
    # Best-effort: remove the bot created for this feature.
    try:
        feature.bot.delete()
    except Exception:
        logging.exception('Failed to delete bot')


def _uninstall_feature_apps(context):
    # Best-effort: uninstall every app registered during the feature.
    for app in context.feature.apps:
        try:
            app.uninstall()
        except Exception:
            logging.exception('Failed to uninstall app')


def _delete_feature_dpks(context):
    # Delete every dpk registered during the feature; on failure, remove the
    # apps/models that may still reference it and retry the deletion once.
    for dpk in context.feature.dpks:
        try:
            dpk.delete()
        except Exception:
            try:
                apps = dl.apps.list(
                    filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.APP,
                                       field="dpkName",
                                       values=dpk.name))
                for page in apps:
                    for app in page:
                        app.uninstall()
                models = dl.models.list(
                    filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.MODEL,
                                       field="app.dpkName",
                                       values=dpk.name))
                for page in models:
                    for model in page:
                        model.delete()
                dpk.delete()
            except Exception:
                logging.exception('Failed to delete dpk')


def _delete_feature_integrations(feature):
    # Delete integrations created by the feature; any non-NotFound failure
    # fails the hook via the assert so leaks are visible in CI.
    all_deleted = True
    time.sleep(7)  # Wait for drivers to delete
    for integration_id in feature.to_delete_integrations_ids:
        try:
            feature.dataloop_feature_project.integrations.delete(integrations_id=integration_id, sure=True,
                                                                 really=True)
        except feature.dataloop_feature_dl.exceptions.NotFound:
            pass
        except Exception:
            all_deleted = False
            logging.exception('Failed deleting integration: {}'.format(integration_id))
    assert all_deleted


def _delete_feature_org(context, feature):
    # Delete the test organization using the superuser account, then log back
    # in as the regular test user. Returns True on success (the hook stops
    # early in that case, matching the original flow); False on failure.
    try:
        username = os.environ["TEST_SU_USERNAME"]
        password = os.environ["TEST_SU_PASSWORD"]
        login = dl.login_m2m(
            email=username,
            password=password
        )
        assert login, "TEST FAILED: User login failed"
        context.dl = dl
        success, response = dl.client_api.gen_request(req_type='delete',
                                                      path=f'/orgs/{feature.dataloop_feature_org.id}')
        if not success:
            raise dl.exceptions.PlatformException(response)
        logging.info(f'Organization id {feature.dataloop_feature_org.id} deleted successfully')
        username = os.environ["TEST_USERNAME"]
        password = os.environ["TEST_PASSWORD"]
        login = dl.login_m2m(
            email=username,
            password=password
        )
        assert login, "TEST FAILED: User login failed"
        context.dl = dl
        return True
    except Exception:
        logging.exception('Failed to delete organization')
    return False


def _update_api_calls(context, feature):
    # Accumulate the feature's API-call count into the shared api_calls.json.
    try:
        api_calls_path = os.path.join(os.environ['DATALOOP_TEST_ASSETS'], 'api_calls.json')
        # Hold the lock for the whole read-modify-write: the original locked
        # only the write, so two parallel behave processes could both read,
        # then overwrite each other's counts.
        with FileLock("api_calls.json.lock"):
            with open(api_calls_path, 'r') as f:
                api_calls = json.load(f)
            count = feature.dataloop_feature_dl.client_api.calls_counter.number
            api_calls[context.feature.name] = api_calls.get(context.feature.name, 0) + count
            with open(api_calls_path, 'w') as f:
                json.dump(api_calls, f)
    except Exception:
        logging.exception('Failed to update api calls')


@fixture
def after_feature(context, feature):
    """Behave hook: clean up everything the feature created.

    Removes (best-effort, logged) the feature's bot, apps, dpks,
    integrations, project, organization and compute, and records the
    feature's API-call count. Returns True early when the test
    organization was deleted (original behavior skips the remaining
    accounting/compute cleanup in that case).
    """
    print_feature_filename(context, feature)

    if hasattr(feature, 'bot'):
        _delete_feature_bot(feature)

    if hasattr(feature, 'apps'):
        _uninstall_feature_apps(context)

    if hasattr(feature, 'dpks'):
        _delete_feature_dpks(context)

    if hasattr(feature, 'dataloop_feature_integration'):
        # NOTE(review): the guard checks 'dataloop_feature_integration' but
        # the helper iterates 'to_delete_integrations_ids' — confirm the
        # steps always set both together.
        _delete_feature_integrations(feature)

    if hasattr(feature, 'dataloop_feature_project'):
        try:
            if 'frozen_dataset' in feature.tags:
                fix_project_with_frozen_datasets(project=feature.dataloop_feature_project)
            feature.dataloop_feature_project.delete(True, True)
        except Exception:
            logging.exception('Failed to delete project')

    if hasattr(context.feature, 'dataloop_feature_org'):
        if _delete_feature_org(context, feature):
            return True

    # update api call json
    if hasattr(feature, 'dataloop_feature_dl'):
        if not os.environ.get('IGNORE_API_CALLS', 'false') == 'true':
            _update_api_calls(context, feature)

    if hasattr(feature, 'dataloop_feature_compute'):
        try:
            compute = context.feature.dataloop_feature_compute
            dl.computes.delete(compute_id=compute.id)
        except Exception:
            logging.exception('Failed to delete compute')
146
+
147
+
148
@fixture
def before_scenario(context, scenario):
    """Behave hook: reset the per-scenario account-switch flag.

    Scenarios that log in as a different account set this to True so that
    after_scenario restores the regular test user.
    """
    context.scenario.return_to_user = False
151
+
152
+
153
@fixture
def after_scenario(context, scenario):
    """Behave hook: restore the regular test user after account switches.

    When a scenario flagged ``context.scenario.return_to_user``, log back in
    with the regular test credentials so the next scenario starts from a
    known identity. (Idiom fix: guard clause instead of ``== True``.)
    """
    if not context.scenario.return_to_user:
        return
    login = dl.login_m2m(
        email=os.environ["TEST_USERNAME"],
        password=os.environ["TEST_PASSWORD"],
    )
    assert login, "TEST FAILED: User login failed"
    print("----------Changed to a Regular user----------")
    context.scenario.return_to_user = False
    context.dl = dl
166
+
167
+
168
def get_step_key(step):
    """Return a unique, human-readable identifier for a behave step."""
    location = step.location
    return f'{location.filename}: line {location.line}. {step.name}'
170
+
171
+
172
@fixture
def before_step(context, step):
    """Behave hook: record the current step and stamp its start time.

    The timestamp is stored on the context under the step's unique key and
    is read back (and removed) by after_step to report slow steps.
    """
    context.step = step
    setattr(context, get_step_key(step), time.time())
177
+
178
+
179
@fixture
def after_step(context, step):
    """Behave hook: report steps that took longer than 3 seconds.

    Reads the start timestamp stored by before_step and removes it again so
    the context does not accumulate stale attributes.
    """
    key = get_step_key(step)
    start_time = getattr(context, key, None)
    if start_time is None:
        # before_step did not run for this step (e.g. a hook error); the
        # original code crashed here with TypeError on None arithmetic.
        return
    total_time = time.time() - start_time
    if total_time > 3:
        print("######## {}\nStep Duration: {}".format(key, total_time))
    delattr(context, key)
187
+
188
+
189
@fixture
def before_feature(context, feature):
    """Behave hook: skip features that must not run in this environment.

    ``@rc_only`` features run only when the environment name contains 'rc';
    ``@skip_test`` features are always skipped.
    """
    # Default to '' so a missing DLP_ENV_NAME does not raise TypeError on
    # the 'in' check (os.environ.get returns None without a default).
    env_name = os.environ.get("DLP_ENV_NAME", "")
    if 'rc_only' in context.tags and 'rc' not in env_name:
        feature.skip("Marked with @rc_only")
        return
    if 'skip_test' in context.tags:
        feature.skip("Marked with @skip_test")
        return
197
+
198
+
199
def fix_project_with_frozen_datasets(project):
    """Unfreeze every read-only dataset in *project* so it can be deleted."""
    for dataset in project.datasets.list():
        if dataset.readonly:
            dataset.set_readonly(False)
204
+
205
+
206
@fixture
def before_tag(context, tag):
    """Behave hook: skip scenarios based on their tags.

    ``@skip_test_DAT-12345`` skips a scenario until the referenced ticket is
    merged; ``@rc_only`` restricts a scenario to RC environments.
    """
    if "skip_test" in tag:
        # For example: @skip_test_DAT-99999 — extract the ticket id for the
        # skip message.
        dat = tag.split("_")[-1] if "DAT" in tag else ""
        if hasattr(context, "scenario"):
            context.scenario.skip(f"Test mark as SKIPPED, Should be merged after {dat}")
    # Default to '' so a missing DLP_ENV_NAME does not raise TypeError on
    # the 'in' check (os.environ.get returns None without a default).
    if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME", ""):
        if hasattr(context, "scenario"):
            context.scenario.skip(f"Test mark as SKIPPED, Should be run only on RC")
218
+
219
+
220
@fixture
def after_tag(context, tag):
    """Behave hook: run the cleanup fixture matching *tag*.

    Unknown tags raise ValueError so typos in feature files are caught
    early; failures of the cleanup fixture itself are logged and swallowed.
    Fixes copy-pasted log messages: the pipeline, feature and compute
    cleanups previously logged the wrong entity on failure.
    """
    # tag -> (cleanup fixture, message logged when the fixture raises).
    # Built inside the function because the fixtures are defined later in
    # this module.
    cleanup_by_tag = {
        'services.delete': (delete_services, 'Failed to delete service'),
        'packages.delete': (delete_packages, 'Failed to delete package'),
        'pipelines.delete': (delete_pipeline, 'Failed to delete pipeline'),
        'feature_set.delete': (delete_feature_set, 'Failed to delete feature set'),
        'feature.delete': (delete_feature, 'Failed to delete feature'),
        'bot.create': (delete_bots, 'Failed to delete bots'),
        'second_project.delete': (delete_second_project, 'Failed to delete second project'),
        'converter.platform_dataset.delete': (delete_converter_dataset, 'Failed to delete converter dataset'),
        'datasets.delete': (datasets_delete, 'Failed to delete dataset'),
        'drivers.delete': (drivers_delete, 'Failed to delete driver'),
        'models.delete': (models_delete, 'Failed to delete model'),
        'setenv.reset': (reset_setenv, 'Failed to reset env'),
        'restore_json_file': (restore_json_file, 'Failed to restore json file'),
        'compute_serviceDriver.delete': (delete_compute_servicedriver, 'Failed to delete compute service driver'),
    }
    if tag in cleanup_by_tag:
        cleanup, error_message = cleanup_by_tag[tag]
        try:
            use_fixture(cleanup, context)
        except Exception:
            logging.exception(error_message)
    elif tag in ('frozen_dataset', 'wip') or 'testrail-C' in tag:
        # Marker tags with no cleanup action.
        pass
    elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED', 'DM-cache']):
        # Known informational tag families — no cleanup action.
        pass
    else:
        raise ValueError('Unknown tag: {}'.format(tag))
302
+
303
+
304
@fixture
def delete_second_project(context):
    """Delete the secondary project if the scenario created one."""
    _missing = object()
    second = getattr(context, 'second_project', _missing)
    if second is not _missing:
        second.delete(True, True)
308
+
309
+
310
@fixture
def delete_bots(context):
    """Delete all bots of every project queued for deletion.

    Pops project ids from ``context.to_delete_projects_ids``. Missing
    projects are ignored; any other failure is logged and fails the final
    assert. Fix: the original outer except logged the error but never
    cleared ``all_deleted``, so the assert could pass after a failure.
    """
    if not hasattr(context, 'to_delete_projects_ids'):
        return

    all_deleted = True
    while context.to_delete_projects_ids:
        project_id = context.to_delete_projects_ids.pop(0)
        try:
            project = context.dl.projects.get(project_id=project_id)
            for bot in project.bots.list():
                try:
                    bot.delete()
                except Exception:
                    logging.exception('Failed deleting bots: ')
                    all_deleted = False
        except context.dl.exceptions.NotFound:
            pass
        except Exception:
            all_deleted = False
            logging.exception('Failed deleting bots: ')
    assert all_deleted
332
+
333
+
334
@fixture
def delete_packages(context):
    """Delete every package queued on ``context.to_delete_packages_ids``.

    Already-deleted packages are ignored; any other failure is logged and
    fails the final assert.
    """
    if not hasattr(context, 'to_delete_packages_ids'):
        return

    pending = context.to_delete_packages_ids
    all_deleted = True
    while pending:
        package_id = pending.pop(0)
        try:
            context.dl.packages.delete(package_id=package_id)
        except context.dl.exceptions.NotFound:
            pass  # already gone — that is the desired end state
        except:
            all_deleted = False
            logging.exception('Failed deleting package: ')
    assert all_deleted
350
+
351
+
352
@fixture
def delete_feature_set(context):
    """Delete every feature set queued on ``context.to_delete_feature_set_ids``.

    Already-deleted sets are ignored; any other failure is logged and fails
    the final assert.
    """
    if not hasattr(context, 'to_delete_feature_set_ids'):
        return

    pending = context.to_delete_feature_set_ids
    all_deleted = True
    while pending:
        feature_set = pending.pop(0)
        try:
            context.dl.feature_sets.delete(feature_set_id=feature_set)
        except context.dl.exceptions.NotFound:
            pass  # already gone
        except:
            all_deleted = False
            logging.exception('Failed deleting feature_set: ')
    assert all_deleted
368
+
369
+
370
@fixture
def delete_feature(context):
    """Delete every feature queued on ``context.to_delete_feature_ids``.

    Bug fix: the dtlpy repository is ``features`` (plural, consistent with
    ``feature_sets``/``packages``/``services`` used elsewhere in this file);
    the original called ``context.dl.feature.delete``, which raises
    AttributeError — swallowed by the bare except — so the final assert
    failed whenever the list was non-empty.
    """
    if not hasattr(context, 'to_delete_feature_ids'):
        return

    all_deleted = True
    while context.to_delete_feature_ids:
        feature_id = context.to_delete_feature_ids.pop(0)
        try:
            context.dl.features.delete(feature_id=feature_id)
        except context.dl.exceptions.NotFound:
            pass
        except Exception:
            all_deleted = False
            logging.exception('Failed deleting feature: ')
    assert all_deleted
386
+
387
+
388
@fixture
def delete_pipeline(context):
    """Terminate running executions of each queued pipeline, then delete it.

    Already-deleted pipelines are ignored; any other failure is logged and
    fails the final assert.
    """
    if not hasattr(context, 'to_delete_pipelines_ids'):
        return

    pending = context.to_delete_pipelines_ids
    all_deleted = True
    while pending:
        pipeline_id = pending.pop(0)
        try:
            # Executions still running would block the pipeline deletion.
            filters = context.dl.Filters(resource=context.dl.FiltersResource.EXECUTION, field='latestStatus.status',
                                         values=['created', 'in-progress'], operator='in')
            filters.add(field='pipeline.id', values=pipeline_id)
            for execution in context.dl.executions.list(filters=filters).items:
                execution.terminate()
            context.dl.pipelines.delete(pipeline_id=pipeline_id)
        except context.dl.exceptions.NotFound:
            pass  # already gone
        except:
            all_deleted = False
            logging.exception('Failed deleting pipeline: ')
    assert all_deleted
410
+
411
+
412
@fixture
def delete_converter_dataset(context):
    """Delete the platform dataset created by converter tests, if any."""
    _missing = object()
    dataset = getattr(context, 'platform_dataset', _missing)
    if dataset is not _missing:
        dataset.delete(True, True)
416
+
417
+
418
@fixture
def delete_services(context):
    """Delete every service queued on ``context.to_delete_services_ids``.

    Already-deleted services are ignored; any other failure is logged and
    fails the final assert.
    """
    if not hasattr(context, 'to_delete_services_ids'):
        return

    pending = context.to_delete_services_ids
    all_deleted = True
    while pending:
        service_id = pending.pop(0)
        try:
            context.dl.services.delete(service_id=service_id)
        except context.dl.exceptions.NotFound:
            pass  # already gone
        except:
            all_deleted = False
            logging.exception('Failed deleting service: ')
    assert all_deleted
434
+
435
+
436
@fixture
def drivers_delete(context):
    """Delete every driver queued on ``context.to_delete_drivers_ids``.

    Waits for dependent datasets to be removed first; missing drivers are
    ignored, any other failure is logged and fails the final assert.
    """
    if not hasattr(context, 'to_delete_drivers_ids'):
        return

    time.sleep(25)  # Wait for datasets to delete
    failures = 0
    for driver_id in context.to_delete_drivers_ids:
        try:
            context.project.drivers.delete(driver_id=driver_id, sure=True, really=True)
        except context.dl.exceptions.NotFound:
            pass  # already gone
        except:
            failures += 1
            logging.exception('Failed deleting driver: {}'.format(driver_id))
    assert failures == 0
452
+
453
+
454
@fixture
def datasets_delete(context):
    """Delete every dataset queued on ``context.to_delete_datasets_ids``.

    Missing datasets are ignored; any other failure is logged and fails the
    final assert.
    """
    if not hasattr(context, 'to_delete_datasets_ids'):
        return

    failures = 0
    for dataset_id in context.to_delete_datasets_ids:
        try:
            context.project.datasets.delete(dataset_id=dataset_id, sure=True, really=True)
        except context.dl.exceptions.NotFound:
            pass  # already gone
        except:
            failures += 1
            logging.exception('Failed deleting dataset: {}'.format(dataset_id))
    assert failures == 0
469
+
470
+
471
@fixture
def reset_setenv(context):
    """Reset the dlp CLI environment to the one derived from the git branch."""
    _, base_env = get_env_from_git_branch()
    process = subprocess.Popen(["dlp", "api", "setenv", "-e", "{}".format(base_env)],
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    context.out, context.err = process.communicate()
    # save return code
    context.return_code = process.returncode
    assert context.return_code == 0, "AFTER TEST FAILED: {}".format(context.err)
480
+
481
+
482
def print_feature_filename(context, feature):
    """Write the finished feature's filename, status and duration to the
    summary reporter's output stream."""
    reporter = SummaryReporter(context.config)
    raw_stream = getattr(sys, reporter.output_stream_name, sys.stderr)
    out = StreamOpener.ensure_stream_with_encoder(raw_stream)
    out.write(f"Feature Finished : {feature.filename.split('/')[-1]}\n")
    out.write(f"Status: {str(feature.status).split('.')[-1]} - Duration: {feature.duration:.2f} seconds\n")
488
+
489
+
490
@fixture
def models_delete(context):
    """Delete models queued for deletion, then sweep any models still
    listed on the project.

    Missing models are ignored; any other failure is logged and fails the
    final assert.
    """
    all_deleted = True
    if hasattr(context, 'to_delete_model_ids'):
        for queued_id in context.to_delete_model_ids:
            try:
                context.project.models.delete(model_id=queued_id)
            except context.dl.exceptions.NotFound:
                pass  # already gone
            except:
                all_deleted = False
                logging.exception('Failed deleting model: {}'.format(queued_id))

    # Sweep whatever is still listed on the project, regardless of the queue.
    for remaining in context.project.models.list().all():
        try:
            remaining.delete()
        except context.dl.exceptions.NotFound:
            pass
        except:
            all_deleted = False
            logging.exception('Failed deleting model: {}'.format(remaining.id))
    assert all_deleted
512
+
513
+
514
def delete_compute_servicedriver(context):
    """Delete the service drivers and computes queued during the scenario.

    Either id list may be absent from the context; each is handled
    independently via ``getattr(..., [])`` — the original early return only
    fired when *both* attributes were missing, so setting just one of them
    crashed with AttributeError on the other loop. Missing resources are
    ignored; any other failure is logged and fails the matching assert.
    """
    if not hasattr(context, 'to_delete_computes_ids') and not hasattr(context, 'to_delete_service_drivers_ids'):
        return

    all_deleted = True
    for service_driver_id in getattr(context, 'to_delete_service_drivers_ids', []):
        try:
            context.dl.service_drivers.delete(service_driver_id=service_driver_id)
        except context.dl.exceptions.NotFound:
            pass
        except Exception:
            all_deleted = False
            logging.exception('Failed deleting serviceDriver: {}'.format(service_driver_id))
    assert all_deleted

    all_deleted = True
    for compute_id in getattr(context, 'to_delete_computes_ids', []):
        try:
            context.dl.computes.delete(compute_id=compute_id)
        except context.dl.exceptions.NotFound:
            pass
        except Exception:
            all_deleted = False
            logging.exception('Failed deleting compute: {}'.format(compute_id))
    assert all_deleted
539
+
540
+
541
def restore_json_file(context):
    """Restore a JSON file that a scenario modified from its backup copy.

    Expects ``context.original_path`` and ``context.backup_path`` to be set
    by the scenario; the backup is copied back over the original and then
    removed. A no-op when the feature has no project (nothing was set up).

    Raises:
        AssertionError: when the required context attributes are missing.
        FileNotFoundError: when the backup file does not exist.
    """
    if not hasattr(context.feature, 'dataloop_feature_project'):
        return
    if not hasattr(context, 'backup_path') or not hasattr(context, 'original_path'):
        # Raise explicitly instead of `assert False`, which is stripped
        # under `python -O` and would let the fixture silently misbehave.
        raise AssertionError('Please make sure to set the original_path and backup_path in the context')
    # Restore the file from the backup
    if os.path.exists(context.backup_path):
        shutil.copy(context.backup_path, context.original_path)
        os.remove(context.backup_path)  # Clean up the backup
    else:
        raise FileNotFoundError(f"Backup file not found for {context.original_path}")