dtlpy 1.90.39__py3-none-any.whl → 1.92.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. dtlpy/__init__.py +5 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/lock_open.png +0 -0
  4. dtlpy/entities/__init__.py +1 -1
  5. dtlpy/entities/analytic.py +118 -98
  6. dtlpy/entities/annotation.py +22 -31
  7. dtlpy/entities/annotation_collection.py +19 -21
  8. dtlpy/entities/app.py +13 -3
  9. dtlpy/entities/assignment.py +6 -0
  10. dtlpy/entities/base_entity.py +0 -23
  11. dtlpy/entities/command.py +3 -2
  12. dtlpy/entities/dataset.py +53 -3
  13. dtlpy/entities/dpk.py +15 -0
  14. dtlpy/entities/execution.py +13 -1
  15. dtlpy/entities/feature_set.py +3 -0
  16. dtlpy/entities/filters.py +87 -8
  17. dtlpy/entities/integration.py +1 -1
  18. dtlpy/entities/item.py +41 -1
  19. dtlpy/entities/node.py +49 -3
  20. dtlpy/entities/ontology.py +62 -5
  21. dtlpy/entities/package_function.py +2 -0
  22. dtlpy/entities/package_module.py +13 -0
  23. dtlpy/entities/pipeline.py +20 -1
  24. dtlpy/entities/pipeline_execution.py +37 -6
  25. dtlpy/entities/prompt_item.py +240 -27
  26. dtlpy/entities/recipe.py +37 -0
  27. dtlpy/entities/service.py +33 -4
  28. dtlpy/ml/base_model_adapter.py +166 -18
  29. dtlpy/new_instance.py +80 -9
  30. dtlpy/repositories/apps.py +68 -22
  31. dtlpy/repositories/assignments.py +1 -1
  32. dtlpy/repositories/commands.py +10 -2
  33. dtlpy/repositories/datasets.py +143 -13
  34. dtlpy/repositories/dpks.py +34 -1
  35. dtlpy/repositories/executions.py +27 -30
  36. dtlpy/repositories/feature_sets.py +23 -3
  37. dtlpy/repositories/features.py +4 -1
  38. dtlpy/repositories/models.py +1 -1
  39. dtlpy/repositories/packages.py +6 -3
  40. dtlpy/repositories/pipeline_executions.py +58 -5
  41. dtlpy/repositories/services.py +28 -7
  42. dtlpy/repositories/tasks.py +8 -2
  43. dtlpy/repositories/uploader.py +5 -2
  44. dtlpy/services/api_client.py +74 -12
  45. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/METADATA +2 -2
  46. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/RECORD +54 -57
  47. tests/features/environment.py +67 -1
  48. dtlpy/callbacks/__init__.py +0 -16
  49. dtlpy/callbacks/piper_progress_reporter.py +0 -29
  50. dtlpy/callbacks/progress_viewer.py +0 -54
  51. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp +0 -0
  52. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.bat +0 -0
  53. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.py +0 -0
  54. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/LICENSE +0 -0
  55. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/WHEEL +0 -0
  56. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/entry_points.txt +0 -0
  57. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/top_level.txt +0 -0
tests/features/environment.py
@@ -27,6 +27,7 @@ def before_all(context):
     # Get index driver from env var
     context.index_driver_var = os.environ.get("INDEX_DRIVER_VAR", None)
 
+
 @fixture
 def after_feature(context, feature):
     print_feature_filename(context, feature)
@@ -57,6 +58,13 @@ def after_feature(context, feature):
                 for page in apps:
                     for app in page:
                         app.uninstall()
+                models = dl.models.list(
+                    filters=dl.Filters(use_defaults=False, resource=dl.FiltersResource.MODEL,
+                                       field="app.dpkName",
+                                       values=dpk.name))
+                for page in models:
+                    for model in page:
+                        model.delete()
                 dpk.delete()
             except:
                 logging.exception('Failed to delete dpk')
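The hunk above extends the DPK cleanup in after_feature: once the apps installed from a DPK are uninstalled, any models created from that DPK's app (matched on the app.dpkName field) are deleted as well, and only then is the DPK itself removed. As a standalone snippet the same cleanup looks roughly like this; a minimal sketch, assuming an authenticated dtlpy session and a dpk entity fetched earlier via dl.dpks.get(...):

import dtlpy as dl

# Assumes an authenticated session and an existing `dpk` entity
filters = dl.Filters(use_defaults=False,
                     resource=dl.FiltersResource.MODEL,
                     field="app.dpkName",
                     values=dpk.name)
models = dl.models.list(filters=filters)  # paged result
for page in models:
    for model in page:
        model.delete()  # drop models created by the dpk's app
dpk.delete()  # remove the dpk once its models are gone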
@@ -148,6 +156,26 @@ def after_scenario(context, scenario):
     context.dl = dl
 
 
+def get_step_key(step):
+    return '{}: line {}. {}'.format(step.location.filename, step.location.line, step.name)
+
+
+@fixture
+def before_step(context, step):
+    key = get_step_key(step)
+    setattr(context, key, time.time())
+
+
+@fixture
+def after_step(context, step):
+    key = get_step_key(step)
+    start_time = getattr(context, key, None)
+    total_time = time.time() - start_time
+    if total_time > 3:
+        print("######## {}\nStep Duration: {}".format(key, total_time))
+    delattr(context, key)
+
+
 @fixture
 def before_feature(context, feature):
     if 'rc_only' in context.tags and 'rc' not in os.environ.get("DLP_ENV_NAME"):
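The new before_step/after_step hooks time every step: before_step stamps the current time on the context under a key derived from the step's file, line, and name, and after_step reports any step that took longer than three seconds before removing the attribute. A small, self-contained illustration of the key format (the feature path and step name here are made up):

from types import SimpleNamespace


def get_step_key(step):
    return '{}: line {}. {}'.format(step.location.filename, step.location.line, step.name)


# Stand-in for the step object behave passes to the hooks
step = SimpleNamespace(
    location=SimpleNamespace(filename='tests/features/app_install.feature', line=12),
    name='I install the app')

print(get_step_key(step))
# -> tests/features/app_install.feature: line 12. I install the app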
@@ -168,6 +196,9 @@ def fix_project_with_frozen_datasets(project):
 @fixture
 def before_tag(context, tag):
     if "skip_test" in tag:
+        """
+        For example: @skip_test_DAT-99999
+        """
         dat = tag.split("_")[-1] if "DAT" in tag else ""
         if hasattr(context, "scenario"):
             context.scenario.skip(f"Test mark as SKIPPED, Should be merged after {dat}")
@@ -225,6 +256,11 @@ def after_tag(context, tag):
             use_fixture(drivers_delete, context)
         except Exception:
             logging.exception('Failed to delete driver')
+    elif tag == 'models.delete':
+        try:
+            use_fixture(models_delete, context)
+        except Exception:
+            logging.exception('Failed to delete model')
     elif tag == 'setenv.reset':
         try:
             use_fixture(reset_setenv, context)
@@ -335,6 +371,11 @@ def delete_pipeline(context):
     while context.to_delete_pipelines_ids:
         pipeline_id = context.to_delete_pipelines_ids.pop(0)
         try:
+            filters = context.dl.Filters(resource=context.dl.FiltersResource.EXECUTION, field='latestStatus.status', values=['created', 'in-progress'], operator='in')
+            filters.add(field='pipeline.id', values=pipeline_id)
+            executions = context.dl.executions.list(filters=filters)
+            for execution in executions.items:
+                execution.terminate()
             context.dl.pipelines.delete(pipeline_id=pipeline_id)
         except context.dl.exceptions.NotFound:
             pass
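This hunk makes pipeline cleanup terminate any executions still in the 'created' or 'in-progress' state before the pipeline itself is deleted, presumably so the delete is not attempted while runs are still active. Outside the test harness the same pattern looks roughly like this; a sketch, assuming an authenticated dtlpy session and a placeholder pipeline id:

import dtlpy as dl

pipeline_id = '<pipeline-id>'  # placeholder

# Find executions of this pipeline that are still created/in-progress
filters = dl.Filters(resource=dl.FiltersResource.EXECUTION,
                     field='latestStatus.status',
                     values=['created', 'in-progress'],
                     operator='in')
filters.add(field='pipeline.id', values=pipeline_id)
executions = dl.executions.list(filters=filters)

for execution in executions.items:
    execution.terminate()  # stop active runs first

dl.pipelines.delete(pipeline_id=pipeline_id)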
@@ -418,4 +459,29 @@ def print_feature_filename(context, feature):
     s_r = SummaryReporter(context.config)
     stream = getattr(sys, s_r.output_stream_name, sys.stderr)
     p_stream = StreamOpener.ensure_stream_with_encoder(stream)
-    p_stream.write("{}\n".format(feature.filename.split('/')[-1]))
+    p_stream.write(f"Feature Finished : {feature.filename.split('/')[-1]}\n")
+    p_stream.write(f"Status: {str(feature.status).split('.')[-1]} - Duration: {feature.duration:.2f} seconds\n")
+
+
+@fixture
+def models_delete(context):
+    all_deleted = True
+    if hasattr(context, 'to_delete_model_ids'):
+        for model_id in context.to_delete_model_ids:
+            try:
+                context.project.models.delete(model_id=model_id)
+            except context.dl.exceptions.NotFound:
+                pass
+            except:
+                all_deleted = False
+                logging.exception('Failed deleting model: {}'.format(model_id))
+
+    for model in context.project.models.list().all():
+        try:
+            model.delete()
+        except context.dl.exceptions.NotFound:
+            pass
+        except:
+            all_deleted = False
+            logging.exception('Failed deleting model: {}'.format(model.id))
+    assert all_deleted
dtlpy/callbacks/__init__.py (deleted)
@@ -1,16 +0,0 @@
-#! /usr/bin/env python3
-# This file is part of DTLPY.
-#
-# DTLPY is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# DTLPY is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with DTLPY. If not, see <http://www.gnu.org/licenses/>.
-# from .progress_viewer import ProgressViewer
dtlpy/callbacks/piper_progress_reporter.py (deleted)
@@ -1,29 +0,0 @@
-def get_callback(progress):
-    from keras.callbacks import Callback
-    import numpy as np
-    import logging
-    import time
-
-    logger = logging.getLogger(name='dtlpy')
-
-    class PiperProgressReporter(Callback):
-        def __init__(self, progress):
-            super(PiperProgressReporter, self).__init__()
-            self.progress = progress
-            self.results = dict()
-            self.epoch_time_start = None
-
-        def on_train_begin(self, logs=None):
-            self.results = dict()
-
-        def on_epoch_begin(self, batch, logs=None):
-            self.epoch_time_start = time.time()
-
-        def on_epoch_end(self, epoch, logs=None):
-            logs_dict = dict(zip(list(logs.keys()), [float(num) for num in np.array(list(logs.values())).tolist()]))
-            self.results[epoch] = logs_dict
-            self.results[epoch]['runtime'] = time.time() - self.epoch_time_start
-            self.progress.report_output(output={'epoch': epoch,
-                                                'logs': logs_dict})
-
-    return PiperProgressReporter(progress)
dtlpy/callbacks/progress_viewer.py (deleted)
@@ -1,54 +0,0 @@
-def main():
-    from keras.callbacks import Callback
-    import dtlpy as dl
-    import numpy as np
-    import logging
-    import time
-    import json
-    import os
-
-    logger = logging.getLogger(name='dtlpy')
-
-    class ProgressViewer(Callback):
-        def __init__(self, session_id, directory=None):
-            super(ProgressViewer, self).__init__()
-            # init Dataloop instance
-            # get sessions artifact
-            self.session = dl.sessions.get(session_id=session_id)
-            artifacts = self.session.artifacts.list()
-            self.artifact = None
-            for artifact in artifacts:
-                if artifact.type == 'progress':
-                    self.artifact = artifact
-                    logger.info('Progress artifact found. overwriting. artifact_id: %s' % self.artifact.id)
-                    break
-            if self.artifact is None:
-                self.artifact = self.session.artifacts.create(artifact_name='progress.yml',
-                                                              artifact_type='progress',
-                                                              description='update progress on each epoch')
-
-                logger.info('[INFO] Creating progress artifact. artifact_id: %s' % self.artifact.id)
-            if directory is None:
-                directory = './results'
-            if not os.path.isdir(directory):
-                os.makedirs(directory)
-            self.filename = os.path.join(directory, 'progress.yml')
-            self.results = dict()
-            self.epoch_time_start = None
-
-        def on_train_begin(self, logs=None):
-            self.results = dict()
-
-        def on_epoch_begin(self, batch, logs=None):
-            self.epoch_time_start = time.time()
-
-        def on_epoch_end(self, epoch, logs=None):
-            self.results[epoch] = dict(zip(list(logs.keys()), np.array(list(logs.values())).tolist()))
-            self.results[epoch]['runtime'] = time.time() - self.epoch_time_start
-            with open(self.filename, 'w') as f:
-                json.dump(self.results, f)
-            self.session.artifacts.upload(filepath=self.filename,
-                                          artifact_name='progress.yml',
-                                          artifact_type='progress')
-
-    return ProgressViewer
Files without changes: the dlp scripts, LICENSE, WHEEL, entry_points.txt and top_level.txt were only moved to the new version's directories, with no content changes.