dtlpy 1.90.39__py3-none-any.whl → 1.92.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. dtlpy/__init__.py +5 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/lock_open.png +0 -0
  4. dtlpy/entities/__init__.py +1 -1
  5. dtlpy/entities/analytic.py +118 -98
  6. dtlpy/entities/annotation.py +22 -31
  7. dtlpy/entities/annotation_collection.py +19 -21
  8. dtlpy/entities/app.py +13 -3
  9. dtlpy/entities/assignment.py +6 -0
  10. dtlpy/entities/base_entity.py +0 -23
  11. dtlpy/entities/command.py +3 -2
  12. dtlpy/entities/dataset.py +53 -3
  13. dtlpy/entities/dpk.py +15 -0
  14. dtlpy/entities/execution.py +13 -1
  15. dtlpy/entities/feature_set.py +3 -0
  16. dtlpy/entities/filters.py +87 -8
  17. dtlpy/entities/integration.py +1 -1
  18. dtlpy/entities/item.py +41 -1
  19. dtlpy/entities/node.py +49 -3
  20. dtlpy/entities/ontology.py +62 -5
  21. dtlpy/entities/package_function.py +2 -0
  22. dtlpy/entities/package_module.py +13 -0
  23. dtlpy/entities/pipeline.py +20 -1
  24. dtlpy/entities/pipeline_execution.py +37 -6
  25. dtlpy/entities/prompt_item.py +240 -27
  26. dtlpy/entities/recipe.py +37 -0
  27. dtlpy/entities/service.py +33 -4
  28. dtlpy/ml/base_model_adapter.py +166 -18
  29. dtlpy/new_instance.py +80 -9
  30. dtlpy/repositories/apps.py +68 -22
  31. dtlpy/repositories/assignments.py +1 -1
  32. dtlpy/repositories/commands.py +10 -2
  33. dtlpy/repositories/datasets.py +143 -13
  34. dtlpy/repositories/dpks.py +34 -1
  35. dtlpy/repositories/executions.py +27 -30
  36. dtlpy/repositories/feature_sets.py +23 -3
  37. dtlpy/repositories/features.py +4 -1
  38. dtlpy/repositories/models.py +1 -1
  39. dtlpy/repositories/packages.py +6 -3
  40. dtlpy/repositories/pipeline_executions.py +58 -5
  41. dtlpy/repositories/services.py +28 -7
  42. dtlpy/repositories/tasks.py +8 -2
  43. dtlpy/repositories/uploader.py +5 -2
  44. dtlpy/services/api_client.py +74 -12
  45. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/METADATA +2 -2
  46. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/RECORD +54 -57
  47. tests/features/environment.py +67 -1
  48. dtlpy/callbacks/__init__.py +0 -16
  49. dtlpy/callbacks/piper_progress_reporter.py +0 -29
  50. dtlpy/callbacks/progress_viewer.py +0 -54
  51. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp +0 -0
  52. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.bat +0 -0
  53. {dtlpy-1.90.39.data → dtlpy-1.92.18.data}/scripts/dlp.py +0 -0
  54. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/LICENSE +0 -0
  55. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/WHEEL +0 -0
  56. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/entry_points.txt +0 -0
  57. {dtlpy-1.90.39.dist-info → dtlpy-1.92.18.dist-info}/top_level.txt +0 -0
dtlpy/ml/base_model_adapter.py CHANGED
@@ -110,7 +110,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
 
         :param local_path: `str` directory path in local FileSystem
         """
-        raise NotImplementedError("Please implement 'load' method in {}".format(self.__class__.__name__))
+        raise NotImplementedError("Please implement `load` method in {}".format(self.__class__.__name__))
 
     def save(self, local_path, **kwargs):
         """ saves configuration and weights locally
@@ -121,7 +121,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
 
         :param local_path: `str` directory path in local FileSystem
         """
-        raise NotImplementedError("Please implement 'save' method in {}".format(self.__class__.__name__))
+        raise NotImplementedError("Please implement `save` method in {}".format(self.__class__.__name__))
 
     def train(self, data_path, output_path, **kwargs):
         """
@@ -133,17 +133,27 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         :param data_path: `str` local File System path to where the data was downloaded and converted at
         :param output_path: `str` local File System path where to dump training mid-results (checkpoints, logs...)
         """
-        raise NotImplementedError("Please implement 'train' method in {}".format(self.__class__.__name__))
+        raise NotImplementedError("Please implement `train` method in {}".format(self.__class__.__name__))
 
     def predict(self, batch, **kwargs):
-        """ Model inference (predictions) on batch of images
+        """ Model inference (predictions) on batch of items
 
         Virtual method - need to implement
 
-        :param batch: `np.ndarray`
+        :param batch: output of the `prepare_item_func` func
         :return: `list[dl.AnnotationCollection]` each collection is per each image / item in the batch
         """
-        raise NotImplementedError("Please implement 'predict' method in {}".format(self.__class__.__name__))
+        raise NotImplementedError("Please implement `predict` method in {}".format(self.__class__.__name__))
+
+    def embed(self, batch, **kwargs):
+        """ Extract model embeddings on batch of items
+
+        Virtual method - need to implement
+
+        :param batch: output of the `prepare_item_func` func
+        :return: `list[list]` a feature vector per each item in the batch
+        """
+        raise NotImplementedError("Please implement `embed` method in {}".format(self.__class__.__name__))
 
     def evaluate(self, model: entities.Model, dataset: entities.Dataset, filters: entities.Filters) -> entities.Model:
         """
@@ -177,7 +187,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         :param data_path: `str` local File System directory path where we already downloaded the data from dataloop platform
         :return:
         """
-        raise NotImplementedError("Please implement 'convert_from_dtlpy' method in {}".format(self.__class__.__name__))
+        raise NotImplementedError("Please implement `convert_from_dtlpy` method in {}".format(self.__class__.__name__))
 
     #################
     # DTLPY METHODS #
@@ -255,15 +265,27 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         self.logger.debug("Downloading subset {!r} of {}".format(subset,
                                                                  self.model_entity.dataset.name))
 
-        if self.configuration.get("include_model_annotations", False):
-            annotation_filters = None
-        else:
+        if self.model_entity.output_type is not None:
+            if self.model_entity.output_type in [entities.AnnotationType.SEGMENTATION,
+                                                 entities.AnnotationType.POLYGON]:
+                model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
+            else:
+                model_output_types = [self.model_entity.output_type]
             annotation_filters = entities.Filters(
+                field=entities.FiltersKnownFields.TYPE,
+                values=model_output_types,
+                resource=entities.FiltersResource.ANNOTATION,
+                operator=entities.FiltersOperations.IN
+            )
+        else:
+            annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION)
+
+        if not self.configuration.get("include_model_annotations", False):
+            annotation_filters.add(
                 field="metadata.system.model.name",
                 values=False,
-                operator=entities.FiltersOperations.EXISTS,
-                resource=entities.FiltersResource.ANNOTATION
-            )
+                operator=entities.FiltersOperations.EXISTS
+            )
 
         ret_list = dataset.items.download(filters=filters,
                                           local_path=data_subset_base_path,
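
Net effect of this hunk: downloaded annotations are now pre-filtered to the model's output type (segmentation and polygon are treated as interchangeable), and model-generated annotations are still excluded unless `include_model_annotations` is set. The equivalent standalone filter, using the same fields as above (the box output type is an assumed example):

import dtlpy as dl

# Annotations matching the model's output type, excluding model-created ones.
annotation_filters = dl.Filters(field=dl.FiltersKnownFields.TYPE,
                                values=[dl.AnnotationType.BOX],  # assumed output type
                                resource=dl.FiltersResource.ANNOTATION,
                                operator=dl.FiltersOperations.IN)
annotation_filters.add(field='metadata.system.model.name',
                       values=False,
                       operator=dl.FiltersOperations.EXISTS)
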
@@ -352,7 +374,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         pool = ThreadPoolExecutor(max_workers=16)
 
         annotations = list()
-        for i_batch in tqdm.tqdm(range(0, len(items), batch_size), desc='predicting', unit='bt', leave=None, file=sys.stdout):
+        for i_batch in tqdm.tqdm(range(0, len(items), batch_size), desc='predicting', unit='bt', leave=None,
+                                 file=sys.stdout):
            batch_items = items[i_batch: i_batch + batch_size]
            batch = list(pool.map(self.prepare_item_func, batch_items))
            batch_collections = self.predict(batch, **kwargs)
@@ -385,6 +408,115 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         pool.shutdown()
         return items, annotations
 
+    @entities.Package.decorators.function(display_name='Embed Items',
+                                          inputs={'items': 'Item[]'},
+                                          outputs={'items': 'Item[]', 'features': '[]'})
+    def embed_items(self, items: list, upload_features=True, batch_size=None, **kwargs):
+        """
+        Extract features from an input list of items (or a single item) and return the items and their feature vectors.
+
+        :param items: `List[dl.Item]` list of items to embed
+        :param upload_features: `bool` uploads the features on the given items
+        :param batch_size: `int` size of batch to run a single inference
+
+        :return: `List[dl.Item]`, `List[List[vector]]`
+        """
+        if batch_size is None:
+            batch_size = self.configuration.get('batch_size', 4)
+        input_type = self.model_entity.input_type
+        self.logger.debug(
+            "Embedding {} items, using batch size {}. input type: {}".format(len(items), batch_size, input_type))
+
+        # Search for an existing feature set for this model id
+        filters = entities.Filters(field='modelId',
+                                   values=self.model_entity.id,
+                                   resource=entities.FiltersResource.FEATURE_SET)
+        pages = self.model_entity.project.feature_sets.list(filters)
+        if pages.items_count == 0:
+            feature_set_name = self.configuration.get('featureSetName', self.model_entity.name)
+            logger.info('Feature Set not found. Creating...')
+            feature_set = self.model_entity.project.feature_sets.create(name=feature_set_name,
+                                                                        entity_type=entities.FeatureEntityType.ITEM,
+                                                                        model_id=self.model_entity.id,
+                                                                        project_id=self.model_entity.project_id,
+                                                                        set_type=self.model_entity.name,
+                                                                        size=self.configuration.get('embeddings_size', 256))
+            if 'featureSetName' not in self.model_entity.configuration:
+                self.model_entity.configuration['featureSetName'] = feature_set_name
+                self.model_entity.update()
+            logger.info(f'Feature Set created! name: {feature_set.name}, id: {feature_set.id}')
+        elif pages.items_count > 1:
+            raise ValueError(
+                f'More than one feature set for model. model_id: {self.model_entity.id}, feature_sets_ids: {[f.id for f in pages.all()]}')
+        else:
+            feature_set = pages.items[0]
+            logger.info(f'Feature Set found! name: {feature_set.name}, id: {feature_set.id}')
+
+        # upload the feature vectors
+        pool = ThreadPoolExecutor(max_workers=16)
+        vectors = list()
+        for i_batch in tqdm.tqdm(range(0, len(items), batch_size),
+                                 desc='embedding',
+                                 unit='bt',
+                                 leave=None,
+                                 file=sys.stdout):
+            batch_items = items[i_batch: i_batch + batch_size]
+            batch = list(pool.map(self.prepare_item_func, batch_items))
+            batch_vectors = self.embed(batch, **kwargs)
+            vectors.extend(batch_vectors)
+            if upload_features is True:
+                self.logger.debug(
+                    "Uploading items' feature vectors for model {!r}.".format(self.model_entity.name))
+                try:
+                    _ = list(pool.map(partial(self._upload_model_features,
+                                              feature_set.id,
+                                              self.model_entity.project_id),
+                                      batch_items,
+                                      batch_vectors))
+                except Exception as err:
+                    self.logger.exception("Failed to upload feature vectors to items.")
+
+        pool.shutdown()
+        return items, vectors
+
+    @entities.Package.decorators.function(display_name='Embed Dataset with DQL',
+                                          inputs={'dataset': 'Dataset',
+                                                  'filters': 'Json'})
+    def embed_dataset(self,
+                      dataset: entities.Dataset,
+                      filters: entities.Filters = None,
+                      upload_features=True,
+                      batch_size=None,
+                      **kwargs):
+        """
+        Extract features from all the given items
+
+        :param dataset: Dataset entity whose items to embed
+        :param filters: Filters entity for filtering before embedding
+        :param upload_features: `bool` uploads the features back to the given items
+        :param batch_size: `int` size of batch to run a single inference
+
+        :return: `bool` indicating if the embedding process completed successfully
+        """
+        if batch_size is None:
+            batch_size = self.configuration.get('batch_size', 4)
+
+        self.logger.debug("Creating embeddings for dataset (name: {}, id: {}), using batch size {}".format(dataset.name,
+                                                                                                           dataset.id,
+                                                                                                           batch_size))
+        if not filters:
+            filters = entities.Filters()
+        if filters is not None and isinstance(filters, dict):
+            filters = entities.Filters(custom_filter=filters)
+        pages = dataset.items.list(filters=filters, page_size=batch_size)
+        items = [item for page in pages for item in page]
+        self.embed_items(items=items,
+                         upload_features=upload_features,
+                         batch_size=batch_size,
+                         **kwargs)
+        return True
+
     @entities.Package.decorators.function(display_name='Predict Dataset with DQL',
                                           inputs={'dataset': 'Dataset',
                                                   'filters': 'Json'})
@@ -404,9 +536,12 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         :param cleanup: `bool` if set removes existing predictions with the same package-model name (default: False)
         :param batch_size: `int` size of batch to run a single inference
 
-        :return: `List[dl.AnnotationCollection]` where all annotation in the collection are of type package.output_type
-                 and has prediction fields (model_info)
+        :return: `bool` indicating if the prediction process completed successfully
         """
+
+        if batch_size is None:
+            batch_size = self.configuration.get('batch_size', 4)
+
         self.logger.debug("Predicting dataset (name:{}, id:{}, using batch size {}".format(dataset.name,
                                                                                            dataset.id,
                                                                                            batch_size))
@@ -415,9 +550,9 @@
         if filters is not None and isinstance(filters, dict):
             filters = entities.Filters(custom_filter=filters)
         pages = dataset.items.list(filters=filters, page_size=batch_size)
-        items = [item for item in pages.all() if item.type == 'file']
+        items = [item for page in pages for item in page]
         self.predict_items(items=items,
-                           with_upload=with_upload,
+                           upload_annotations=with_upload,
                            cleanup=cleanup,
                            batch_size=batch_size,
                            **kwargs)
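
`predict_dataset` now resolves the batch size from the model configuration, forwards `with_upload` under the renamed `upload_annotations` keyword, and walks pages directly instead of filtering on `item.type`. A call sketch, reusing the `MyEmbedderAdapter` sketched earlier (entity lookups, the `model_entity` construction pattern, and the DQL body are assumptions; a raw dict is wrapped in `Filters(custom_filter=...)` as shown above):

import dtlpy as dl

dataset = dl.datasets.get(dataset_id='<dataset-id>')  # placeholder id
adapter = MyEmbedderAdapter(model_entity=dl.models.get(model_id='<model-id>'))
adapter.predict_dataset(dataset=dataset,
                        filters={'$and': [{'dir': '/validation'}]},  # assumed DQL body
                        with_upload=True,
                        batch_size=8)
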
@@ -571,6 +706,19 @@
     # =============
     # INNER METHODS
     # =============
+
+    @staticmethod
+    def _upload_model_features(feature_set_id, project_id, item: entities.Item, vector):
+        try:
+            feature = item.features.create(value=vector,
+                                           project_id=project_id,
+                                           feature_set_id=feature_set_id,
+                                           entity=item)
+            return feature
+        except Exception as e:
+            logger.error(f'Failed to upload feature vector of length {len(vector)} to item {item.id}. Error: {e}')
+            return []
+
     def _upload_model_annotations(self, item: entities.Item, predictions, clean_annotations):
         """
         Utility function that uploads predictions to the dlp platform based on the package.output_type
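
The new embed entry points mirror the predict pair and rely on `_upload_model_features` above to attach vectors to items. Continuing the same sketch:

# Embed a few items; vectors are uploaded to the model's feature set by default.
items, vectors = adapter.embed_items(items=list(dataset.items.list().all())[:8])

# Or embed a whole dataset, optionally pre-filtered, in one call.
adapter.embed_dataset(dataset=dataset, upload_features=True, batch_size=8)
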
dtlpy/new_instance.py CHANGED
@@ -1,14 +1,61 @@
 class Dtlpy:
+    from .services.api_client import client as client_api
+    from .services.api_client import VerboseLoggingLevel, ApiClient
+    from .services import DataloopLogger, DtlpyFilter, check_sdk, Reporter, service_defaults
+    from .services.api_reference import api_reference as _api_reference
+    from .caches.cache import CacheConfig, CacheType
     from .exceptions import PlatformException
     from . import repositories, exceptions, entities, examples
-    from .__version__ import version as __version__
-    from .entities import Box, Text, Point, Segmentation, Polygon, Ellipse, Classification, Subtitle, Polyline, \
-        Filters, Trigger, Description, \
-        AnnotationCollection, Annotation, Item, Codebase, Filters, Execution, Recipe, Ontology, Label, \
-        ItemLink, UrlLink, PackageModule, PackageFunction, FunctionIO, Modality, Workload, WorkloadUnit
-    from .utilities import Converter, BaseServiceRunner, Progress
-    from .services.api_client import ApiClient
-    from .services import check_sdk
+    from .entities import (
+        # main entities
+        Project, Dataset, ExpirationOptions, ExportVersion, Trigger, Item, Execution, AnnotationCollection, Annotation,
+        Recipe, IndexDriver, AttributesTypes, AttributesRange, Dpk, App, AppModule, AppScope,
+        Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
+        PackageModule, PackageFunction,
+        # annotations
+        Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
+        Description,
+        Polygon, Text, FreeText, RefImage,
+        # filters
+        Filters, FiltersKnownFields, FiltersResource, FiltersOperations, FiltersMethod, FiltersOrderByDirection,
+        FiltersKnownFields as KnownFields,
+        # triggers
+        TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
+        # faas
+        FunctionIO, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+        InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
+        PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
+        # roberto
+        DatasetSubsetType, ModelStatus, PlotSample, ArtifactType, Artifact, ItemArtifact, LinkArtifact, LocalArtifact,
+        EntityScopeLevel,
+        # features
+        FeatureEntityType, Feature, FeatureSet,
+        #
+        RequirementOperator, PackageRequirement,
+        Command, CommandsStatus,
+        LocalCodebase, GitCodebase, ItemCodebase, FilesystemCodebase, PackageCodebaseType,
+        MemberRole, MemberOrgRole,
+        Webhook, HttpMethod,
+        ViewAnnotationOptions, AnnotationStatus, AnnotationType,
+        ItemStatus, ExecutionStatus, ExportMetadata,
+        PromptItem, Prompt, PromptType,
+        ItemLink, UrlLink, LinkTypeEnum,
+        Modality, ModalityTypeEnum, ModalityRefTypeEnum,
+        Workload, WorkloadUnit, ItemAction,
+        PipelineExecution, CycleRerunMethod, PipelineExecutionNode, Pipeline, PipelineConnection,
+        PipelineNode, TaskNode, CodeNode, PipelineStats, PipelineSettings,
+        PipelineNodeType, PipelineNameSpace, PipelineResumeOption, Variable, CompositionStatus,
+        FunctionNode, DatasetNode, PipelineConnectionPort, PipelineNodeIO, Organization, OrganizationsPlans,
+        Integration,
+        Driver, S3Driver, GcsDriver, AzureBlobDriver, CacheAction, PodType,
+        ExternalStorage, IntegrationType, Role, PlatformEntityType, SettingsValueTypes, SettingsTypes,
+        SettingsSectionNames,
+        SettingScope, BaseSetting, UserSetting, Setting, ServiceSample, ExecutionSample, PipelineExecutionSample,
+        ResourceExecution, Message, NotificationEventContext
+    )
+    from .ml import BaseModelAdapter
+    from .utilities import Converter, BaseServiceRunner, Progress, Context, AnnotationFormat
+    from .repositories import FUNCTION_END_LINE, PackageCatalog
 
     def __init__(self, cookie_filepath=None):
         self.client_api = self.ApiClient(cookie_filepath=cookie_filepath)
@@ -24,15 +71,40 @@ class Dtlpy:
         self.triggers = self.repositories.Triggers(client_api=self.client_api)
         self.assignments = self.repositories.Assignments(client_api=self.client_api)
         self.tasks = self.repositories.Tasks(client_api=self.client_api)
+        self.dpks = self.repositories.Dpks(client_api=self.client_api)
         self.annotations = self.repositories.Annotations(client_api=self.client_api)
+        self.models = self.repositories.Models(client_api=self.client_api)
+        self.ontologies = self.repositories.Ontologies(client_api=self.client_api)
+        self.recipes = self.repositories.Recipes(client_api=self.client_api)
+        self.pipelines = self.repositories.Pipelines(client_api=self.client_api)
+        self.pipeline_executions = self.repositories.PipelineExecutions(client_api=self.client_api)
+        self.feature_sets = self.repositories.FeatureSets(client_api=self.client_api)
+        self.features = self.repositories.Features(client_api=self.client_api)
+        self.organizations = self.repositories.Organizations(client_api=self.client_api)
+        self.analytics = self.repositories.Analytics(client_api=self.client_api)
+        self.integrations = self.repositories.Integrations(client_api=self.client_api)
+        self.drivers = self.repositories.Drivers(client_api=self.client_api)
+        self.settings = self.repositories.Settings(client_api=self.client_api)
+        self.apps = self.repositories.Apps(client_api=self.client_api)
+        self.dpks = self.repositories.Dpks(client_api=self.client_api)
+        self.messages = self.repositories.Messages(client_api=self.client_api)
+        self.compositions = self.repositories.Compositions(client_api=self.client_api)
+
         self.verbose = self.client_api.verbose
         self.login = self.client_api.login
+        self.logout = self.client_api.logout
         self.login_token = self.client_api.login_token
         self.login_secret = self.client_api.login_secret
+        self.login_api_key = self.client_api.login_api_key
+        self.login_m2m = self.client_api.login_m2m
         self.add_environment = self.client_api.add_environment
         self.setenv = self.client_api.setenv
         self.token_expired = self.client_api.token_expired
         self.info = self.client_api.info
+        self.cache_state = self.client_api.cache_state
+        self.attributes_mode = self.client_api.attributes_mode
+        self.sdk_cache = self.client_api.sdk_cache
+        self.platform_settings = self.client_api.platform_settings
 
     def __del__(self):
@@ -176,7 +248,6 @@ class Dtlpy:
         GPU_T4_S = "gpu-t4-s"
         GPU_T4_M = "gpu-t4-m"
 
-
     class LoggingLevel:
         DEBUG = 'debug'
         WARNING = 'warning'
dtlpy/repositories/apps.py CHANGED
@@ -1,6 +1,6 @@
 import logging
 
-from .. import entities, exceptions, miscellaneous
+from .. import entities, exceptions, miscellaneous, repositories
 from ..services.api_client import ApiClient
 
 logger = logging.getLogger(name='dtlpy')
@@ -11,6 +11,7 @@ class Apps:
     def __init__(self, client_api: ApiClient, project: entities.Project = None):
         self._client_api = client_api
         self._project = project
+        self._commands = None
 
     @property
     def project(self) -> entities.Project:
@@ -21,6 +22,12 @@ class Apps:
         assert isinstance(self._project, entities.Project)
         return self._project
 
+    @property
+    def commands(self) -> repositories.Commands:
+        if self._commands is None:
+            self._commands = repositories.Commands(client_api=self._client_api)
+        return self._commands
+
     @project.setter
     def project(self, project: entities.Project):
         if not isinstance(project, entities.Project):
@@ -160,12 +167,13 @@ class Apps:
         paged.get_page()
         return paged
 
-    def update(self, app: entities.App = None, app_id: str = None) -> bool:
+    def update(self, app: entities.App = None, app_id: str = None, wait: bool = True) -> bool:
         """
         Update the current app to the new configuration
 
         :param entities.App app: The app to update.
         :param str app_id: The app id to update.
+        :param bool wait: wait for the operation to finish.
         :return bool whether the operation ran successfully or not
 
         **Example**
@@ -179,16 +187,30 @@
         success, response = self._client_api.gen_request(req_type='put',
                                                          path=f"/apps/{app.id}",
                                                          json_req=app.to_json())
-        if success:
-            return success
-        raise exceptions.PlatformException(response)
+        if not success:
+            raise exceptions.PlatformException(response)
+
+        app = entities.App.from_json(
+            _json=response.json(),
+            client_api=self._client_api,
+            project=self.project
+        )
+        if app.metadata:
+            command_id = app.metadata.get('system', {}).get('commands', {}).get('update', None)
+            if wait and app.status == entities.CompositionStatus.UPDATING and command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                command.wait()
+                app = self.get(app_id=app.id)
+
+        return success
 
     def install(self,
                 dpk: entities.Dpk,
                 app_name: str = None,
                 organization_id: str = None,
                 custom_installation: dict = None,
-                scope: entities.AppScope = None
+                scope: entities.AppScope = None,
+                wait: bool = True
                 ) -> entities.App:
         """
         Install the specified app in the project.
@@ -199,6 +221,7 @@
         :param str organization_id: the organization which you want to apply on the filter.
         :param dict custom_installation: partial installation.
         :param str scope: the scope of the app. default is project.
+        :param bool wait: wait for the operation to finish.
 
         :return the installed app.
         :rtype entities.App
@@ -229,11 +252,20 @@
                                                          json_req=app.to_json())
         if not success:
             raise exceptions.PlatformException(response)
-        return entities.App.from_json(_json=response.json(),
-                                      client_api=self._client_api,
-                                      project=self.project)
+        app = entities.App.from_json(_json=response.json(),
+                                     client_api=self._client_api,
+                                     project=self.project)
+
+        if app.metadata:
+            command_id = app.metadata.get('system', {}).get('commands', {}).get('install', None)
+            if wait and app.status == entities.CompositionStatus.INITIALIZING and command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                command.wait()
+                app = self.get(app_id=app.id)
 
-    def uninstall(self, app_id: str = None, app_name: str = None) -> bool:
+        return app
+
+    def uninstall(self, app_id: str = None, app_name: str = None, wait: bool = True) -> bool:
         """
         Delete an app entity.
 
@@ -241,6 +273,7 @@
 
         :param str app_id: optional - the id of the app.
         :param str app_name: optional - the name of the app.
+        :param bool wait: optional - wait for the operation to finish.
         :return whether we succeed uninstalling the specified app.
         :rtype bool
 
@@ -260,6 +293,19 @@
         if not success:
             raise exceptions.PlatformException(response)
 
+        try:
+            app = self.get(app_id=app_id)
+        except Exception as e:
+            if e.status_code == '404':
+                return success
+            else:
+                raise e
+        if app.metadata:
+            command_id = app.metadata.get('system', {}).get('commands', {}).get('uninstall', None)
+            if wait and app.status == entities.CompositionStatus.TERMINATING and command_id is not None:
+                command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
+                command.wait()
+
         logger.debug(f"App deleted successfully (id: {app_id}, name: {app_name})")
         return success
 
@@ -280,15 +326,15 @@
         """
         if app_id is not None and app is None:
             app = self.get(app_id=app_id)
+
+        if app and app.status == entities.CompositionStatus.INSTALLED:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Application is already active'
+            )
         if app is None:
             raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
 
-        if app and app.status == entities.CompositionStatus.INSTALLED:
-            raise exceptions.PlatformException(
-                error='400',
-                message='Application is already active'
-            )
-
         success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/activate'.format(app.id))
         if not success:
             raise exceptions.PlatformException(response)
@@ -313,15 +359,15 @@
         """
         if app_id is not None and app is None:
             app = self.get(app_id=app_id)
+
+        if app and app.status == entities.CompositionStatus.UNINSTALLED:
+            raise exceptions.PlatformException(
+                error='400',
+                message='Application is already inactive'
+            )
         if app is None:
             raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
 
-        if app and app.status == entities.CompositionStatus.UNINSTALLED:
-            raise exceptions.PlatformException(
-                error='400',
-                message='Application is already inactive'
-            )
-
         success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/deactivate'.format(app.id))
         if not success:
             raise exceptions.PlatformException(response)
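
`install`, `update`, and `uninstall` now block by default (`wait=True`) on the composition command the platform records under `app.metadata.system.commands`, polled through the new `commands` property. A usage sketch (project and DPK lookups are assumptions):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # assumed project name
dpk = dl.dpks.get(dpk_name='my-app')                  # assumed DPK name

# Blocks until the install command completes, then returns the refreshed app.
app = project.apps.install(dpk=dpk, wait=True)

# Returns immediately; the platform finishes the uninstall in the background.
project.apps.uninstall(app_id=app.id, wait=False)
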
dtlpy/repositories/assignments.py CHANGED
@@ -117,7 +117,7 @@ class Assignments:
         elif self._project is not None:
             project_ids = [self._project.id]
         else:
-            raise ('400', 'Must provide project')
+            raise exceptions.PlatformException(error='400', message='Must provide project')
 
         project_ids = ','.join(project_ids)
         query.append('projects={}'.format(project_ids))
dtlpy/repositories/commands.py CHANGED
@@ -87,17 +87,25 @@ class Commands:
         :param float backoff_factor: A backoff factor to apply between attempts after the second try
         :return: Command object
         """
+
         elapsed = 0
         start = time.time()
         if timeout is None or timeout <= 0:
             timeout = np.inf
 
         command = None
-        pbar = tqdm.tqdm(total=100, disable=self._client_api.verbose.disable_progress_bar,
-                         file=sys.stdout, desc='Command Progress')
+        pbar = tqdm.tqdm(total=100,
+                         disable=self._client_api.verbose.disable_progress_bar,
+                         file=sys.stdout,
+                         desc='Command Progress')
         num_tries = 1
         while elapsed < timeout:
             command = self.get(command_id=command_id, url=url)
+            if command.type == 'ExportDatasetAsJson':
+                self._client_api.callbacks.run_on_event(event=self._client_api.callbacks.CallbackEvent.DATASET_EXPORT,
+                                                        context=command.spec,
+                                                        progress=command.progress)
+
             pbar.update(command.progress - pbar.n)
             if not command.in_progress():
                 break
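
This is the same polling loop the apps flows above drive via `command.wait()`; dataset-export commands additionally report progress through the client's callback registry while waiting. Waiting on a command directly can follow the apps pattern (repository construction mirrors the `Apps.commands` property; the command id is a placeholder):

from dtlpy import repositories
from dtlpy.services.api_client import client as client_api  # default client, as imported in new_instance.py

commands = repositories.Commands(client_api=client_api)
command = commands.get(command_id='<command-id>',
                       url='api/v1/commands/faas/<command-id>')
command.wait()  # drives the 'Command Progress' bar until completion, failure, or timeout
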