dtlpy 1.114.13__py3-none-any.whl → 1.114.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -17,6 +17,77 @@ class Models:
     """
     Models Repository
     """
+    @staticmethod
+    def _filter_to_dict(filter_obj):
+        """Convert Filters object to dict, or return as-is if already dict/None"""
+        if filter_obj is not None:
+            filter_obj = filter_obj.prepare() if isinstance(filter_obj, entities.Filters) else filter_obj
+        return filter_obj
+
+    @staticmethod
+    def _get_filter_from_model(model, subset_type, resource_type):
+        """Extract filter dict from model metadata"""
+        filter_dict = None
+        if model is not None:
+            if resource_type == entities.FiltersResource.ITEM:
+                filter_dict = model.metadata.get('system', {}).get('subsets', {}).get(subset_type.value)
+            else:  # ANNOTATION
+                filter_dict = model.metadata.get('system', {}).get('annotationsSubsets', {}).get(subset_type.value)
+        return filter_dict
+
+    @staticmethod
+    def _build_model_metadata(
+            train_filter: entities.Filters = None,
+            validation_filter: entities.Filters = None,
+            annotations_train_filter: entities.Filters = None,
+            annotations_validation_filter: entities.Filters = None,
+            from_model: entities.Model = None
+    ) -> dict:
+        """
+        Build model metadata with filters, optionally inheriting from existing model.
+
+        :param train_filter: Training data filter (Filters object or dict)
+        :param validation_filter: Validation data filter (Filters object or dict)
+        :param annotations_train_filter: Training annotations filter (Filters object or dict)
+        :param annotations_validation_filter: Validation annotations filter (Filters object or dict)
+        :param from_model: Source model to inherit filters from (if not provided explicitly)
+        :return: Metadata dictionary with filters
+        """
+        metadata = {'system': {'subsets': {}, 'annotationsSubsets': {}}}
+
+        # Handle item filters
+        train_filter_dict = Models._filter_to_dict(train_filter)
+        if train_filter_dict is None and from_model is not None:
+            train_filter_dict = Models._get_filter_from_model(
+                model=from_model, subset_type=entities.DatasetSubsetType.TRAIN, resource_type=entities.FiltersResource.ITEM)
+
+        validation_filter_dict = Models._filter_to_dict(validation_filter)
+        if validation_filter_dict is None and from_model is not None:
+            validation_filter_dict = Models._get_filter_from_model(
+                model=from_model, subset_type=entities.DatasetSubsetType.VALIDATION, resource_type=entities.FiltersResource.ITEM)
+
+        # Handle annotation filters
+        annotations_train_filter_dict = Models._filter_to_dict(annotations_train_filter)
+        if annotations_train_filter_dict is None and from_model is not None:
+            annotations_train_filter_dict = Models._get_filter_from_model(
+                model=from_model, subset_type=entities.DatasetSubsetType.TRAIN, resource_type=entities.FiltersResource.ANNOTATION)
+
+        annotations_validation_filter_dict = Models._filter_to_dict(annotations_validation_filter)
+        if annotations_validation_filter_dict is None and from_model is not None:
+            annotations_validation_filter_dict = Models._get_filter_from_model(
+                model=from_model, subset_type=entities.DatasetSubsetType.VALIDATION, resource_type=entities.FiltersResource.ANNOTATION)
+
+        # Set filters in metadata
+        if train_filter_dict is not None:
+            metadata['system']['subsets']['train'] = train_filter_dict
+        if validation_filter_dict is not None:
+            metadata['system']['subsets']['validation'] = validation_filter_dict
+        if annotations_train_filter_dict is not None:
+            metadata['system']['annotationsSubsets']['train'] = annotations_train_filter_dict
+        if annotations_validation_filter_dict is not None:
+            metadata['system']['annotationsSubsets']['validation'] = annotations_validation_filter_dict
+
+        return metadata
 
     def __init__(self,
                  client_api: ApiClient,
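The new `_build_model_metadata` helper centralizes how item and annotation filters end up in model system metadata. As a rough sketch of the resulting shape (the filter values below are illustrative, not part of the diff):

    import dtlpy as dl

    # Illustrative filters; any dl.Filters (or an already-prepared dict) works.
    train_items = dl.Filters(field='dir', values='/train')
    train_annotations = dl.Filters(resource=dl.FiltersResource.ANNOTATION, field='label', values='cat')

    # The helper stores the prepared (dict) form of each non-None filter:
    expected_metadata = {
        'system': {
            'subsets': {'train': train_items.prepare()},
            'annotationsSubsets': {'train': train_annotations.prepare()},
        }
    }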
@@ -216,35 +287,56 @@ class Models:
         return metadata
 
     @staticmethod
-    def add_subset(model: entities.Model, subset_name: str, subset_filter: entities.Filters):
+    def add_subset(
+            model: entities.Model,
+            subset_name: str,
+            subset_filter=None,
+            subset_annotation_filter=None,
+    ):
         """
         Adds a subset for a model, specifying a subset of the model's dataset that could be used for training or
-        validation.
+        validation. Optionally also adds an annotations subset.
 
         :param dtlpy.entities.Model model: the model to which the subset should be added
         :param str subset_name: the name of the subset
-        :param dtlpy.entities.Filters subset_filter: the filtering operation that this subset performs in the dataset.
-
-        **Example**
-
-        .. code-block:: python
-
-            project.models.add_subset(model=model_entity, subset_name='train', subset_filter=dtlpy.Filters(field='dir', values='/train'))
-            model_entity.metadata['system']['subsets']
-            {'train': <dtlpy.entities.filters.Filters object at 0x1501dfe20>}
+        :param subset_filter: filtering for items subset. Can be `entities.Filters`, `dict`, or `None`
+        :param subset_annotation_filter: optional filtering for annotations subset. Can be `entities.Filters`, `dict`, or `None`
+
+        Behavior:
+        - If both filters are None, no metadata is added/changed.
+        - If a filter is a dict, it is used as-is (no prepare()).
+        - If a filter is `entities.Filters`, `.prepare()` is used.
+        - Only non-None filters are added.
+        """
+        if subset_filter is None and subset_annotation_filter is None:
+            return
+
+        subset_filter_dict = subset_filter.prepare() if isinstance(subset_filter, entities.Filters) else subset_filter
+        subset_annotation_filter_dict = (
+            subset_annotation_filter.prepare()
+            if isinstance(subset_annotation_filter, entities.Filters)
+            else subset_annotation_filter
+        )
 
-        """
+        # Initialize containers only if needed
         if 'system' not in model.metadata:
             model.metadata['system'] = dict()
-        if 'subsets' not in model.metadata['system']:
-            model.metadata['system']['subsets'] = dict()
-        model.metadata['system']['subsets'][subset_name] = subset_filter.prepare()
+        if subset_filter_dict is not None:
+            if 'subsets' not in model.metadata['system']:
+                model.metadata['system']['subsets'] = dict()
+            model.metadata['system']['subsets'][subset_name] = subset_filter_dict
+
+        if subset_annotation_filter_dict is not None:
+            if 'annotationsSubsets' not in model.metadata['system']:
+                model.metadata['system']['annotationsSubsets'] = dict()
+            model.metadata['system']['annotationsSubsets'][subset_name] = subset_annotation_filter_dict
+
         model.update(system_metadata=True)
 
     @staticmethod
     def delete_subset(model: entities.Model, subset_name: str):
         """
-        Removes a subset from a model's metadata.
+        Removes a subset from a model's metadata (both subsets and annotationsSubsets).
 
         :param dtlpy.entities.Model model: the model to which the subset should be added
         :param str subset_name: the name of the subset
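A usage sketch of the extended `add_subset`, assuming an existing project and model (the names are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')  # placeholder name
    model = project.models.get(model_name='my-model')     # placeholder name

    project.models.add_subset(
        model=model,
        subset_name='train',
        subset_filter=dl.Filters(field='dir', values='/train'),
        subset_annotation_filter=dl.Filters(resource=dl.FiltersResource.ANNOTATION, field='label', values='cat'),
    )
    # model.metadata['system']['subsets']['train'] and
    # model.metadata['system']['annotationsSubsets']['train'] now hold the prepared filter dicts.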
@@ -261,10 +353,16 @@ class Models:
            {}
 
         """
+        # Check if subset exists in subsets (for warning)
         if model.metadata.get("system", dict()).get("subsets", dict()).get(subset_name) is None:
             logger.error(f"Model system metadata incomplete, could not delete subset {subset_name}.")
         else:
             _ = model.metadata['system']['subsets'].pop(subset_name)
+
+        # Remove from annotationsSubsets if it exists
+        if model.metadata.get("system", dict()).get("annotationsSubsets", dict()).get(subset_name) is not None:
+            _ = model.metadata['system']['annotationsSubsets'].pop(subset_name)
+
         model.update(system_metadata=True)
 
     def create(
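Correspondingly, `delete_subset` now clears both containers. A minimal sketch with placeholder names:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')  # placeholder name
    model = project.models.get(model_name='my-model')     # placeholder name

    # Removes 'train' from metadata['system']['subsets'] and, if present,
    # from metadata['system']['annotationsSubsets'] as well.
    project.models.delete_subset(model=model, subset_name='train')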
@@ -286,6 +384,8 @@ class Models:
             output_type=None,
             train_filter: entities.Filters = None,
             validation_filter: entities.Filters = None,
+            annotations_train_filter: entities.Filters = None,
+            annotations_validation_filter: entities.Filters = None,
             app: entities.App = None
     ) -> entities.Model:
         """
@@ -308,6 +408,8 @@ class Models:
         :param str output_type: dl.AnnotationType - the type of annotations the model produces (class, box segment, text, etc)
         :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
         :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
+        :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
+        :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
         :param dtlpy.entities.App app: App entity to connect the model to
         :return: Model Entity
 
@@ -407,10 +509,13 @@ class Models:
         if status is not None:
             payload['status'] = status
 
-        if train_filter or validation_filter:
-            metadata = self._set_model_filter(metadata={},
-                                              train_filter=train_filter,
-                                              validation_filter=validation_filter)
+        if train_filter or validation_filter or annotations_train_filter or annotations_validation_filter:
+            metadata = Models._build_model_metadata(
+                train_filter=train_filter,
+                validation_filter=validation_filter,
+                annotations_train_filter=annotations_train_filter,
+                annotations_validation_filter=annotations_validation_filter
+            )
             payload['metadata'] = metadata
 
         # request
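With the new parameters, `create` accepts annotation-subset filters alongside the item filters. A sketch, assuming an existing package and dataset (all names and ids are placeholders; the remaining `create` arguments follow the existing docstring):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')       # placeholder name
    package = project.packages.get(package_name='my-package')  # placeholder name

    model = package.models.create(
        model_name='my-model',
        dataset_id='my-dataset-id',                            # placeholder id
        labels=['cat', 'dog'],
        train_filter=dl.Filters(field='dir', values='/train'),
        validation_filter=dl.Filters(field='dir', values='/validation'),
        annotations_train_filter=dl.Filters(resource=dl.FiltersResource.ANNOTATION, field='label', values='cat'),
        annotations_validation_filter=dl.Filters(resource=dl.FiltersResource.ANNOTATION, field='label', values='dog'),
    )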
@@ -442,6 +547,8 @@ class Models:
             tags: list = None,
             train_filter: entities.Filters = None,
             validation_filter: entities.Filters = None,
+            annotations_train_filter: entities.Filters = None,
+            annotations_validation_filter: entities.Filters = None,
             wait=True,
     ) -> entities.Model:
         """
@@ -459,6 +566,8 @@ class Models:
         :param list tags: `list` of `str` - label of the model
         :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
         :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
+        :param dtlpy.entities.filters.Filters annotations_train_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model train
+        :param dtlpy.entities.filters.Filters annotations_validation_filter: Filters entity or a dictionary to define the annotations' scope in the specified dataset_id for the model validation
         :param bool wait: `bool` wait for model to be ready
         :return: dl.Model which is a clone version of the existing model
         """
@@ -495,12 +604,14 @@ class Models:
         if status is not None:
             from_json['status'] = status
 
-        metadata = self._set_model_filter(metadata={},
-                                          train_filter=train_filter if train_filter is not None else from_model.metadata.get(
-                                              'system', {}).get('subsets', {}).get('train', None),
-                                          validation_filter=validation_filter if validation_filter is not None else from_model.metadata.get(
-                                              'system', {}).get('subsets', {}).get('validation', None))
-        if metadata:
+        metadata = Models._build_model_metadata(
+            train_filter=train_filter,
+            validation_filter=validation_filter,
+            annotations_train_filter=annotations_train_filter,
+            annotations_validation_filter=annotations_validation_filter,
+            from_model=from_model
+        )
+        if metadata['system']['subsets'] or metadata['system']['annotationsSubsets']:
             from_json['metadata'] = metadata
         success, response = self._client_api.gen_request(req_type='post',
                                                          path='/ml/models/{}/clone'.format(from_model.id),
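For `clone`, filters that are not passed explicitly are now inherited from the source model's metadata through `_build_model_metadata`, for item and annotation subsets alike. A sketch with placeholder names:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')       # placeholder name
    dataset = project.datasets.get(dataset_name='my-dataset')  # placeholder name
    base_model = project.models.get(model_name='base-model')   # placeholder name

    cloned = project.models.clone(
        from_model=base_model,
        model_name='base-model-tuned',
        dataset=dataset,
        # Only the annotation filters are overridden here; the item subsets
        # are inherited from base_model's metadata.
        annotations_train_filter=dl.Filters(resource=dl.FiltersResource.ANNOTATION, field='label', values='cat'),
        annotations_validation_filter=dl.Filters(resource=dl.FiltersResource.ANNOTATION, field='label', values='cat'),
    )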
@@ -193,12 +193,13 @@ class PipelineExecutions:
         paged.get_page()
         return paged
 
-    @_api_reference.add(path='/pipelines/{pipelineId}/execute', method='post')
+    @_api_reference.add(path="/pipelines/{pipelineId}/execute", method="post")
     def create(
             self,
             pipeline_id: str = None,
             execution_input=None,
-            node_id: str = None
+            node_id: str = None,
+            test_mode: bool = None
     ):
         """
         Execute a pipeline.
@@ -208,7 +209,7 @@ class PipelineExecutions:
         :param pipeline_id: pipeline id
         :param execution_input: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
         :param node_id: node id to start from
-
+        :param bool test_mode: if True, the pipeline will be executed in test mode
         :return: entities.PipelineExecution object
         :rtype: dtlpy.entities.pipeline_execution.PipelineExecution
 
@@ -223,29 +224,31 @@ class PipelineExecutions:
                 raise exceptions.PlatformException('400', 'Please provide pipeline id')
             pipeline_id = self._pipeline.id
 
-        payload = dict()
+        pipeline_options = dict()
+        if test_mode is not None:
+            pipeline_options["testMode"] = test_mode
         if execution_input is None:
             # support pipeline executions without any input
             pass
         elif isinstance(execution_input, dict):
-            payload['input'] = execution_input
+            pipeline_options["input"] = execution_input
         else:
             if not isinstance(execution_input, list):
                 execution_input = [execution_input]
             if len(execution_input) > 0 and isinstance(execution_input[0], entities.FunctionIO):
-                payload['input'] = dict()
+                pipeline_options["input"] = dict()
                 for single_input in execution_input:
-                    payload['input'].update(single_input.to_json(resource='execution'))
+                    pipeline_options["input"].update(single_input.to_json(resource="execution"))
             else:
                 raise exceptions.PlatformException('400', 'Unknown input type')
 
         if node_id is not None:
-            payload['nodeId'] = node_id
+            pipeline_options["nodeId"] = node_id
 
         success, response = self._client_api.gen_request(
-            path='/pipelines/{}/execute'.format(pipeline_id),
-            req_type='POST',
-            json_req=payload
+            path="/pipelines/{}/execute".format(pipeline_id),
+            req_type="POST",
+            json_req={"pipeline": pipeline_options},
         )
 
         if not success:
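On the pipeline side, the execution request body is now wrapped as {"pipeline": {...}} and can carry testMode. A usage sketch with a placeholder pipeline name and item id:

    import dtlpy as dl

    pipeline = dl.pipelines.get(pipeline_name='my-pipeline')  # placeholder name

    # test_mode=True is sent as {"pipeline": {"testMode": true, "input": {...}}}.
    execution = pipeline.pipeline_executions.create(
        execution_input={'item': 'my-item-id'},               # placeholder id
        test_mode=True,
    )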