dtlpy 1.114.13__py3-none-any.whl → 1.114.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -65,7 +65,7 @@ from .entities import (
  # main entities
  Project, Dataset, ExpirationOptions, ExportVersion, Trigger, Item, Execution, AnnotationCollection, Annotation,
  Recipe, IndexDriver, AttributesTypes, AttributesRange, Dpk, App, AppModule, AppScope,
- Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
+ Ontology, Label, Task, TaskPriority, ConsensusTaskType, AllocationMethod, Assignment, Service, Package, Codebase, Model,
  PackageModule, PackageFunction,
  # annotations
  Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose, Gis, GisType,
@@ -484,5 +484,8 @@ CONSENSUS_TASK_TYPE_CONSENSUS = ConsensusTaskType.CONSENSUS
  CONSENSUS_TASK_TYPE_QUALIFICATION = ConsensusTaskType.QUALIFICATION
  CONSENSUS_TASK_TYPE_HONEYPOT = ConsensusTaskType.HONEYPOT

+ TASK_TYPE_PULLING = AllocationMethod.PULLING
+ TASK_TYPE_DISTRIBUTION = AllocationMethod.DISTRIBUTION
+
  SERVICE_MODE_TYPE_REGULAR = ServiceModeType.REGULAR
  SERVICE_MODE_TYPE_DEBUG = ServiceModeType.DEBUG
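
Together, the two hunks above expose the new AllocationMethod enum (pulling vs. distribution task allocation) at the package root, plus the TASK_TYPE_PULLING and TASK_TYPE_DISTRIBUTION aliases. A minimal sketch of the new surface; the commented task-creation call and its allocation_method parameter are illustrative assumptions, not confirmed by this diff:

    import dtlpy as dl

    # New module-level aliases point at the new enum members
    assert dl.TASK_TYPE_PULLING == dl.AllocationMethod.PULLING
    assert dl.TASK_TYPE_DISTRIBUTION == dl.AllocationMethod.DISTRIBUTION

    # Hypothetical usage when creating a task (parameter name assumed for illustration):
    # task = dataset.tasks.create(task_name='my-task',
    #                             allocation_method=dl.AllocationMethod.PULLING)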
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.114.13'
+ version = '1.114.14'
dtlpy/entities/__init__.py CHANGED
@@ -48,7 +48,7 @@ from .service import Service, KubernetesAutoscalerType, KubernetesRabbitmqAutosc
  from .execution import Execution, ExecutionStatus
  from .command import Command, CommandsStatus
  from .assignment import Assignment, Workload, WorkloadUnit
- from .task import Task, ItemAction, TaskPriority, ConsensusTaskType
+ from .task import Task, ItemAction, TaskPriority, ConsensusTaskType, AllocationMethod
  from .directory_tree import DirectoryTree
  from .user import User
  from .bot import Bot
dtlpy/entities/driver.py CHANGED
@@ -30,8 +30,9 @@ class ExternalStorage(str, Enum):
  GCS = "gcs"
  AZUREBLOB = "azureblob"
  AZURE_DATALAKE_GEN2 = 'azureDatalakeGen2'
- KEY_VALUE = "key_value"
- AWS_STS = 'aws-sts'
+ POWERSCALE_S3 = 'powerscaleS3'
+ MIN_IO = 's3'
+ POWERSCALE_NFS = 'powerscaleNfs'


  @attr.s()
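
The ExternalStorage enum drops KEY_VALUE and AWS_STS and gains three backends; note that MIN_IO maps onto the S3-compatible driver type 's3'. A small sketch of the values now exposed, based only on this hunk:

    import dtlpy as dl

    print(dl.ExternalStorage.POWERSCALE_S3.value)   # 'powerscaleS3'
    print(dl.ExternalStorage.MIN_IO.value)          # 's3' (S3-compatible driver type)
    print(dl.ExternalStorage.POWERSCALE_NFS.value)  # 'powerscaleNfs'

    # KEY_VALUE and AWS_STS are no longer members, so code referencing
    # dl.ExternalStorage.AWS_STS now raises AttributeError.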
dtlpy/entities/filters.py CHANGED
@@ -7,7 +7,7 @@ from enum import Enum
 
  from .. import exceptions, entities
 
- logger = logging.getLogger(name='dtlpy')
+ logger = logging.getLogger(name="dtlpy")


  class FiltersKnownFields(str, Enum):
@@ -19,7 +19,7 @@ class FiltersKnownFields(str, Enum):
  LABEL = "label"
  NAME = "name"
  HIDDEN = "hidden"
- TYPE = 'type'
+ TYPE = "type"


  class FiltersResource(str, Enum):
@@ -33,22 +33,22 @@ class FiltersResource(str, Enum):
  TRIGGER = "triggers"
  MODEL = "models"
  WEBHOOK = "webhooks"
- RECIPE = 'recipe'
- DATASET = 'datasets'
- ONTOLOGY = 'ontology'
- TASK = 'tasks'
- PIPELINE = 'pipeline'
- PIPELINE_EXECUTION = 'pipelineState'
- COMPOSITION = 'composition'
- FEATURE = 'feature_vectors'
- FEATURE_SET = 'feature_sets'
- ORGANIZATIONS = 'organizations'
- DRIVERS = 'drivers'
- SETTINGS = 'setting'
- RESOURCE_EXECUTION = 'resourceExecution'
- METRICS = 'metrics',
- SERVICE_DRIVER = 'serviceDrivers',
- COMPUTE = 'compute'
+ RECIPE = "recipe"
+ DATASET = "datasets"
+ ONTOLOGY = "ontology"
+ TASK = "tasks"
+ PIPELINE = "pipeline"
+ PIPELINE_EXECUTION = "pipelineState"
+ COMPOSITION = "composition"
+ FEATURE = "feature_vectors"
+ FEATURE_SET = "feature_sets"
+ ORGANIZATIONS = "organizations"
+ DRIVERS = "drivers"
+ SETTINGS = "setting"
+ RESOURCE_EXECUTION = "resourceExecution"
+ METRICS = ("metrics",)
+ SERVICE_DRIVER = ("serviceDrivers",)
+ COMPUTE = "compute"


  class FiltersOperations(str, Enum):
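
Beyond quoting, the only members that change are METRICS and SERVICE_DRIVER: their old trailing commas already made the assigned values one-element tuples, and the formatter simply spells the tuple out. For a str + Enum mixin this is behaviorally identical, since the tuple is unpacked into the str constructor; a small standalone demonstration (Demo is an illustrative class, not part of dtlpy):

    from enum import Enum

    class Demo(str, Enum):
        A = "metrics",       # implicit one-element tuple (old style)
        B = ("drivers",)     # explicit tuple (new style)

    # The tuple is passed to str(), so the member value is the bare string either way
    assert Demo.A.value == "metrics"
    assert Demo.B.value == "drivers"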
@@ -61,7 +61,7 @@ class FiltersOperations(str, Enum):
  LESS_THAN = "lt"
  EXISTS = "exists"
  MATCH = "match"
- NIN = 'nin'
+ NIN = "nin"


  class FiltersMethod(str, Enum):
@@ -80,21 +80,19 @@ class Filters:
  """

  def __init__(
- self,
- field=None,
- values=None,
- operator: FiltersOperations = None,
- method: FiltersMethod = None,
- custom_filter=None,
- resource: FiltersResource = FiltersResource.ITEM,
- use_defaults=True,
- context=None,
- page_size=None,
+ self,
+ field=None,
+ values=None,
+ operator: FiltersOperations = None,
+ method: FiltersMethod = None,
+ custom_filter=None,
+ resource: FiltersResource = FiltersResource.ITEM,
+ use_defaults=True,
+ context=None,
+ page_size=None,
  ):
  if page_size is None:
- if resource in [FiltersResource.EXECUTION,
- FiltersResource.PIPELINE_EXECUTION,
- FiltersResource.DPK]:
+ if resource in [FiltersResource.EXECUTION, FiltersResource.PIPELINE_EXECUTION, FiltersResource.DPK]:
  page_size = 100
  else:
  page_size = 1000
@@ -103,7 +101,7 @@ class Filters:
  self.and_filter_list = list()
  self._unique_fields = list()
  self.custom_filter = custom_filter
- self.known_operators = ['or', 'and', 'in', 'ne', 'eq', 'gt', 'lt', 'exists']
+ self.known_operators = ["or", "and", "in", "ne", "eq", "gt", "lt", "exists"]
  self._resource = resource
  self.page = 0
  self.page_size = page_size
@@ -113,7 +111,7 @@ class Filters:
  self.recursive = True

  # system only - task and assignment attributes
- self._user_query = 'true'
+ self._user_query = "true"
  self._ref_task = False
  self._ref_assignment = False
  self._ref_op = None
@@ -131,9 +129,11 @@ class Filters:
  def __validate_page_size(self):
  max_page_size = self.__max_page_size
  if self.page_size > max_page_size:
- logger.warning('Cannot list {} with page size greater than {}. Changing page_size to {}.'.format(
- self.resource, max_page_size, max_page_size
- ))
+ logger.warning(
+ "Cannot list {} with page size greater than {}. Changing page_size to {}.".format(
+ self.resource, max_page_size, max_page_size
+ )
+ )
  self.page_size = max_page_size

  @property
@@ -145,7 +145,7 @@ class Filters:

  @property
  def resource(self):
- return f'{self._resource.value}' if isinstance(self._resource, FiltersResource) else f'{self._resource}'
+ return f"{self._resource.value}" if isinstance(self._resource, FiltersResource) else f"{self._resource}"

  @resource.setter
  def resource(self, resource):
@@ -198,8 +198,10 @@ class Filters:
  """
  if method is None:
  method = self.method
- if 'metadata.system.refs.metadata' in field and self.resource == FiltersResource.ITEM:
- logger.warning('Filtering by metadata.system.refs.metadata may cause incorrect results. please use match operator')
+ if "metadata.system.refs.metadata" in field and self.resource == FiltersResource.ITEM:
+ logger.warning(
+ "Filtering by metadata.system.refs.metadata may cause incorrect results. please use match operator"
+ )

  # create SingleFilter object and add to self.filter_list
  if method == FiltersMethod.OR:
@@ -207,17 +209,16 @@ class Filters:
  elif method == FiltersMethod.AND:
  self.__override(field=field, values=values, operator=operator)
  else:
- raise exceptions.PlatformException(error='400',
- message='Unknown method {}, please select from: or/and'.format(method))
+ raise exceptions.PlatformException(
+ error="400", message="Unknown method {}, please select from: or/and".format(method)
+ )

  def __override(self, field, values, operator=None):
  if field in self._unique_fields:
  for i_single_filter, single_filter in enumerate(self.and_filter_list):
  if single_filter.field == field:
  self.and_filter_list.pop(i_single_filter)
- self.and_filter_list.append(
- SingleFilter(field=field, values=values, operator=operator)
- )
+ self.and_filter_list.append(SingleFilter(field=field, values=values, operator=operator))

  def generate_url_query_params(self, url):
  """
@@ -225,13 +226,13 @@ class Filters:

  :param str url:
  """
- url = '{}?'.format(url)
+ url = "{}?".format(url)
  for f in self.and_filter_list:
  if isinstance(f.values, list):
- url = '{}{}={}&'.format(url, f.field, ','.join(f.values))
+ url = "{}{}={}&".format(url, f.field, ",".join(f.values))
  else:
- url = '{}{}={}&'.format(url, f.field, f.values)
- return '{}&pageOffset={}&pageSize={}'.format(url, self.page, self.page_size)
+ url = "{}{}={}&".format(url, f.field, f.values)
+ return "{}&pageOffset={}&pageSize={}".format(url, self.page, self.page_size)

  def has_field(self, field):
  """
@@ -272,15 +273,11 @@ class Filters:
  :param str field: field to pop
  """
  if self.join is not None:
- for single_filter in self.join['filter']['$and']:
+ for single_filter in self.join["filter"]["$and"]:
  if field in single_filter:
- self.join['filter']['$and'].remove(single_filter)
+ self.join["filter"]["$and"].remove(single_filter)

- def add_join(self, field,
- values,
- operator: FiltersOperations = None,
- method: FiltersMethod = FiltersMethod.AND
- ):
+ def add_join(self, field, values, operator: FiltersOperations = None, method: FiltersMethod = FiltersMethod.AND):
  """
  join a query to the filter

@@ -296,43 +293,42 @@ class Filters:
  filter.add_join(field='metadata.user', values=['1','2'], operator=dl.FiltersOperations.IN)
  """
  if self.resource not in [FiltersResource.ITEM, FiltersResource.ANNOTATION]:
- raise exceptions.PlatformException(error='400',
- message='Cannot join to {} filters'.format(self.resource))
+ raise exceptions.PlatformException(error="400", message="Cannot join to {} filters".format(self.resource))

  if self.join is None:
  self.join = dict()
- if 'on' not in self.join:
+ if "on" not in self.join:
  if self.resource == FiltersResource.ITEM:
- self.join['on'] = {'resource': FiltersResource.ANNOTATION.value, 'local': 'itemId', 'forigen': 'id'}
+ self.join["on"] = {"resource": FiltersResource.ANNOTATION.value, "local": "itemId", "forigen": "id"}
  else:
- self.join['on'] = {'resource': FiltersResource.ITEM.value, 'local': 'id', 'forigen': 'itemId'}
- if 'filter' not in self.join:
- self.join['filter'] = dict()
- join_method = '$' + method
- if join_method not in self.join['filter']:
- self.join['filter'][join_method] = list()
- self.join['filter'][join_method].append(SingleFilter(field=field, values=values, operator=operator).prepare())
+ self.join["on"] = {"resource": FiltersResource.ITEM.value, "local": "id", "forigen": "itemId"}
+ if "filter" not in self.join:
+ self.join["filter"] = dict()
+ join_method = "$" + method
+ if join_method not in self.join["filter"]:
+ self.join["filter"][join_method] = list()
+ self.join["filter"][join_method].append(SingleFilter(field=field, values=values, operator=operator).prepare())

  def __add_defaults(self):
  if self._use_defaults:
  # add items defaults
  if self.resource == FiltersResource.ITEM:
- self._unique_fields = ['type', 'hidden']
- self.add(field='hidden', values=False, method=FiltersMethod.AND)
- self.add(field='type', values='file', method=FiltersMethod.AND)
+ self._unique_fields = ["type", "hidden"]
+ self.add(field="hidden", values=False, method=FiltersMethod.AND)
+ self.add(field="type", values="file", method=FiltersMethod.AND)
  # add service defaults
  elif self.resource == FiltersResource.SERVICE:
- self._unique_fields = ['global']
- self.add(field='global', values=True, operator=FiltersOperations.NOT_EQUAL, method=FiltersMethod.AND)
+ self._unique_fields = ["global"]
+ self.add(field="global", values=True, operator=FiltersOperations.NOT_EQUAL, method=FiltersMethod.AND)
  elif self.resource == FiltersResource.PACKAGE:
- self._unique_fields = ['global']
- self.add(field='global', values=True, operator=FiltersOperations.NOT_EQUAL, method=FiltersMethod.AND)
+ self._unique_fields = ["global"]
+ self.add(field="global", values=True, operator=FiltersOperations.NOT_EQUAL, method=FiltersMethod.AND)
  # add annotations defaults
  elif self.resource == FiltersResource.ANNOTATION:
- self._unique_fields = ['type']
+ self._unique_fields = ["type"]
  values = [annotation_type.value for annotation_type in entities.AnnotationType]
  values.remove(entities.AnnotationType.NOTE.value)
- self.add(field='type', values=values, operator=FiltersOperations.IN, method=FiltersMethod.AND)
+ self.add(field="type", values=values, operator=FiltersOperations.IN, method=FiltersMethod.AND)

  def __generate_query(self):
  filters_dict = dict()
@@ -341,27 +337,40 @@ class Filters:
  or_filters = list()
  for single_filter in self.or_filter_list:
  or_filters.append(
- single_filter.prepare(recursive=self.recursive and self.resource == FiltersResource.ITEM))
- filters_dict['$or'] = or_filters
+ single_filter.prepare(recursive=self.recursive and self.resource == FiltersResource.ITEM)
+ )
+ filters_dict["$or"] = or_filters

  if len(self.and_filter_list) > 0:
  and_filters = list()
  for single_filter in self.and_filter_list:
  and_filters.append(
- single_filter.prepare(recursive=self.recursive and self.resource == FiltersResource.ITEM))
- filters_dict['$and'] = and_filters
+ single_filter.prepare(recursive=self.recursive and self.resource == FiltersResource.ITEM)
+ )
+ filters_dict["$and"] = and_filters

  return filters_dict

  def __generate_custom_query(self):
- filters_dict = dict()
- if 'filter' in self.custom_filter or 'join' in self.custom_filter:
- if 'filter' in self.custom_filter:
- filters_dict = self.custom_filter['filter']
- self.join = self.custom_filter.get('join', self.join)
+ if "filter" not in self.custom_filter:
+ query_dict = {"filter": self.custom_filter}
  else:
- filters_dict = self.custom_filter
- return filters_dict
+ query_dict = self.custom_filter
+ if "resource" not in query_dict:
+ query_dict["resource"] = self.resource
+ if "page" not in query_dict:
+ query_dict["page"] = self.page
+ if "pageSize" not in query_dict:
+ query_dict["pageSize"] = self.page_size
+ if self.join is not None and 'join' not in query_dict:
+ query_dict["join"] = self.join
+ if "join" in query_dict and "on" not in query_dict["join"]:
+ if self.resource == FiltersResource.ITEM:
+ query_dict["join"]["on"] = {"resource": FiltersResource.ANNOTATION.value, "local": "itemId", "forigen": "id"}
+ else:
+ query_dict["join"]["on"] = {"resource": FiltersResource.ITEM.value, "local": "id", "forigen": "itemId"}
+
+ return query_dict

  def __generate_ref_query(self):
@@ -371,7 +380,7 @@ class Filters:
  self._ref_task_id = [self._ref_task_id]

  for ref_id in self._ref_task_id:
- task_refs.append({'type': 'task', 'id': ref_id})
+ task_refs.append({"type": "task", "id": ref_id})

  refs += task_refs

@@ -381,7 +390,7 @@ class Filters:
  self._ref_assignment_id = [self._ref_assignment_id]

  for ref_id in self._ref_assignment_id:
- assignment_refs.append({'type': 'assignment', 'id': ref_id})
+ assignment_refs.append({"type": "assignment", "id": ref_id})

  refs += assignment_refs

@@ -404,62 +413,59 @@ class Filters:
  ########
  _json = dict()

- if self.custom_filter is None:
- _json['filter'] = self.__generate_query()
- else:
- _json['filter'] = self.__generate_custom_query()
+ if self.custom_filter is not None:
+ _json = self.__generate_custom_query()
+ return _json
+
+ _json["filter"] = self.__generate_query()

  ##################
  # filter options #
  ##################
  if not query_only:
  if len(self.sort) > 0:
- _json['sort'] = self.sort
+ _json["sort"] = self.sort

  self.__validate_page_size()

- _json['page'] = self.page
- _json['pageSize'] = self.page_size
- _json['resource'] = self.resource
+ _json["page"] = self.page
+ _json["pageSize"] = self.page_size
+ _json["resource"] = self.resource

  ########
  # join #
  ########
  if self.join is not None:
- _json['join'] = self.join
+ _json["join"] = self.join

  #####################
  # operation or refs #
  #####################
  if self._ref_assignment or self._ref_task:
- _json['references'] = {
- 'operation': self._ref_op,
- 'refs': self.__generate_ref_query()
- }
+ _json["references"] = {"operation": self._ref_op, "refs": self.__generate_ref_query()}
  elif operation is not None:
- if operation == 'update':
+ if operation == "update":
  if update:
- _json[operation] = {'metadata': {'user': update}}
+ _json[operation] = {"metadata": {"user": update}}
  else:
  _json[operation] = dict()
  if system_metadata and system_update:
- _json['systemSpace'] = True
- _json[operation]['metadata'] = _json[operation].get('metadata', dict())
- _json[operation]['metadata']['system'] = system_update
- elif operation == 'delete':
+ _json["systemSpace"] = True
+ _json[operation]["metadata"] = _json[operation].get("metadata", dict())
+ _json[operation]["metadata"]["system"] = system_update
+ elif operation == "delete":
  _json[operation] = True
- _json.pop('sort', None)
+ _json.pop("sort", None)
  if self.resource == FiltersResource.ITEM:
- _json.pop('page', None)
- _json.pop('pageSize', None)
+ _json.pop("page", None)
+ _json.pop("pageSize", None)
  else:
- raise exceptions.PlatformException(error='400',
- message='Unknown operation: {}'.format(operation))
+ raise exceptions.PlatformException(error="400", message="Unknown operation: {}".format(operation))

  if self.context is not None:
- _json['context'] = self.context
+ _json["context"] = self.context
  if self._system_space is not None:
- _json['systemSpace'] = self._system_space
+ _json["systemSpace"] = self._system_space
  return _json

  def print(self, indent=2):
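
Because prepare() now returns early when a custom_filter is present, a Filters object built from a raw DQL dict yields the fully wrapped query and skips the sort/page/join handling below. A hedged usage sketch of the expected payload shape under the new behavior (exact values depend on the defaults discussed above):

    import dtlpy as dl

    filters = dl.Filters(custom_filter={"$and": [{"dir": "/train"}, {"hidden": False}]})
    payload = filters.prepare()
    # Expected shape:
    # {'filter': {'$and': [{'dir': '/train'}, {'hidden': False}]},
    #  'resource': 'items', 'page': 0, 'pageSize': 1000}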
@@ -479,7 +485,7 @@ class Filters:
  filter.sort_by(field='metadata.user', values=dl.FiltersOrderByDirection.ASCENDING)
  """
  if value not in [FiltersOrderByDirection.ASCENDING, FiltersOrderByDirection.DESCENDING]:
- raise exceptions.PlatformException(error='400', message='Sort can be by ascending or descending order only')
+ raise exceptions.PlatformException(error="400", message="Sort can be by ascending or descending order only")
  self.sort[field] = value.value if isinstance(value, FiltersOrderByDirection) else value

  def platform_url(self, resource) -> str:
@@ -492,21 +498,21 @@ class Filters:
  """
  _json = self.prepare()
  # add the view option
- _json['view'] = 'icons'
+ _json["view"] = "icons"
  # convert from enum to string
  _json["resource"] = f'{_json["resource"]}'
  # convert the dictionary to a json string
- _json['dqlFilter'] = json.dumps({'filter': _json.pop('filter'),
- 'join': _json.pop('join', None),
- 'sort': _json.get('sort', None)})
+ _json["dqlFilter"] = json.dumps(
+ {"filter": _json.pop("filter"), "join": _json.pop("join", None), "sort": _json.get("sort", None)}
+ )
  # set the page size as the UI default
- _json['pageSize'] = 100
- _json['page'] = _json['page']
+ _json["pageSize"] = 100
+ _json["page"] = _json["page"]
  # build the url for the dataset data browser
  if isinstance(resource, entities.Dataset):
- url = resource.platform_url + f'?{urllib.parse.urlencode(_json)}'
+ url = resource.platform_url + f"?{urllib.parse.urlencode(_json)}"
  else:
- raise NotImplementedError('Not implemented for resource type: {}'.format(type(resource)))
+ raise NotImplementedError("Not implemented for resource type: {}".format(type(resource)))
  return url

  def open_in_web(self, resource):
@@ -518,7 +524,7 @@ class Filters:
  if isinstance(resource, entities.Dataset):
  resource._client_api._open_in_web(url=self.platform_url(resource=resource))
  else:
- raise NotImplementedError('Not implemented for resource type: {}'.format(type(resource)))
+ raise NotImplementedError("Not implemented for resource type: {}".format(type(resource)))

  def save(self, project: entities.Project, filter_name: str):
  """
@@ -529,32 +535,29 @@ class Filters:
  :return: True if success
  """
  _json_filter = self.prepare()
- shebang_dict = {"type": "dql",
- "shebang": "dataloop",
- "metadata": {
- "version": "1.0.0",
- "system": {
- "mimetype": "dql"
- },
- "dltype": "filter",
- "filterFieldsState": [],
- "resource": "items",
- "filter": _json_filter.pop('filter'),
- "join": _json_filter.pop('join')
- }
- }
+ shebang_dict = {
+ "type": "dql",
+ "shebang": "dataloop",
+ "metadata": {
+ "version": "1.0.0",
+ "system": {"mimetype": "dql"},
+ "dltype": "filter",
+ "filterFieldsState": [],
+ "resource": "items",
+ "filter": _json_filter.pop("filter"),
+ "join": _json_filter.pop("join"),
+ },
+ }
  b_dataset = project.datasets._get_binaries_dataset()
  byte_io = io.BytesIO()
  byte_io.name = filter_name
  byte_io.write(json.dumps(shebang_dict).encode())
  byte_io.seek(0)
- b_dataset.items.upload(local_path=byte_io,
- remote_path='/.dataloop/dqlfilters/items',
- remote_name=filter_name)
+ b_dataset.items.upload(local_path=byte_io, remote_path="/.dataloop/dqlfilters/items", remote_name=filter_name)
  return True

  @classmethod
- def load(cls, project: entities.Project, filter_name: str) -> 'Filters':
+ def load(cls, project: entities.Project, filter_name: str) -> "Filters":
  """
  Load a saved filter from the project by name

@@ -563,20 +566,23 @@ class Filters:
  :return: dl.Filters
  """
  b_dataset = project.datasets._get_binaries_dataset()
- f = entities.Filters(custom_filter={
- 'filter': {'$and': [{'filename': f'/.dataloop/dqlfilters/items/{filter_name}'}]},
- 'page': 0,
- 'pageSize': 1000,
- 'resource': 'items'
- })
+ f = entities.Filters(
+ custom_filter={
+ "filter": {"$and": [{"filename": f"/.dataloop/dqlfilters/items/{filter_name}"}]},
+ "page": 0,
+ "pageSize": 1000,
+ "resource": "items",
+ }
+ )
  pages = b_dataset.items.list(filters=f)
  if pages.items_count == 0:
  raise exceptions.NotFound(
- f'Saved filter not found: {filter_name}. Run `Filters.list()` to list existing filters')
+ f"Saved filter not found: {filter_name}. Run `Filters.list()` to list existing filters"
+ )
  with open(pages.items[0].download()) as f:
  data = json.load(f)
- custom_filter = data['metadata']['filter']
- custom_filter['join'] = data['metadata']['join']
+ custom_filter = data["metadata"]["filter"]
+ custom_filter["join"] = data["metadata"]["join"]
  return cls(custom_filter=custom_filter)

  @staticmethod
@@ -587,9 +593,7 @@ class Filters:
  :return: a list of all the saved filters' names
  """
  b_dataset = project.datasets._get_binaries_dataset()
- f = entities.Filters(use_defaults=False,
- field='dir',
- values='/.dataloop/dqlfilters/items')
+ f = entities.Filters(use_defaults=False, field="dir", values="/.dataloop/dqlfilters/items")
  pages = b_dataset.items.list(filters=f)
  all_filter_items = list(pages.all())
  names = [i.name for i in all_filter_items]
@@ -604,11 +608,11 @@ class SingleFilter:

  @staticmethod
  def __add_recursive(value):
- if not value.endswith('*') and not os.path.splitext(value)[-1].startswith('.'):
- if value.endswith('/'):
- value = value + '**'
+ if not value.endswith("*") and not os.path.splitext(value)[-1].startswith("."):
+ if value.endswith("/"):
+ value = value + "**"
  else:
- value = value + '/**'
+ value = value + "/**"
  return value

  def prepare(self, recursive=False):
@@ -620,7 +624,7 @@ class SingleFilter:
  _json = dict()
  values = self.values

- if recursive and self.field == 'filename':
+ if recursive and self.field == "filename":
  if isinstance(values, str):
  values = self.__add_recursive(value=values)
  elif isinstance(values, list):
@@ -632,7 +636,7 @@ class SingleFilter:
  else:
  value = dict()
  op = self.operator.value if isinstance(self.operator, FiltersOperations) else self.operator
- value['${}'.format(op)] = values
+ value["${}".format(op)] = values
  _json[self.field] = value

  return _json