dtlpy 1.105.6__py3-none-any.whl → 1.107.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -46,7 +46,9 @@ class Downloader:
  export_png_files=False,
  filter_output_annotations=False,
  alpha=1,
- export_version=entities.ExportVersion.V1
+ export_version=entities.ExportVersion.V1,
+ dataset_lock=False,
+ lock_timeout_sec=None
  ):
  """
  Download dataset by filters.
@@ -72,6 +74,8 @@ class Downloader:
  :param filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
  :param alpha: opacity value [0 1], default 1
  :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
+ :param bool dataset_lock: optional - default = False
+ :param int lock_timeout_sec: optional
  :return: Output (list)
  """

@@ -195,7 +199,9 @@ class Downloader:
  'include_annotations_in_output': include_annotations_in_output,
  'export_png_files': export_png_files,
  'filter_output_annotations': filter_output_annotations,
- 'export_version': export_version
+ 'export_version': export_version,
+ 'dataset_lock': dataset_lock,
+ 'lock_timeout_sec': lock_timeout_sec
  })
  ###############
  # downloading #
@@ -361,7 +367,9 @@ class Downloader:
  include_annotations_in_output=True,
  export_png_files=False,
  filter_output_annotations=False,
- export_version=entities.ExportVersion.V1
+ export_version=entities.ExportVersion.V1,
+ dataset_lock=False,
+ lock_timeout_sec=None
  ):
  """
  Download annotations json for entire dataset
@@ -375,6 +383,8 @@ class Downloader:
  :param export_png_files: default - if True, semantic annotations should be exported as png files
  :param filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
  :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
+ :param bool dataset_lock: optional - default = False
+ :param int lock_timeout_sec: optional
  :return:
  """
  local_path = os.path.join(local_path, "json")
@@ -397,6 +407,11 @@ class Downloader:
  if annotation_filters is not None:
  payload['annotationsQuery'] = annotation_filters.prepare()
  payload['annotations']['filter'] = filter_output_annotations
+ if dataset_lock:
+ payload['datasetLock'] = dataset_lock
+
+ if lock_timeout_sec:
+ payload['lockTimeoutSec'] = lock_timeout_sec

  success, response = dataset._client_api.gen_request(req_type='post',
  path='/datasets/{}/export'.format(dataset.id),
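Note on the hunk above: both new keys are added to the export request body only when the corresponding argument is truthy, so `dataset_lock=False` or `lock_timeout_sec=0` is never sent. A minimal sketch of that behavior (the base payload keys here are illustrative, not from this diff):

    # Sketch: how dataset_lock / lock_timeout_sec land in the export payload.
    dataset_lock = True
    lock_timeout_sec = 300  # assumed to be a timeout in seconds

    payload = {'annotations': {'filter': False}}  # illustrative base payload
    if dataset_lock:
        payload['datasetLock'] = dataset_lock
    if lock_timeout_sec:
        payload['lockTimeoutSec'] = lock_timeout_sec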
@@ -730,19 +745,15 @@ class Downloader:
  one_file_pbar.update(len(chunk))
  except Exception as err:
  pass
- file_validation, start_point, chunk_resume = self.__get_next_chunk(item=item,
- download_progress=temp_file_path,
- chunk_resume=chunk_resume)
+
+ file_validation = True
+ if not is_url:
+ file_validation, start_point, chunk_resume = self.__get_next_chunk(item=item,
+ download_progress=temp_file_path,
+ chunk_resume=chunk_resume)
  if file_validation:
  shutil.move(temp_file_path, local_filepath)
  download_done = True
- else:
- if not is_url:
- continue
- else:
- raise PlatformException(
- error="400",
- message='Downloaded file is corrupted. Please try again. If the issue repeats please contact support.')
  except Exception as err:
  if os.path.isfile(temp_file_path):
  os.remove(temp_file_path)
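The hunk above changes how a finished download attempt is validated: link (URL) items now skip chunk validation entirely instead of raising a "file is corrupted" error, while regular items still go through the resume check. A rough sketch of the new flow, with a stand-in for the private __get_next_chunk helper:

    # Sketch only: names mirror the hunk above, get_next_chunk is a stand-in.
    def finalize_attempt(is_url, item, temp_file_path, chunk_resume, get_next_chunk):
        file_validation = True
        if not is_url:  # URL/link items are no longer chunk-validated
            file_validation, start_point, chunk_resume = get_next_chunk(
                item=item, download_progress=temp_file_path, chunk_resume=chunk_resume)
        return file_validation  # True -> move the temp file into place and finish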
@@ -55,12 +55,15 @@ class Integrations:
  def delete(self,
  integrations_id: str,
  sure: bool = False,
- really: bool = False) -> bool:
+ really: bool = False,
+ organization_id: str = None
+ ) -> bool:
  """
  Delete integrations from the organization.

  **Prerequisites**: You must be an organization *owner* to delete an integration.

+ :param organization_id: organization id
  :param str integrations_id: integrations id
  :param bool sure: Are you sure you want to delete?
  :param bool really: Really really sure?
@@ -74,11 +77,12 @@ class Integrations:
  project.integrations.delete(integrations_id='integrations_id', sure=True, really=True)
  """
  if sure and really:
- if self.project is None and self.org is None:
+ if self.project is None and self.org is None and organization_id is None:
  raise exceptions.PlatformException(
  error='400',
  message='Must provide an identifier in inputs')

+ if organization_id is None:
  if self.project is not None:
  organization_id = self.project.org.get('id')
  else:
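A minimal usage sketch for the new organization_id argument on delete (names and IDs are placeholders); when it is passed, it takes precedence over the project's or organization's own id:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')      # placeholder project
    project.integrations.delete(integrations_id='integration-id',
                                organization_id='org-id',      # overrides the project's org
                                sure=True, really=True)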
@@ -101,7 +105,9 @@ class Integrations:
  integrations_type: entities.IntegrationType,
  name: str,
  options: dict,
- metadata: dict = None):
+ metadata: dict = None,
+ organization_id: str = None,
+ ):
  """
  Create an integration between an external storage and the organization.

@@ -123,6 +129,7 @@ class Integrations:
  :param str name: integrations name
  :param dict options: dict of storage secrets
  :param dict metadata: metadata
+ :param str organization_id: organization id
  :return: success
  :rtype: bool

@@ -135,15 +142,16 @@ class Integrations:
  options={key: "Access key ID", secret: "Secret access key"})
  """

- if self.project is None and self.org is None:
+ if self.project is None and self.org is None and organization_id is None:
  raise exceptions.PlatformException(
  error='400',
  message='Must have an organization or project')

- if self.project is not None:
- organization_id = self.project.org.get('id')
- else:
- organization_id = self.org.id
+ if organization_id is None:
+ if self.project is not None:
+ organization_id = self.project.org.get('id')
+ else:
+ organization_id = self.org.id

  url_path = '/orgs/{}/integrations'.format(organization_id)
  payload = {"type": integrations_type.value if isinstance(integrations_type,
@@ -170,7 +178,9 @@ class Integrations:
  new_name: str = None,
  integrations_id: str = None,
  integration: entities.Integration = None,
- new_options: dict = None):
+ new_options: dict = None,
+ organization_id: str = None,
+ ):
  """
  Update the integration's name.

@@ -180,6 +190,7 @@ class Integrations:
  :param str integrations_id: integrations id
  :param Integration integration: integration object
  :param dict new_options: new value
+ :param str organization_id: organization id
  :return: Integration object
  :rtype: dtlpy.entities.integration.Integration

@@ -198,7 +209,8 @@ class Integrations:

  project.integrations.update(integrations_id='integrations_id', new_options={roleArn: ""})
  """
- if self.project is None and self.org is None:
+
+ if self.project is None and self.org is None and organization_id is None:
  raise exceptions.PlatformException(
  error='400',
  message='Must have an organization or project')
@@ -207,10 +219,11 @@ class Integrations:
  error='400',
  message='Must have an integrations_id or integration')

- if self.project is not None:
- organization_id = self.project.org.get('id')
- else:
- organization_id = self.org.id
+ if organization_id is None:
+ if self.project is not None:
+ organization_id = self.project.org.get('id')
+ else:
+ organization_id = self.org.id

  url_path = '/orgs/{}/integrations/'.format(organization_id)
  payload = dict(integrationId=integrations_id if integrations_id is not None else integration.id)
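A minimal sketch of update with the new organization_id argument (IDs and names are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')      # placeholder project
    integration = project.integrations.update(integrations_id='integration-id',
                                               new_name='renamed-integration',
                                               organization_id='org-id')  # target org explicitly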
@@ -230,13 +243,14 @@ class Integrations:
  return entities.Integration.from_json(_json=response.json(), client_api=self._client_api)

  @_api_reference.add(path='/orgs/{orgId}/integrations/{integrationId}', method='get')
- def get(self, integrations_id: str):
+ def get(self, integrations_id: str, organization_id: str = None):
  """
  Get organization integrations. Use this method to access your integration and be able to use it in your code.

  **Prerequisites**: You must be an *owner* in the organization.

  :param str integrations_id: integrations id
+ :param str organization_id: organization id
  :return: Integration object
  :rtype: dtlpy.entities.integration.Integration

@@ -246,15 +260,16 @@ class Integrations:

  project.integrations.get(integrations_id='integrations_id')
  """
- if self.project is None and self.org is None:
+ if self.project is None and self.org is None and organization_id is None:
  raise exceptions.PlatformException(
  error='400',
  message='Must have an organization or project')

- if self.project is not None:
- organization_id = self.project.org.get('id')
- else:
- organization_id = self.org.id
+ if organization_id is None:
+ if self.project is not None:
+ organization_id = self.project.org.get('id')
+ else:
+ organization_id = self.org.id

  url_path = '/orgs/{}/integrations/{}'.format(organization_id, integrations_id)

@@ -265,13 +280,14 @@ class Integrations:
  return entities.Integration.from_json(_json=response.json(), client_api=self._client_api)

  @_api_reference.add(path='/orgs/{orgId}/integrations', method='get')
- def list(self, only_available=False):
+ def list(self, only_available=False, organization_id: str = None):
  """
  List all the organization's integrations with external storage.

  **Prerequisites**: You must be an *owner* in the organization.

  :param bool only_available: if True list only the available integrations.
+ :param str organization_id: organization id
  :return: groups list
  :rtype: list

@@ -281,15 +297,16 @@ class Integrations:

  project.integrations.list(only_available=True)
  """
- if self.project is None and self.org is None:
+ if self.project is None and self.org is None and organization_id is None:
  raise exceptions.PlatformException(
  error='400',
  message='Must have an organization or project')

- if self.project is not None:
- organization_id = self.project.org.get('id')
- else:
- organization_id = self.org.id
+ if organization_id is None:
+ if self.project is not None:
+ organization_id = self.project.org.get('id')
+ else:
+ organization_id = self.org.id

  if only_available:
  url_path = '/orgs/{}/availableIntegrations'.format(organization_id)
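Across get and list the pattern is the same: an explicit organization_id is used as-is, and the project/org context is only consulted when it is omitted. A minimal sketch (IDs and names are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')      # placeholder project
    integration = project.integrations.get(integrations_id='integration-id',
                                           organization_id='other-org-id')
    all_integrations = project.integrations.list(only_available=False,
                                                 organization_id='other-org-id')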
@@ -304,7 +321,8 @@ class Integrations:
  available_integrations = miscellaneous.List(response.json())
  return available_integrations

- def generate_gar_options(self, service_account: str, location: str) -> dict:
+ @staticmethod
+ def generate_gar_options(service_account: str, location: str, email: str = None) -> dict:
  """
  Generates a Google Artifact Registry JSON configuration and returns it as a base64-encoded string.

@@ -320,27 +338,119 @@ class Integrations:
  Returns:
  str: A base64-encoded string representation of the repository JSON configuration.
  """
- if not service_account:
- raise ValueError('Missing Service Account')
- if not location:
- raise ValueError('Missing Location')
- user_name = "_json_key"
- cred = f"{user_name}:{service_account}"
- auth = str(base64.b64encode(bytes(cred, 'utf-8')))[2:-1]
+ return IntegrationUtils.generate_gar_options(service_account=service_account, location=location, email=email)
+
+ @staticmethod
+ def generate_docker_hub_options(username: str, password: str, email: str = None) -> dict:
+ """
+ Generates a Docker Hub JSON configuration and returns it as a base64-encoded string.
+
+ Parameters:
+ username (str): The Docker Hub username.
+ password (str): The Docker Hub password.
+ email (str): Optional - Docker Hub email.
+
+ Returns:
+ str: A base64-encoded string representation of the repository JSON configuration.
+ """
+ return IntegrationUtils.generate_docker_hub_options(username=username, password=password, email=email)
+
+ @staticmethod
+ def generate_ecr_options(access_key_id: str, secret_access_key: str, account: str, region: str) -> dict:
+ """
+ Generates an Amazon Elastic Container Registry (ECR) JSON configuration and returns it as a base64-encoded string.
+
+ Parameters:
+ access_key_id (str): The AWS access key ID.
+ secret_access_key (str): The AWS secret access key.
+ account (str): The AWS account ID.
+ region (str): The AWS region.
+
+ Returns:
+ str: A base64-encoded string representation of the repository JSON configuration.
+ """
+ return IntegrationUtils.generate_ecr_options(
+ access_key_id=access_key_id,
+ secret_access_key=secret_access_key,
+ account=account,
+ region=region
+ )
+
 
+ class IntegrationUtils:
+
+ @staticmethod
+ def encode(st: str):
+ return str(base64.b64encode(bytes(st, 'utf-8')))[2:-1]
+
+ @staticmethod
+ def generate_json_key_options(location: str, username: str, password: str, auth: str, email: str = None):
  encoded_pass = {
  "auths": {
  f"{location}": {
- "username": user_name,
- "password": service_account,
+ "username": username,
+ "password": password,
  "auth": auth
  }
  }
  }

+ if email:
+ encoded_pass['auths'][f'{location}']['email'] = email
+
  return {
  "name": "_json_key",
  "spec": {
- "password": str(base64.b64encode(bytes(json.dumps(encoded_pass), 'utf-8')))[2:-1]
+ "password": IntegrationUtils.encode(json.dumps(encoded_pass))
+ }
+ }
+
+ @staticmethod
+ def generate_gar_options(service_account: str, location: str, email: str = None) -> dict:
+
+ if not service_account:
+ raise ValueError('Missing Service Account')
+ if not location:
+ raise ValueError('Missing Location')
+
+ username = "_json_key"
+ cred = f"{username}:{service_account}"
+ auth = IntegrationUtils.encode(cred)
+
+ return IntegrationUtils.generate_json_key_options(
+ location=location,
+ username=username,
+ password=service_account,
+ auth=auth,
+ email=email
+ )
+
+ @staticmethod
+ def generate_docker_hub_options(username: str, password: str, email: str = None) -> dict:
+
+ if not username:
+ raise ValueError('Missing Username')
+ if not password:
+ raise ValueError('Missing Password')
+
+ auth = IntegrationUtils.encode('{}:{}'.format(username, password))
+
+ return IntegrationUtils.generate_json_key_options(
+ location='docker.io',
+ username=username,
+ password=password,
+ auth=auth,
+ email=email
+ )
+
+ @staticmethod
+ def generate_ecr_options(access_key_id: str, secret_access_key: str, account: str, region: str) -> dict:
+ return {
+ "name": "AWS",
+ "spec": {
+ "accessKeyId": access_key_id,
+ "secretAccessKey": secret_access_key,
+ "account": account,
+ "region": region,
  }
  }
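The generators shown above are now static, so they can be called straight from an Integrations repository without touching its project/org binding, and the existing GAR generator is refactored onto the shared IntegrationUtils helpers. A minimal sketch of producing the three option payloads (all credentials and IDs are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')      # placeholder project
    gar_options = project.integrations.generate_gar_options(
        service_account='<service-account-json-string>',       # placeholder JSON key
        location='us-central1-docker.pkg.dev',
        email='user@example.com')
    docker_options = project.integrations.generate_docker_hub_options(
        username='docker-user', password='docker-password')
    ecr_options = project.integrations.generate_ecr_options(
        access_key_id='AKIA...', secret_access_key='...',       # placeholder AWS credentials
        account='123456789012', region='us-east-1')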
@@ -527,7 +527,9 @@ class Items:
  export_png_files: bool = False,
  filter_output_annotations: bool = False,
  alpha: float = 1,
- export_version=entities.ExportVersion.V1
+ export_version=entities.ExportVersion.V1,
+ dataset_lock: bool = False,
+ lock_timeout_sec: int = None
  ):
  """
  Download dataset items by filters.
@@ -547,6 +549,8 @@ class Items:
  :param list annotation_options: download annotations options: list(dl.ViewAnnotationOptions)
  :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
  :param bool overwrite: optional - default = False
+ :param bool dataset_lock: optional - default = False
+ :param int lock_timeout_sec: optional
  :param bool to_items_folder: Create 'items' folder and download items to it
  :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
  :param bool with_text: optional - add text to annotations, default = False
@@ -593,7 +597,9 @@ class Items:
  include_annotations_in_output=include_annotations_in_output,
  export_png_files=export_png_files,
  filter_output_annotations=filter_output_annotations,
- export_version=export_version
+ export_version=export_version,
+ dataset_lock=dataset_lock,
+ lock_timeout_sec=lock_timeout_sec
  )

  def upload(
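Items.download is the public entry point that forwards the new flags to the Downloader shown earlier. A minimal usage sketch (dataset id, path, and the timeout semantics are placeholders/assumptions):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='dataset-id')         # placeholder dataset
    dataset.items.download(local_path='/tmp/my-dataset',
                           annotation_options=dl.ViewAnnotationOptions.JSON,
                           dataset_lock=True,                  # lock the dataset during the export
                           lock_timeout_sec=300)               # assumed: lock timeout in seconds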
@@ -869,3 +875,38 @@ class Items:
  raise exceptions.PlatformException(response)

  return success
+
+ def task_scores(self, item_id: str, task_id: str, page_offset: int = 0, page_size: int = 100):
+ """
+ Get item score
+
+ **Prerequisites**: You must be able to read the task
+
+ :param str item_id: item id
+ :param str task_id: task id
+ :param int page_offset: start page
+ :param int page_size: page size
+ :return: page of item scores
+
+ **Example**:
+
+ .. code-block:: python
+
+ dataset.items.item_score(item_id='item_id', task_id='task_id')
+
+ """
+
+ if item_id is None:
+ raise exceptions.PlatformException('400', 'Must provide item id')
+
+ if task_id is None:
+ raise exceptions.PlatformException('400', 'Must provide task id')
+
+ success, response = self._client_api.gen_request(req_type="get",
+ path="/scores/tasks/{}/items/{}?page={}&pageSize={}"
+ .format(task_id, item_id, page_offset, page_size))
+ if success:
+ return response.json()
+ else:
+ raise exceptions.PlatformException(response)
+
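The method added above is named task_scores, even though its docstring example still calls item_score. A minimal usage sketch against the new method (IDs are placeholders):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='dataset-id')         # placeholder dataset
    scores_page = dataset.items.task_scores(item_id='item-id',
                                            task_id='task-id',
                                            page_offset=0, page_size=100)
    # the call returns the raw JSON page of per-item scores from the scores endpoint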
@@ -432,14 +432,6 @@ class Projects:
  assert isinstance(project, entities.Project)
  if checkout:
  self.checkout(project=project)
- if project.id not in self._client_api.platform_settings.working_projects:
- self._client_api.platform_settings.add_project(project.id)
- try:
- settings_list = project.settings.resolve(user_email=self._client_api.info()['user_email'],
- project_id=project.id)
- self._client_api.platform_settings.add_bulk(settings_list)
- except:
- logger.warning("failed to add project settings")
  return project

  @_api_reference.add(path='/projects/{projectId}', method='delete')
@@ -112,12 +112,6 @@ class Settings:
  else:
  raise exceptions.PlatformException(response)

- # add settings to cookies
- self._client_api.platform_settings.add(setting.name,
- {
- setting.scope.id: setting.value
- }
- )
  return constructor(
  _json=_json,
  client_api=self._client_api,
@@ -172,13 +166,6 @@ class Settings:
  else:
  raise exceptions.PlatformException(response)

- # add settings to cookies
- self._client_api.platform_settings.add(setting.name,
- {
- setting.scope.id: setting.value
- }
- )
-
  return constructor(
  _json=_json,
  client_api=self._client_api,
@@ -2,6 +2,7 @@ import datetime
  import logging
  import json
  from typing import Union, List
+ import warnings

  from .. import exceptions, miscellaneous, entities, repositories, _api_reference
  from ..services.api_client import ApiClient
@@ -426,7 +427,7 @@ class Tasks:
  **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who created that task.

  :param dtlpy.entities.task.Task task: the task object
- :param bool system_metadata: True, if you want to change metadata system
+ :param bool system_metadata: DEPRECATED
  :return: Task object
  :rtype: dtlpy.entities.task.Task

@@ -440,7 +441,7 @@ class Tasks:
  url = '{}/{}'.format(url, task.id)

  if system_metadata:
- url += '?system=true'
+ warnings.warn("Task system metadata updates are not permitted. Please store custom metadata in 'task.metadata['user']' instead.", DeprecationWarning)

  success, response = self._client_api.gen_request(req_type='patch',
  path=url,
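Because system_metadata now only triggers a DeprecationWarning and the ?system=true flag is never sent, task-level custom fields should live under the task's user metadata. A hedged sketch (IDs and keys are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')       # placeholder project
    task = project.tasks.get(task_id='task-id')                # placeholder task id
    if task.metadata is None:
        task.metadata = {}
    task.metadata.setdefault('user', {})['my_key'] = 'my_value'
    task = task.update()   # user metadata is persisted; the system scope is no longer patched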
@@ -1089,3 +1090,36 @@ class Tasks:
  success_count=len(updated_items), failed_items=failed_items)
  logger.info(msg=log_msg)
  return True
+
+ def task_scores(self, task_id: str = None, page_offset: int = 0, page_size: int = 100):
+ """
+ Get all entities scores in a task.
+
+ :param str task_id: the id of the task
+ :param int page_offset: the page offset
+ :param int page_size: the page size
+ :return: page of the task scores
+
+ **Example**:
+
+ .. code-block:: python
+
+ dataset.tasks.task_scores(task_id= 'task_id')
+ """
+ if task_id is None:
+ raise exceptions.PlatformException('400', 'Please provide task_id')
+
+ url = '/scores/tasks/{task_id}?page={page_offset}&pageSize={page_size}'.format(
+ task_id=task_id,
+ page_offset=page_offset,
+ page_size=page_size
+ )
+ success, response = self._client_api.gen_request(
+ req_type='get',
+ path=url
+ )
+
+ if success:
+ return response.json()
+ else:
+ raise exceptions.PlatformException(response)
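A minimal usage sketch for the task-level scores method added above, accessed through a project's tasks repository (IDs are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')       # placeholder project
    scores_page = project.tasks.task_scores(task_id='task-id',
                                            page_offset=0, page_size=100)
    # returns the raw JSON page of scores for all entities in the task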
@@ -430,51 +430,6 @@ class Attributes2:
  os.environ["USE_ATTRIBUTE_2"] = json.dumps(val)
  self.to_cookie()

-
- class PlatformSettings:
-
- def __init__(self):
- self._working_projects = list()
- self._settings = dict()
-
- @property
- def settings(self) -> dict:
- return self._settings
-
- @property
- def working_projects(self) -> list:
- return self._working_projects
-
- @settings.setter
- def settings(self, val: dict):
- if not isinstance(val, dict):
- raise exceptions.PlatformException(error=400,
- message="input must be of type dict")
-
- self._settings = val
-
- def add(self, setting_name: str, setting: dict):
- if setting_name in self.settings:
- self._settings[setting_name].update(setting)
- else:
- self._settings[setting_name] = setting
-
- def add_project(self, project_id: str):
- if not isinstance(project_id, str):
- raise exceptions.PlatformException(error=400,
- message="input must be of type str")
- self._working_projects.append(project_id)
-
- def add_bulk(self, settings_list):
- settings_dict = {s.name: {s.scope.id: s.value}
- for s in settings_list}
- for setting_name, settings_val in settings_dict.items():
- if setting_name in self._settings:
- self._settings[setting_name].update(settings_val)
- else:
- self._settings[setting_name] = settings_val
-
-
  class Decorators:
  @staticmethod
  def token_expired_decorator(method):
@@ -522,7 +477,6 @@ class ApiClient:
  self._callbacks = None
  self._cache_state = None
  self._attributes_mode = None
- self._platform_settings = None
  self._cache_configs = None
  self._sdk_cache = None
  self._fetch_entities = None
@@ -793,13 +747,6 @@ class ApiClient:
  assert isinstance(self._attributes_mode, Attributes2)
  return self._attributes_mode

- @property
- def platform_settings(self):
- if self._platform_settings is None:
- self._platform_settings = PlatformSettings()
- assert isinstance(self._platform_settings, PlatformSettings)
- return self._platform_settings
-
  @property
  def sdk_cache(self):
  if self._sdk_cache is None: