supervisely 6.73.460__py3-none-any.whl → 6.73.462__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of supervisely might be problematic. See the registry's advisory page for more details.

@@ -7,7 +7,6 @@ from supervisely.app.widgets.checkbox.checkbox import Checkbox
7
7
  from supervisely.app.widgets.container.container import Container
8
8
  from supervisely.app.widgets.field.field import Field
9
9
  from supervisely.app.widgets.select.select import Select
10
- from supervisely.app.widgets.select_project.select_project import SelectProject
11
10
  from supervisely.app.widgets.tree_select.tree_select import TreeSelect
12
11
  from supervisely.project.project_type import ProjectType
13
12
 
@@ -120,7 +119,15 @@ class SelectDatasetTree(Widget):
120
119
  if self._project_id:
121
120
  project_info = self._api.project.get_info_by_id(self._project_id)
122
121
  if allowed_project_types is not None:
123
- if project_info.type not in [pt.value for pt in allowed_project_types]:
122
+ allowed_values = []
123
+ if not isinstance(allowed_project_types, list):
124
+ allowed_project_types = [allowed_project_types]
125
+
126
+ for pt in allowed_project_types:
127
+ if isinstance(pt, (ProjectType, str)):
128
+ allowed_values.append(str(pt))
129
+
130
+ if project_info.type not in allowed_values:
124
131
  self._project_id = None
125
132
 
126
133
  self._multiselect = multiselect
@@ -790,14 +790,20 @@ class InferenceImageCache:
790
790
  try:
791
791
  frame = self.get_frame_from_cache(video_id, hash_or_id)
792
792
  except Exception as e:
793
- logger.error(f"Error retrieving frame from cache: {e}", exc_info=True)
793
+ logger.error(
794
+ f"Error retrieving frame from cache: {repr(e)}. Frame will be re-downloaded",
795
+ exc_info=True,
796
+ )
794
797
  ids_to_load.append(hash_or_id)
795
798
  return pos, None
796
799
  return pos, frame
797
800
  try:
798
801
  image = self._cache.get_image(name_constructor(hash_or_id))
799
802
  except Exception as e:
800
- logger.error(f"Error retrieving image from cache: {repr(e)}. Image will be re-downloaded", exc_info=True)
803
+ logger.error(
804
+ f"Error retrieving image from cache: {repr(e)}. Image will be re-downloaded",
805
+ exc_info=True,
806
+ )
801
807
  ids_to_load.append(hash_or_id)
802
808
  return pos, None
803
809
  return pos, image
@@ -435,7 +435,7 @@ class Inference:
435
435
 
436
436
  device = "cuda" if torch.cuda.is_available() else "cpu"
437
437
  except Exception as e:
438
- logger.warn(
438
+ logger.warning(
439
439
  f"Device auto detection failed, set to default 'cpu', reason: {repr(e)}"
440
440
  )
441
441
  device = "cpu"
@@ -1383,15 +1383,15 @@ class Inference:
1383
1383
  if classes is not None:
1384
1384
  num_classes = len(classes)
1385
1385
  except NotImplementedError:
1386
- logger.warn(f"get_classes() function not implemented for {type(self)} object.")
1386
+ logger.warning(f"get_classes() function not implemented for {type(self)} object.")
1387
1387
  except AttributeError:
1388
- logger.warn("Probably, get_classes() function not working without model deploy.")
1388
+ logger.warning("Probably, get_classes() function not working without model deploy.")
1389
1389
  except Exception as exc:
1390
- logger.warn("Unknown exception. Please, contact support")
1390
+ logger.warning("Unknown exception. Please, contact support")
1391
1391
  logger.exception(exc)
1392
1392
 
1393
1393
  if num_classes is None:
1394
- logger.warn(f"get_classes() function return {classes}; skip classes processing.")
1394
+ logger.warning(f"get_classes() function return {classes}; skip classes processing.")
1395
1395
 
1396
1396
  return {
1397
1397
  "app_name": get_name_from_env(default="Neural Network Serving"),
@@ -4644,7 +4644,7 @@ def get_gpu_count():
4644
4644
  gpu_count = len(re.findall(r"GPU \d+:", nvidia_smi_output))
4645
4645
  return gpu_count
4646
4646
  except (subprocess.CalledProcessError, FileNotFoundError) as exc:
4647
- logger.warn("Calling nvidia-smi caused a error: {exc}. Assume there is no any GPU.")
4647
+ logger.warning("Calling nvidia-smi caused a error: {exc}. Assume there is no any GPU.")
4648
4648
  return 0
4649
4649
 
4650
4650
 
@@ -441,46 +441,52 @@ class SessionJSON:
441
441
  prev_current = 0
442
442
  if preparing_cb:
443
443
  # wait for inference status
444
- resp = self._get_preparing_progress()
445
- awaiting_preparing_progress = 0
446
- break_flag = False
447
- while resp.get("status") is None:
448
- time.sleep(1)
449
- awaiting_preparing_progress += 1
450
- if awaiting_preparing_progress > 30:
451
- break_flag = True
444
+ try:
452
445
  resp = self._get_preparing_progress()
453
- if break_flag:
454
- logger.warning(
455
- "Unable to get preparing progress. Continue without prepaing progress status."
456
- )
457
- if not break_flag:
458
- if resp["status"] == "download_info":
446
+ for i in range(30):
447
+ logger.info(
448
+ f"Waiting for preparing progress... {30 - i} seconds left until timeout"
449
+ )
450
+ resp = self._get_preparing_progress()
451
+ if resp.get("status") is not None:
452
+ break
453
+ time.sleep(1)
454
+ if not resp.get("status"):
455
+ raise RuntimeError("Preparing progress status is not available.")
456
+
457
+ if resp.get("status") == "download_info":
458
+ logger.info("Downloading infos...")
459
459
  progress_widget = preparing_cb(
460
460
  message="Downloading infos", total=resp["total"], unit="it"
461
461
  )
462
- while resp["status"] == "download_info":
463
- current = resp["current"]
464
- # pylint: disable=possibly-used-before-assignment
465
- progress_widget.update(current - prev_current)
466
- prev_current = current
467
- resp = self._get_preparing_progress()
468
-
469
- if resp["status"] == "download_project":
462
+ while resp["status"] == "download_info":
463
+ current = resp["current"]
464
+ # pylint: disable=possibly-used-before-assignment
465
+ progress_widget.update(current - prev_current)
466
+ prev_current = current
467
+ resp = self._get_preparing_progress()
468
+
469
+ if resp.get("status") == "download_project":
470
+ logger.info("Downloading project...")
470
471
  progress_widget = preparing_cb(message="Download project", total=resp["total"])
471
- while resp["status"] == "download_project":
472
- current = resp["current"]
473
- progress_widget.update(current - prev_current)
474
- prev_current = current
475
- resp = self._get_preparing_progress()
476
-
477
- if resp["status"] == "warmup":
472
+ while resp.get("status") == "download_project":
473
+ current = resp["current"]
474
+ progress_widget.update(current - prev_current)
475
+ prev_current = current
476
+ resp = self._get_preparing_progress()
477
+
478
+ if resp.get("status") == "warmup":
479
+ logger.info("Running warmup...")
478
480
  progress_widget = preparing_cb(message="Running warmup", total=resp["total"])
479
- while resp["status"] == "warmup":
480
- current = resp["current"]
481
- progress_widget.update(current - prev_current)
482
- prev_current = current
483
- resp = self._get_preparing_progress()
481
+ while resp.get("status") == "warmup":
482
+ current = resp["current"]
483
+ progress_widget.update(current - prev_current)
484
+ prev_current = current
485
+ resp = self._get_preparing_progress()
486
+ except Exception as ex:
487
+ logger.warning(
488
+ f"An error occurred while getting preparing progress: {ex}. Continue without preparing progress status."
489
+ )
484
490
 
485
491
  logger.info("Inference has started:", extra={"response": resp})
486
492
  resp, has_started = self._wait_for_async_inference_start()
@@ -103,7 +103,6 @@ def _upload_annotations(api: Api, image_ids, frame_indices, video_annotation: Vi
103
103
  api.annotation.upload_anns(image_ids, anns=anns)
104
104
 
105
105
 
106
-
107
106
  def _upload_frames(
108
107
  api: Api,
109
108
  frames: List[np.ndarray],
@@ -229,8 +228,8 @@ def sample_video(
229
228
  with VideoFrameReader(video_path, frame_indices) as reader:
230
229
  for batch in batched_iter(zip(reader, frame_indices), 10):
231
230
  frames, indices = zip(*batch)
232
- for frame in frames:
233
- if resize:
231
+ if resize:
232
+ for frame in frames:
234
233
  cv2.resize(frame, [*resize, frame.shape[2]], interpolation=cv2.INTER_LINEAR)
235
234
 
236
235
  image_ids = _upload_frames(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: supervisely
3
- Version: 6.73.460
3
+ Version: 6.73.462
4
4
  Summary: Supervisely Python SDK.
5
5
  Home-page: https://github.com/supervisely/supervisely
6
6
  Author: Supervisely
@@ -461,7 +461,7 @@ supervisely/app/widgets/select_dataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JC
461
461
  supervisely/app/widgets/select_dataset/select_dataset.py,sha256=S7zl83lUhquJ1U8GugiViEiGId6a5nVDfyIRRxh_LT4,10295
462
462
  supervisely/app/widgets/select_dataset/template.html,sha256=7O_ZgmRs0vOL8tng6QvYbI_0o6A4yMAPB2MlfzWHeHQ,984
463
463
  supervisely/app/widgets/select_dataset_tree/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
464
- supervisely/app/widgets/select_dataset_tree/select_dataset_tree.py,sha256=Dl2RnuUe0RLkZwGOXbcJO_9tcFmIId8dCKZkRpCqcRY,25577
464
+ supervisely/app/widgets/select_dataset_tree/select_dataset_tree.py,sha256=HQwKXzz5yhmdcS79qfP_AGDUpGgQESNmLcASZx2rv78,25802
465
465
  supervisely/app/widgets/select_dataset_tree/template.html,sha256=_uvKCMP0nkpSl3FiTUxqy10JZw3q8-9hXCv22W3BDF0,38
466
466
  supervisely/app/widgets/select_item/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
467
467
  supervisely/app/widgets/select_item/select_item.py,sha256=dcB0UN46rn3nFQybgrGpLRfwB6xnPo-GGrv9rsMeCbA,3833
@@ -905,10 +905,10 @@ supervisely/nn/benchmark/visualization/widgets/sidebar/sidebar.py,sha256=tKPURRS
905
905
  supervisely/nn/benchmark/visualization/widgets/table/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
906
906
  supervisely/nn/benchmark/visualization/widgets/table/table.py,sha256=atmDnF1Af6qLQBUjLhK18RMDKAYlxnsuVHMSEa5a-e8,4319
907
907
  supervisely/nn/inference/__init__.py,sha256=QFukX2ip-U7263aEPCF_UCFwj6EujbMnsgrXp5Bbt8I,1623
908
- supervisely/nn/inference/cache.py,sha256=crpcUUzOFhP9Q2bmB_JILCofT-CO5mysirLe5H5AkrU,35630
909
- supervisely/nn/inference/inference.py,sha256=DBcROvcSdmauvEMFJcXNTkbltoPamX7ixh34a1pKTeQ,219669
908
+ supervisely/nn/inference/cache.py,sha256=Hkxvu70rrB-j7ztQ4TBOxQePAxiKS7Erdb2FmK7aetY,35795
909
+ supervisely/nn/inference/inference.py,sha256=54SXkXYEyswQN1L9hbOn0luSLyWbFOoaSH1qzNfu7HQ,219687
910
910
  supervisely/nn/inference/inference_request.py,sha256=yuqEL4BWjC-aKze_raGScEQyhHe8loYb_eNhGPsf2-4,14870
911
- supervisely/nn/inference/session.py,sha256=f2Tyvj21oO9AKxqr6_yHZ81Ol-wXC-h5cweTHEoljkg,35796
911
+ supervisely/nn/inference/session.py,sha256=WRJKVnmh5GPXnwtuKJn7AO1C7Td39wZo774ZIVQJGYk,36228
912
912
  supervisely/nn/inference/uploader.py,sha256=Dn5MfMRq7tclEWpP0B9fJjTiQPBpwumfXxC8-lOYgnM,5659
913
913
  supervisely/nn/inference/video_inference.py,sha256=8Bshjr6rDyLay5Za8IB8Dr6FURMO2R_v7aELasO8pR4,5746
914
914
  supervisely/nn/inference/gui/__init__.py,sha256=wCxd-lF5Zhcwsis-wScDA8n1Gk_1O00PKgDviUZ3F1U,221
@@ -1090,7 +1090,7 @@ supervisely/user/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
1090
1090
  supervisely/user/user.py,sha256=4GSVIupPAxWjIxZmUtH3Dtms_vGV82-49kM_aaR2gBI,319
1091
1091
  supervisely/video/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1092
1092
  supervisely/video/import_utils.py,sha256=b1Nl0gscNsV0iB9nWPeqt8GrkhOeuTZsN1p-d3gDUmE,544
1093
- supervisely/video/sampling.py,sha256=6w-FjpWbEq_u7zonnPEo0MhXN7RofhdgSZd27h45YMQ,20249
1093
+ supervisely/video/sampling.py,sha256=PGZVP1V9pDzdMsGThwS7U8E4VS6h1ba0nvpjVshIPfg,20248
1094
1094
  supervisely/video/video.py,sha256=nG1TE4MEvoh-_pfTTOx44dzqRq2VqLljmUnQ8r1czUY,20799
1095
1095
  supervisely/video_annotation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1096
1096
  supervisely/video_annotation/constants.py,sha256=_gW9iMhVk1w_dUaFiaiyXn66mt13S6bkxC64xpjP-CU,529
@@ -1129,9 +1129,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
1129
1129
  supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
1130
1130
  supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
1131
1131
  supervisely_lib/__init__.py,sha256=yRwzEQmVwSd6lUQoAUdBngKEOlnoQ6hA9ZcoZGJRNC4,331
1132
- supervisely-6.73.460.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
1133
- supervisely-6.73.460.dist-info/METADATA,sha256=Oe6s7atYi8hbNnRLu5PkJkYnxSAhPWMIJID7S7J_wVo,35604
1134
- supervisely-6.73.460.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
1135
- supervisely-6.73.460.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
1136
- supervisely-6.73.460.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
1137
- supervisely-6.73.460.dist-info/RECORD,,
1132
+ supervisely-6.73.462.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
1133
+ supervisely-6.73.462.dist-info/METADATA,sha256=OzYjw5iRdUCofQdUCsd5xkjgEOSl600QFG7lHM7GAlU,35604
1134
+ supervisely-6.73.462.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
1135
+ supervisely-6.73.462.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
1136
+ supervisely-6.73.462.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
1137
+ supervisely-6.73.462.dist-info/RECORD,,