supervisely 6.73.320__py3-none-any.whl → 6.73.322__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
supervisely/_utils.py CHANGED
@@ -471,6 +471,39 @@ def get_or_create_event_loop() -> asyncio.AbstractEventLoop:
     return loop
 
 
+def sync_call(coro):
+    """
+    This function is used to run asynchronous functions in a synchronous context.
+
+    :param coro: Asynchronous function.
+    :type coro: Coroutine
+    :return: Result of the asynchronous function.
+    :rtype: Any
+
+    :Usage example:
+
+     .. code-block:: python
+
+        from supervisely._utils import sync_call
+
+        async def async_function():
+            await asyncio.sleep(1)
+            return "Hello, World!"
+        coro = async_function()
+        result = sync_call(coro)
+        print(result)
+        # Output: Hello, World!
+    """
+
+    loop = get_or_create_event_loop()
+
+    if loop.is_running():
+        future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
+        return future.result()
+    else:
+        return loop.run_until_complete(coro)
+
+
 def get_filename_from_headers(url):
     try:
         response = requests.head(url, allow_redirects=True)
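Note on `sync_call`: the `run_coroutine_threadsafe` branch is only safe when the running loop lives in a different thread than the caller; invoking `sync_call` from the loop's own thread would block on `future.result()` and deadlock. A minimal usage sketch for the simple case, where no loop is running in the calling thread:

 .. code-block:: python

    import asyncio
    from supervisely._utils import sync_call

    async def fetch():
        await asyncio.sleep(0.1)
        return 42

    # no loop is running in this thread, so sync_call falls through to
    # loop.run_until_complete(...)
    print(sync_call(fetch()))  # 42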
supervisely/api/api.py CHANGED
@@ -1443,6 +1443,7 @@ class Api:
         chunk_size: int = 8192,
         use_public_api: Optional[bool] = True,
         timeout: httpx._types.TimeoutTypes = 60,
+        **kwargs,
     ) -> AsyncGenerator:
         """
         Performs asynchronous streaming GET or POST request to server with given parameters.
@@ -1486,18 +1487,19 @@
         else:
             headers = {**self.headers, **headers}
 
-        if isinstance(data, (bytes, Generator)):
-            content = data
-            json_body = None
-            params = None
-        elif isinstance(data, Dict):
-            json_body = {**data, **self.additional_fields}
-            content = None
-            params = None
+        params = kwargs.get("params", None)
+        if "content" in kwargs or "json_body" in kwargs:
+            content = kwargs.get("content", None)
+            json_body = kwargs.get("json_body", None)
         else:
-            params = data
-            content = None
-            json_body = None
+            if isinstance(data, (bytes, Generator)):
+                content = data
+                json_body = None
+            elif isinstance(data, Dict):
+                json_body = {**data, **self.additional_fields}
+                content = None
+            else:
+                raise ValueError("Data should be either bytes or dict")
 
         if range_start is not None or range_end is not None:
             headers["Range"] = f"bytes={range_start or ''}-{range_end or ''}"
@@ -1512,17 +1514,19 @@
                 url,
                 content=content,
                 json=json_body,
-                params=params,
                 headers=headers,
                 timeout=timeout,
+                params=params,
             )
         elif method_type == "GET":
             response = self.async_httpx_client.stream(
                 method_type,
                 url,
-                json=json_body or params,
+                content=content,
+                json=json_body,
                 headers=headers,
                 timeout=timeout,
+                params=params,
             )
         else:
             raise NotImplementedError(
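The net effect of the `stream_async` changes above: callers can now bypass the type-based dispatch on `data` by passing `content`, `json_body`, or `params` explicitly through `**kwargs`, and `params` is forwarded to httpx as query parameters for both POST and GET. A minimal sketch of the two calling styles (the method name, `chunk_generator`, and `process` are hypothetical placeholders):

 .. code-block:: python

    # legacy style: dispatch on the type of `data`
    async for chunk, _ in api.stream_async(method="some.method", method_type="POST", data={"teamId": 1}):
        process(chunk)

    # explicit style: `content`/`json_body` in kwargs take precedence over `data`,
    # and `params` becomes httpx query parameters
    async for chunk, _ in api.stream_async(
        method="some.method",
        method_type="POST",
        data=b"",                    # ignored once `content` is supplied
        content=chunk_generator(),   # e.g. an async generator of bytes
        params={"teamId": 1},
    ):
        process(chunk)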
supervisely/api/file_api.py CHANGED
@@ -33,7 +33,9 @@ from supervisely.io.fs import (
     get_file_name,
     get_file_name_with_ext,
     get_file_size,
+    get_or_create_event_loop,
     list_files_recursively,
+    list_files_recursively_async,
     silent_remove,
 )
 from supervisely.io.fs_cache import FileCache
@@ -2041,7 +2043,7 @@ class FileApi(ModuleApiBase):
         # check_hash: bool = True, #TODO add with resumable api
         progress_cb: Optional[Union[tqdm, Callable]] = None,
         progress_cb_type: Literal["number", "size"] = "size",
-    ) -> httpx.Response:
+    ) -> None:
         """
         Upload file from local path to Team Files asynchronously.
 
@@ -2057,8 +2059,8 @@
         :type progress_cb: tqdm or callable, optional
         :param progress_cb_type: Type of progress callback. Can be "number" or "size". Default is "size".
         :type progress_cb_type: Literal["number", "size"], optional
-        :return: Response from API.
-        :rtype: :class:`httpx.Response`
+        :return: None
+        :rtype: :class:`NoneType`
         :Usage example:
 
          .. code-block:: python
@@ -2087,17 +2089,30 @@
         }
         if semaphore is None:
             semaphore = self._api.get_default_semaphore()
+        logger.debug(f"Uploading with async to: {dst}. Semaphore: {semaphore}")
         async with semaphore:
             async with aiofiles.open(src, "rb") as fd:
-                item = await fd.read()
-                response = await self._api.post_async(
-                    api_method, content=item, params=json_body, headers=headers
-                )
-                if progress_cb is not None and progress_cb_type == "size":
-                    progress_cb(len(item))
+
+                async def file_chunk_generator():
+                    while True:
+                        chunk = await fd.read(8 * 1024 * 1024)
+                        if not chunk:
+                            break
+                        if progress_cb is not None and progress_cb_type == "size":
+                            progress_cb(len(chunk))
+                        yield chunk
+
+                async for chunk, _ in self._api.stream_async(
+                    method=api_method,
+                    method_type="POST",
+                    data=file_chunk_generator(),  # required by the signature, but unused when `content` is given
+                    headers=headers,
+                    content=file_chunk_generator(),  # used instead of `data` inside stream_async
+                    params=json_body,
+                ):
+                    pass
         if progress_cb is not None and progress_cb_type == "number":
             progress_cb(1)
-        return response
 
     async def upload_bulk_async(
         self,
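Summary of the `upload_async` change: the file is now streamed to the server in 8 MiB chunks via `stream_async` instead of being read fully into memory and posted with `post_async`, and the method now returns `None` rather than the `httpx.Response`. A minimal usage sketch (team ID and paths are hypothetical):

 .. code-block:: python

    import asyncio
    import supervisely as sly

    api = sly.Api.from_env()

    # streams /local/data/model.tar to Team Files in 8 MiB chunks
    asyncio.run(
        api.file.upload_async(team_id=9, src="/local/data/model.tar", dst="/models/model.tar")
    )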
@@ -2109,6 +2124,7 @@
         # check_hash: bool = True, #TODO add with resumable api
         progress_cb: Optional[Union[tqdm, Callable]] = None,
         progress_cb_type: Literal["number", "size"] = "size",
+        enable_fallback: Optional[bool] = True,
     ) -> None:
         """
         Upload multiple files from local paths to Team Files asynchronously.
@@ -2125,6 +2141,8 @@
         :type progress_cb: tqdm or callable, optional
         :param progress_cb_type: Type of progress callback. Can be "number" or "size". Default is "size".
         :type progress_cb_type: Literal["number", "size"], optional
+        :param enable_fallback: If True, the method will fall back to synchronous upload if an error occurs.
+        :type enable_fallback: bool, optional
         :return: None
         :rtype: :class:`NoneType`
         :Usage example:
@@ -2153,19 +2171,134 @@
                 api.file.upload_bulk_async(8, paths_to_files, paths_to_save)
             )
         """
-        if semaphore is None:
-            semaphore = self._api.get_default_semaphore()
-        tasks = []
-        for s, d in zip(src_paths, dst_paths):
-            task = self.upload_async(
-                team_id,
-                s,
-                d,
-                semaphore=semaphore,
-                # chunk_size=chunk_size, #TODO add with resumable api
-                # check_hash=check_hash, #TODO add with resumable api
-                progress_cb=progress_cb,
-                progress_cb_type=progress_cb_type,
+        try:
+            if semaphore is None:
+                semaphore = self._api.get_default_semaphore()
+            tasks = []
+            for src, dst in zip(src_paths, dst_paths):
+                task = asyncio.create_task(
+                    self.upload_async(
+                        team_id=team_id,
+                        src=src,
+                        dst=dst,
+                        semaphore=semaphore,
+                        # chunk_size=chunk_size, #TODO add with resumable api
+                        # check_hash=check_hash, #TODO add with resumable api
+                        progress_cb=progress_cb,
+                        progress_cb_type=progress_cb_type,
+                    )
+                )
+                tasks.append(task)
+            for task in tasks:
+                await task
+        except Exception as e:
+            if enable_fallback:
+                logger.warning(
+                    "Asynchronous bulk upload failed. Falling back to synchronous upload.",
+                    exc_info=True,
+                )
+                if progress_cb is not None and progress_cb_type == "number":
+                    logger.warning(
+                        "Progress callback type 'number' is not supported for synchronous upload. "
+                        "Progress callback will be disabled."
+                    )
+                    progress_cb = None
+                self.upload_bulk(
+                    team_id=team_id,
+                    src_paths=src_paths,
+                    dst_paths=dst_paths,
+                    progress_cb=progress_cb,
+                )
+            else:
+                raise e
+
+    async def upload_directory_async(
+        self,
+        team_id: int,
+        local_dir: str,
+        remote_dir: str,
+        change_name_if_conflict: Optional[bool] = True,
+        progress_size_cb: Optional[Union[tqdm, Callable]] = None,
+        replace_if_conflict: Optional[bool] = False,
+        enable_fallback: Optional[bool] = True,
+    ) -> str:
+        """
+        Upload directory to Team Files from local path.
+        Files are uploaded asynchronously.
+
+        :param team_id: Team ID in Supervisely.
+        :type team_id: int
+        :param local_dir: Path to local directory.
+        :type local_dir: str
+        :param remote_dir: Path to directory in Team Files.
+        :type remote_dir: str
+        :param change_name_if_conflict: Checks if given name already exists and adds suffix to the end of the name.
+        :type change_name_if_conflict: bool, optional
+        :param progress_size_cb: Function for tracking upload progress.
+        :type progress_size_cb: Progress, optional
+        :param replace_if_conflict: If True, replace existing dir.
+        :type replace_if_conflict: bool, optional
+        :param enable_fallback: If True, the method will fall back to synchronous upload if an error occurs.
+        :type enable_fallback: bool, optional
+        :return: Path to directory in Team Files.
+        :rtype: :class:`str`
+        :Usage example:
+
+         .. code-block:: python
+
+            import supervisely as sly
+
+            os.environ['SERVER_ADDRESS'] = 'https://app.supervisely.com'
+            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
+            api = sly.Api.from_env()
+
+            path_to_dir = "/My_App_Test/ds1"
+            local_path = "/home/admin/Downloads/My_local_test"
+
+            await api.file.upload_directory_async(9, local_path, path_to_dir)
+        """
+        try:
+            if not remote_dir.startswith("/"):
+                remote_dir = "/" + remote_dir
+
+            if self.dir_exists(team_id, remote_dir):
+                if change_name_if_conflict is True:
+                    res_remote_dir = self.get_free_dir_name(team_id, remote_dir)
+                elif replace_if_conflict is True:
+                    res_remote_dir = remote_dir
+                else:
+                    raise FileExistsError(
+                        f"Directory {remote_dir} already exists in your team (id={team_id})"
+                    )
+            else:
+                res_remote_dir = remote_dir
+
+            local_files = await list_files_recursively_async(local_dir)
+            dir_prefix = local_dir.rstrip("/") + "/"
+            remote_files = [
+                res_remote_dir.rstrip("/") + "/" + file[len(dir_prefix) :] for file in local_files
+            ]
+
+            await self.upload_bulk_async(
+                team_id=team_id,
+                src_paths=local_files,
+                dst_paths=remote_files,
+                progress_cb=progress_size_cb,
             )
-            tasks.append(task)
-        await asyncio.gather(*tasks)
+        except Exception as e:
+            if enable_fallback:
+                logger.warning(
+                    "Asynchronous directory upload failed. Falling back to synchronous upload.",
+                    exc_info=True,
+                )
+                res_remote_dir = self.upload_directory(
+                    team_id=team_id,
+                    local_dir=local_dir,
+                    remote_dir=remote_dir,
+                    change_name_if_conflict=change_name_if_conflict,
+                    progress_size_cb=progress_size_cb,
+                    replace_if_conflict=replace_if_conflict,
+                )
+            else:
+                raise e
+        return res_remote_dir
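Combined with the `sync_call` helper added in supervisely/_utils.py, the new asynchronous directory upload can also be driven from synchronous code. A minimal sketch (team ID and paths are hypothetical):

 .. code-block:: python

    import supervisely as sly
    from supervisely._utils import sync_call

    api = sly.Api.from_env()

    res_dir = sync_call(
        api.file.upload_directory_async(9, "/home/admin/My_local_test", "/My_App_Test/ds1")
    )
    print(res_dir)  # actual destination; may differ from the request on name conflict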
supervisely/convert/base_converter.py CHANGED
@@ -65,6 +65,7 @@ class AvailablePointcloudEpisodesConverters:
     SLY = "supervisely"
     BAG = "rosbag"
     LYFT = "lyft"
+    KITTI360 = "kitti360"
 
 
 class AvailableVolumeConverters:
@@ -7,3 +7,4 @@ from supervisely.convert.pointcloud_episodes.lyft.lyft_converter import LyftEpis
 from supervisely.convert.pointcloud_episodes.nuscenes_conv.nuscenes_converter import (
     NuscenesEpisodesConverter,
 )
+from supervisely.convert.pointcloud_episodes.kitti_360.kitti_360_converter import KITTI360Converter
supervisely/convert/pointcloud_episodes/kitti_360/kitti_360_converter.py ADDED
@@ -0,0 +1,242 @@
+import os
+from pathlib import Path
+from typing import Optional, List
+from supervisely import PointcloudEpisodeAnnotation, ProjectMeta, is_development, logger, ObjClass, ObjClassCollection
+from supervisely.geometry.cuboid_3d import Cuboid3d
+from supervisely.api.api import Api, ApiField
+from supervisely.convert.base_converter import AvailablePointcloudEpisodesConverters
+from supervisely.convert.pointcloud_episodes.kitti_360.kitti_360_helper import *
+from supervisely.convert.pointcloud_episodes.pointcloud_episodes_converter import PointcloudEpisodeConverter
+from supervisely.io.fs import (
+    file_exists,
+    get_file_name,
+    get_file_name_with_ext,
+    list_files_recursively,
+    silent_remove,
+)
+from supervisely.pointcloud_annotation.pointcloud_episode_frame_collection import PointcloudEpisodeFrameCollection
+from supervisely.pointcloud_annotation.pointcloud_episode_object_collection import PointcloudEpisodeObjectCollection
+from supervisely.pointcloud_annotation.pointcloud_episode_object import PointcloudEpisodeObject
+from supervisely.pointcloud_annotation.pointcloud_episode_frame import PointcloudEpisodeFrame
+from supervisely.pointcloud_annotation.pointcloud_figure import PointcloudFigure
+
+class KITTI360Converter(PointcloudEpisodeConverter):
+
+    class Item:
+
+        def __init__(
+            self,
+            scene_name: str,
+            frame_paths: List[str],
+            ann_data: Annotation3D,
+            poses_path: str,
+            related_images: Optional[tuple] = None,
+            custom_data: Optional[dict] = None,
+        ):
+            self._scene_name = scene_name
+            self._frame_paths = frame_paths
+            self._ann_data = ann_data
+            self._poses_path = poses_path
+            self._related_images = related_images or []
+
+            self._type = "point_cloud_episode"
+            self._custom_data = custom_data if custom_data is not None else {}
+
+    def __init__(self, *args, **kwargs):
+        self._calib_path = None
+        super().__init__(*args, **kwargs)
+
+    def __str__(self) -> str:
+        return AvailablePointcloudEpisodesConverters.KITTI360
+
+    @property
+    def key_file_ext(self) -> str:
+        return ".bin"
+
+    def validate_format(self) -> bool:
+        try:
+            import kitti360scripts
+        except ImportError:
+            logger.warning("Please run 'pip install kitti360Scripts' to import KITTI-360 data.")
+            return False
+
+        self._items = []
+        subdirs = os.listdir(self._input_data)
+        if len(subdirs) == 1:
+            self._input_data = os.path.join(self._input_data, subdirs[0])
+
+        # * Get calibration path
+        calib_dir = next(iter([(Path(path).parent).as_posix() for path in list_files_recursively(self._input_data, [".txt"], None, True) if Path(path).stem.startswith("calib")]), None)
+        if calib_dir is None:
+            return False
+        self._calib_path = calib_dir
+
+        # * Get pointcloud files paths
+        velodyne_files = list_files_recursively(self._input_data, [".bin"], None, True)
+        if len(velodyne_files) == 0:
+            return False
+
+        # * Get annotation files paths and related images
+        boxes_ann_files = list_files_recursively(self._input_data, [".xml"], None, True)
+        if len(boxes_ann_files) == 0:
+            return False
+        rimage_files = list_files_recursively(self._input_data, [".png"], None, True)
+
+        kitti_anns = []
+        for ann_file in boxes_ann_files:
+            key_name = Path(ann_file).stem
+
+            # * Get pointcloud files
+            frame_paths = []
+            for path in velodyne_files:
+                if key_name in Path(path).parts:
+                    frame_paths.append(path)
+            if len(frame_paths) == 0:
+                logger.warning("No frames found for name: %s", key_name)
+                continue
+
+            # * Get related images
+            rimages = []
+            for rimage in rimage_files:
+                path = Path(rimage)
+                if key_name in path.parts:
+                    cam_name = path.parts[-3]
+                    rimages.append((cam_name, rimage))
+
+            # * Get poses
+            poses_filter = (
+                lambda x: x.endswith("cam0_to_world.txt") and key_name in Path(x).parts
+            )
+            poses_path = next(
+                (path for path in list_files_recursively(self._input_data, [".txt"], None, True)
+                 if poses_filter(path)),
+                None,
+            )
+            if poses_path is None:
+                logger.warning("No poses found for name: %s", key_name)
+                continue
+
+            # * Parse annotation
+            ann = Annotation3D(ann_file)
+            kitti_anns.append(ann)
+
+            self._items.append(
+                self.Item(key_name, frame_paths, ann, poses_path, rimages)
+            )
+
+        # * Get object class names for meta
+        obj_class_names = set()
+        for ann in kitti_anns:
+            for obj in ann.get_objects():
+                obj_class_names.add(obj.name)
+        obj_classes = [ObjClass(obj_class, Cuboid3d) for obj_class in obj_class_names]
+        self._meta = ProjectMeta(obj_classes=ObjClassCollection(obj_classes))
+        return self.items_count > 0
+
+    def to_supervisely(
+        self,
+        item,
+        meta: ProjectMeta,
+        renamed_classes: dict = {},
+        renamed_tags: dict = {},
+        static_transformations: StaticTransformations = None,
+    ) -> PointcloudEpisodeAnnotation:
+        static_transformations.set_cam2world(item._poses_path)
+
+        frame_cnt = len(item._frame_paths)
+        objs, frames = [], []
+
+        frame_idx_to_figures = {idx: [] for idx in range(frame_cnt)}
+        for obj in item._ann_data.get_objects():
+            pcd_obj = PointcloudEpisodeObject(meta.get_obj_class(obj.name))
+            objs.append(pcd_obj)
+
+            for idx in range(frame_cnt):
+                if obj.start_frame <= idx <= obj.end_frame:
+                    tr_matrix = static_transformations.world_to_velo_transformation(obj, idx)
+                    geom = convert_kitti_cuboid_to_supervisely_geometry(tr_matrix)
+                    frame_idx_to_figures[idx].append(PointcloudFigure(pcd_obj, geom, idx))
+        for idx, figures in frame_idx_to_figures.items():
+            frame = PointcloudEpisodeFrame(idx, figures)
+            frames.append(frame)
+        obj_collection = PointcloudEpisodeObjectCollection(objs)
+        frame_collection = PointcloudEpisodeFrameCollection(frames)
+        return PointcloudEpisodeAnnotation(
+            frame_cnt, objects=obj_collection, frames=frame_collection
+        )
+
+    def upload_dataset(self, api: Api, dataset_id: int, batch_size: int = 1, log_progress=True):
+        meta, renamed_classes, renamed_tags = self.merge_metas_with_conflicts(api, dataset_id)
+
+        dataset_info = api.dataset.get_info_by_id(dataset_id)
+        if log_progress:
+            progress, progress_cb = self.get_progress(sum([len(item._frame_paths) for item in self._items]), "Converting pointcloud episodes...")
+        else:
+            progress_cb = None
+        static_transformations = StaticTransformations(self._calib_path)
+        scene_ds = dataset_info
+        multiple_items = self.items_count > 1
+        for item in self._items:
+            scene_ds = api.dataset.create(dataset_info.project_id, item._scene_name, parent_id=dataset_id) if multiple_items else dataset_info
+            frame_to_pcd_ids = {}
+            for idx, frame_path in enumerate(item._frame_paths):
+                # * Convert pointcloud from ".bin" to ".pcd"
+                pcd_path = str(Path(frame_path).with_suffix(".pcd"))
+                if file_exists(pcd_path):
+                    logger.warning(f"Overwriting file with path: {pcd_path}")
+                convert_bin_to_pcd(frame_path, pcd_path)
+
+                # * Upload pointcloud
+                pcd_name = get_file_name_with_ext(pcd_path)
+                info = api.pointcloud_episode.upload_path(scene_ds.id, pcd_name, pcd_path, {"frame": idx})
+                pcd_id = info.id
+                frame_to_pcd_ids[idx] = pcd_id
+
+                # * Clean up
+                silent_remove(pcd_path)
+
+                if log_progress:
+                    progress_cb(1)
+
+            # * Upload photocontext
+            rimage_jsons = []
+            cam_names = []
+            hashes = api.pointcloud_episode.upload_related_images(
+                [rimage_path for _, rimage_path in item._related_images]
+            )
+            for (cam_name, rimage_path), img, pcd_id in zip(
+                item._related_images, hashes, list(frame_to_pcd_ids.values())
+            ):
+                cam_num = int(cam_name[-1])
+                rimage_info = convert_calib_to_image_meta(
+                    get_file_name(rimage_path), static_transformations, cam_num
+                )
+                image_json = {
+                    ApiField.ENTITY_ID: pcd_id,
+                    ApiField.NAME: cam_name,
+                    ApiField.HASH: img,
+                    ApiField.META: rimage_info[ApiField.META],
+                }
+                rimage_jsons.append(image_json)
+                cam_names.append(cam_name)
+            if rimage_jsons:
+                api.pointcloud_episode.add_related_images(rimage_jsons, cam_names)
+
+            # * Convert annotation and upload
+            try:
+                ann = self.to_supervisely(
+                    item, meta, renamed_classes, renamed_tags, static_transformations
+                )
+                api.pointcloud_episode.annotation.append(scene_ds.id, ann, frame_to_pcd_ids)
+            except Exception as e:
+                logger.error(
+                    f"Failed to upload annotation for scene: {scene_ds.name}. Error: {repr(e)}",
+                    stack_info=False,
+                )
+                continue
+
+            logger.info(f"Dataset ID:{scene_ds.id} has been successfully uploaded.")
+
+        if log_progress:
+            if is_development():
+                progress.close()
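The kitti_360_helper module imported above (providing Annotation3D, StaticTransformations, convert_bin_to_pcd, convert_calib_to_image_meta, and convert_kitti_cuboid_to_supervisely_geometry) is not shown in this diff. For orientation, a sketch of what a bin-to-pcd conversion for KITTI-360 velodyne scans typically looks like, assuming numpy and open3d; the package's actual helper may differ:

 .. code-block:: python

    import numpy as np
    import open3d as o3d

    def convert_bin_to_pcd(bin_path: str, pcd_path: str) -> None:
        # KITTI-360 velodyne scans are flat float32 records of (x, y, z, intensity)
        points = np.fromfile(bin_path, dtype=np.float32).reshape(-1, 4)
        cloud = o3d.geometry.PointCloud()
        cloud.points = o3d.utility.Vector3dVector(points[:, :3].astype(np.float64))
        o3d.io.write_point_cloud(pcd_path, cloud)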