supervisely 6.73.262__py3-none-any.whl → 6.73.264__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of supervisely might be problematic. Click here for more details.
- supervisely/__init__.py +1 -1
- supervisely/api/app_api.py +166 -46
- supervisely/api/module_api.py +6 -0
- supervisely/convert/__init__.py +2 -0
- supervisely/convert/base_converter.py +2 -0
- supervisely/convert/image/image_helper.py +1 -0
- supervisely/convert/pointcloud/lyft/__init__.py +0 -0
- supervisely/convert/pointcloud/lyft/lyft_converter.py +287 -0
- supervisely/convert/pointcloud/lyft/lyft_helper.py +231 -0
- supervisely/convert/pointcloud_episodes/lyft/__init__.py +0 -0
- supervisely/convert/pointcloud_episodes/lyft/lyft_converter.py +244 -0
- supervisely/io/exception_handlers.py +2 -1
- supervisely/io/fs.py +27 -17
- {supervisely-6.73.262.dist-info → supervisely-6.73.264.dist-info}/METADATA +1 -1
- {supervisely-6.73.262.dist-info → supervisely-6.73.264.dist-info}/RECORD +19 -14
- {supervisely-6.73.262.dist-info → supervisely-6.73.264.dist-info}/LICENSE +0 -0
- {supervisely-6.73.262.dist-info → supervisely-6.73.264.dist-info}/WHEEL +0 -0
- {supervisely-6.73.262.dist-info → supervisely-6.73.264.dist-info}/entry_points.txt +0 -0
- {supervisely-6.73.262.dist-info → supervisely-6.73.264.dist-info}/top_level.txt +0 -0
supervisely/__init__.py
CHANGED
|
@@ -309,4 +309,4 @@ except Exception as e:
|
|
|
309
309
|
# If new changes in Supervisely Python SDK require upgrade of the Supervisely instance
|
|
310
310
|
# set a new value for the environment variable MINIMUM_INSTANCE_VERSION_FOR_SDK, otherwise
|
|
311
311
|
# users can face compatibility issues, if the instance version is lower than the SDK version.
|
|
312
|
-
os.environ["MINIMUM_INSTANCE_VERSION_FOR_SDK"] = "6.12.
|
|
312
|
+
os.environ["MINIMUM_INSTANCE_VERSION_FOR_SDK"] = "6.12.17"
|
supervisely/api/app_api.py
CHANGED
|
@@ -1212,17 +1212,80 @@ class AppApi(TaskApi):
|
|
|
1212
1212
|
|
|
1213
1213
|
def get_list(
|
|
1214
1214
|
self,
|
|
1215
|
-
team_id,
|
|
1216
|
-
filter=None,
|
|
1217
|
-
context=None,
|
|
1218
|
-
repository_key=None,
|
|
1219
|
-
show_disabled=False,
|
|
1220
|
-
integrated_into=None,
|
|
1221
|
-
session_tags=None,
|
|
1222
|
-
only_running=False,
|
|
1223
|
-
with_shared=True,
|
|
1215
|
+
team_id: int,
|
|
1216
|
+
filter: Optional[List[dict]] = None,
|
|
1217
|
+
context: Optional[List[str]] = None,
|
|
1218
|
+
repository_key: Optional[str] = None,
|
|
1219
|
+
show_disabled: bool = False,
|
|
1220
|
+
integrated_into: Optional[List[str]] = None,
|
|
1221
|
+
session_tags: Optional[List[str]] = None,
|
|
1222
|
+
only_running: bool = False,
|
|
1223
|
+
with_shared: bool = True,
|
|
1224
|
+
force_all_sessions: bool = True,
|
|
1224
1225
|
) -> List[AppInfo]:
|
|
1225
|
-
"""
|
|
1226
|
+
"""
|
|
1227
|
+
Get list of applications for the specified team.
|
|
1228
|
+
|
|
1229
|
+
:param team_id: team id
|
|
1230
|
+
:type team_id: int
|
|
1231
|
+
:param filter: list of filters
|
|
1232
|
+
:type filter: Optional[List[dict]]
|
|
1233
|
+
:param context: list of application contexts
|
|
1234
|
+
:type context: Optional[List[str]]
|
|
1235
|
+
:param repository_key: repository key
|
|
1236
|
+
:type repository_key: Optional[str]
|
|
1237
|
+
:param show_disabled: show disabled applications
|
|
1238
|
+
:type show_disabled: bool
|
|
1239
|
+
:param integrated_into: destination of the application.
|
|
1240
|
+
Available values: "panel", "files", "standalone", "data_commander",
|
|
1241
|
+
"image_annotation_tool", "video_annotation_tool",
|
|
1242
|
+
"dicom_annotation_tool", "pointcloud_annotation_tool"
|
|
1243
|
+
:type integrated_into: Optional[List[str]]
|
|
1244
|
+
:param session_tags: list of session tags
|
|
1245
|
+
:type session_tags: Optional[List[str]]
|
|
1246
|
+
:param only_running: show only running applications (status is one of "queued"/"consumed"/"started"/"deployed")
|
|
1247
|
+
:type only_running: bool
|
|
1248
|
+
:param with_shared: include shared applications
|
|
1249
|
+
:type with_shared: bool
|
|
1250
|
+
:param force_all_sessions: force to get all sessions (tasks) for each application.
|
|
1251
|
+
Works only if only_running is False.
|
|
1252
|
+
Note that it can be a long operation.
|
|
1253
|
+
:type force_all_sessions: bool
|
|
1254
|
+
|
|
1255
|
+
:return: list of applications
|
|
1256
|
+
:rtype: List[AppInfo]
|
|
1257
|
+
|
|
1258
|
+
|
|
1259
|
+
:Usage example:
|
|
1260
|
+
|
|
1261
|
+
.. code-block:: python
|
|
1262
|
+
|
|
1263
|
+
import supervisely as sly
|
|
1264
|
+
|
|
1265
|
+
os.environ['SERVER_ADDRESS'] = 'https://app.supervisely.com'
|
|
1266
|
+
os.environ['API_TOKEN'] = 'Your Supervisely API Token'
|
|
1267
|
+
api = sly.Api.from_env()
|
|
1268
|
+
|
|
1269
|
+
team_id = 447
|
|
1270
|
+
|
|
1271
|
+
# Get list of all applications (including all tasks in `tasks` field)
|
|
1272
|
+
apps = api.app.get_list(team_id=team_id)
|
|
1273
|
+
|
|
1274
|
+
# Get list of all applications (only running tasks included in `tasks` field)
|
|
1275
|
+
apps = api.app.get_list(team_id=team_id, force_all_sessions=False)
|
|
1276
|
+
|
|
1277
|
+
# Get list of only running applications
|
|
1278
|
+
apps = api.app.get_list(team_id=team_id, only_running=True)
|
|
1279
|
+
|
|
1280
|
+
# Get list of applications with specific filters
|
|
1281
|
+
filter = [{"field": "moduleId", "operator": "=", "value": 428}]
|
|
1282
|
+
apps = api.app.get_list(team_id=team_id, filter=filter)
|
|
1283
|
+
"""
|
|
1284
|
+
|
|
1285
|
+
if only_running is True:
|
|
1286
|
+
# no need to get all sessions if only running sessions are requested
|
|
1287
|
+
# (`force_all_sessions` has higher priority than only_running)
|
|
1288
|
+
force_all_sessions = False
|
|
1226
1289
|
|
|
1227
1290
|
return self.get_list_all_pages(
|
|
1228
1291
|
method="apps.list",
|
|
@@ -1240,6 +1303,7 @@ class AppApi(TaskApi):
|
|
|
1240
1303
|
"onlyRunning": only_running,
|
|
1241
1304
|
"showDisabled": show_disabled,
|
|
1242
1305
|
"withShared": with_shared,
|
|
1306
|
+
"forceAllSessions": force_all_sessions,
|
|
1243
1307
|
},
|
|
1244
1308
|
)
|
|
1245
1309
|
|
|
@@ -1479,49 +1543,105 @@ class AppApi(TaskApi):
|
|
|
1479
1543
|
|
|
1480
1544
|
def get_sessions(
|
|
1481
1545
|
self,
|
|
1482
|
-
team_id,
|
|
1483
|
-
module_id,
|
|
1484
|
-
|
|
1485
|
-
|
|
1486
|
-
|
|
1487
|
-
|
|
1546
|
+
team_id: int,
|
|
1547
|
+
module_id: int,
|
|
1548
|
+
show_disabled: bool = False,
|
|
1549
|
+
session_name: Optional[str] = None,
|
|
1550
|
+
statuses: Optional[List[TaskApi.Status]] = None,
|
|
1551
|
+
with_shared: bool = False,
|
|
1488
1552
|
) -> List[SessionInfo]:
|
|
1489
|
-
|
|
1490
|
-
|
|
1491
|
-
|
|
1492
|
-
|
|
1493
|
-
|
|
1494
|
-
|
|
1495
|
-
|
|
1496
|
-
|
|
1497
|
-
|
|
1498
|
-
|
|
1553
|
+
"""
|
|
1554
|
+
Get list of sessions (tasks) for the specified team and module.
|
|
1555
|
+
|
|
1556
|
+
:param team_id: team id
|
|
1557
|
+
:type team_id: int
|
|
1558
|
+
:param module_id: application module id
|
|
1559
|
+
:type module_id: int
|
|
1560
|
+
:param show_disabled: show disabled applications
|
|
1561
|
+
:type show_disabled: bool
|
|
1562
|
+
:param session_name: session name to filter sessions
|
|
1563
|
+
:type session_name: Optional[str]
|
|
1564
|
+
:param statuses: list of statuses to filter sessions
|
|
1565
|
+
:type statuses: Optional[List[TaskApi.Status]]
|
|
1566
|
+
:param with_shared: include shared application sessions
|
|
1567
|
+
:type with_shared: bool
|
|
1568
|
+
|
|
1569
|
+
:return: list of sessions
|
|
1570
|
+
:rtype: List[SessionInfo]
|
|
1571
|
+
|
|
1572
|
+
:Usage example:
|
|
1573
|
+
|
|
1574
|
+
.. code-block:: python
|
|
1575
|
+
|
|
1576
|
+
import supervisely as sly
|
|
1577
|
+
|
|
1578
|
+
os.environ['SERVER_ADDRESS'] = 'https://app.supervisely.com'
|
|
1579
|
+
os.environ['API_TOKEN'] = 'Your Supervisely API Token'
|
|
1580
|
+
api = sly.Api.from_env()
|
|
1581
|
+
|
|
1582
|
+
team_id = 447
|
|
1583
|
+
module_id = 428
|
|
1584
|
+
|
|
1585
|
+
# Get list of all sessions for the specified team and module ID
|
|
1586
|
+
sessions = api.app.get_sessions(team_id, module_id)
|
|
1587
|
+
|
|
1588
|
+
# Get list of sessions with specific statuses
|
|
1589
|
+
from supervisely.api.task_api import TaskApi
|
|
1590
|
+
|
|
1591
|
+
statuses = [TaskApi.Status.STARTED]
|
|
1592
|
+
sessions = api.app.get_sessions(team_id, module_id, statuses=statuses)
|
|
1593
|
+
"""
|
|
1594
|
+
|
|
1595
|
+
infos_json = self.get_list(
|
|
1596
|
+
team_id,
|
|
1597
|
+
filter=[
|
|
1598
|
+
{
|
|
1599
|
+
ApiField.FIELD: ApiField.MODULE_ID,
|
|
1600
|
+
ApiField.OPERATOR: "=",
|
|
1601
|
+
ApiField.VALUE: module_id,
|
|
1602
|
+
}
|
|
1603
|
+
],
|
|
1604
|
+
with_shared=with_shared,
|
|
1605
|
+
only_running=False,
|
|
1606
|
+
force_all_sessions=False,
|
|
1499
1607
|
)
|
|
1500
|
-
if len(infos_json)
|
|
1501
|
-
# raise KeyError(f"App [module_id = {module_id}] not found in team {team_id}")
|
|
1502
|
-
return []
|
|
1503
|
-
if len(infos_json) > 1:
|
|
1608
|
+
if len(infos_json) > 1 and with_shared is False:
|
|
1504
1609
|
raise KeyError(
|
|
1505
1610
|
f"Apps list in team is broken: app [module_id = {module_id}] added to team {team_id} multiple times"
|
|
1506
1611
|
)
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
|
|
1510
|
-
|
|
1511
|
-
|
|
1512
|
-
|
|
1513
|
-
|
|
1514
|
-
|
|
1612
|
+
sessions = []
|
|
1613
|
+
for app in infos_json:
|
|
1614
|
+
data = {
|
|
1615
|
+
ApiField.TEAM_ID: team_id,
|
|
1616
|
+
ApiField.APP_ID: app.id,
|
|
1617
|
+
# ApiField.ONLY_RUNNING: False,
|
|
1618
|
+
ApiField.SHOW_DISABLED: show_disabled,
|
|
1619
|
+
ApiField.WITH_SHARED: with_shared,
|
|
1620
|
+
ApiField.SORT: ApiField.STARTED_AT,
|
|
1621
|
+
ApiField.SORT_ORDER: "desc",
|
|
1622
|
+
}
|
|
1623
|
+
if statuses is not None:
|
|
1624
|
+
data[ApiField.FILTER] = [
|
|
1625
|
+
{
|
|
1626
|
+
ApiField.FIELD: ApiField.STATUS,
|
|
1627
|
+
ApiField.OPERATOR: "in",
|
|
1628
|
+
ApiField.VALUE: [str(s) for s in statuses],
|
|
1629
|
+
}
|
|
1630
|
+
]
|
|
1631
|
+
sessions.extend(
|
|
1632
|
+
self.get_list_all_pages(
|
|
1633
|
+
method="apps.tasks.list",
|
|
1634
|
+
data=data,
|
|
1635
|
+
convert_json_info_cb=lambda x: x,
|
|
1636
|
+
)
|
|
1637
|
+
)
|
|
1638
|
+
session_infos = []
|
|
1515
1639
|
for session in sessions:
|
|
1516
|
-
to_add = True
|
|
1517
1640
|
if session_name is not None and session["meta"]["name"] != session_name:
|
|
1518
|
-
|
|
1519
|
-
|
|
1520
|
-
|
|
1521
|
-
|
|
1522
|
-
session["moduleId"] = module_id
|
|
1523
|
-
dev_tasks.append(SessionInfo.from_json(session))
|
|
1524
|
-
return dev_tasks
|
|
1641
|
+
continue
|
|
1642
|
+
session["moduleId"] = module_id
|
|
1643
|
+
session_infos.append(SessionInfo.from_json(session))
|
|
1644
|
+
return session_infos
|
|
1525
1645
|
|
|
1526
1646
|
def start(
|
|
1527
1647
|
self,
|
supervisely/api/module_api.py
CHANGED
|
@@ -596,6 +596,12 @@ class ApiField:
|
|
|
596
596
|
""""""
|
|
597
597
|
CLEAR_LOCAL_DATA_SOURCE = "clearLocalDataSource"
|
|
598
598
|
""""""
|
|
599
|
+
ONLY_RUNNING = "onlyRunning"
|
|
600
|
+
""""""
|
|
601
|
+
SHOW_DISABLED = "showDisabled"
|
|
602
|
+
""""""
|
|
603
|
+
WITH_SHARED = "withShared"
|
|
604
|
+
""""""
|
|
599
605
|
|
|
600
606
|
|
|
601
607
|
def _get_single_item(items):
|
supervisely/convert/__init__.py
CHANGED
|
@@ -37,6 +37,7 @@ from supervisely.convert.pointcloud.sly.sly_pointcloud_converter import SLYPoint
|
|
|
37
37
|
from supervisely.convert.pointcloud.las.las_converter import LasConverter
|
|
38
38
|
from supervisely.convert.pointcloud.ply.ply_converter import PlyConverter
|
|
39
39
|
from supervisely.convert.pointcloud.bag.bag_converter import BagConverter
|
|
40
|
+
from supervisely.convert.pointcloud.lyft.lyft_converter import LyftConverter
|
|
40
41
|
|
|
41
42
|
|
|
42
43
|
# Pointcloud Episodes
|
|
@@ -44,6 +45,7 @@ from supervisely.convert.pointcloud_episodes.sly.sly_pointcloud_episodes_convert
|
|
|
44
45
|
SLYPointcloudEpisodesConverter,
|
|
45
46
|
)
|
|
46
47
|
from supervisely.convert.pointcloud_episodes.bag.bag_converter import BagEpisodesConverter
|
|
48
|
+
from supervisely.convert.pointcloud_episodes.lyft.lyft_converter import LyftEpisodesConverter
|
|
47
49
|
|
|
48
50
|
# Video
|
|
49
51
|
from supervisely.convert.video.mot.mot_converter import MOTConverter
|
|
@@ -54,11 +54,13 @@ class AvailablePointcloudConverters:
|
|
|
54
54
|
LAS = "las/laz"
|
|
55
55
|
PLY = "ply"
|
|
56
56
|
BAG = "rosbag"
|
|
57
|
+
LYFT = "lyft"
|
|
57
58
|
|
|
58
59
|
|
|
59
60
|
class AvailablePointcloudEpisodesConverters:
|
|
60
61
|
SLY = "supervisely"
|
|
61
62
|
BAG = "rosbag"
|
|
63
|
+
LYFT = "lyft"
|
|
62
64
|
|
|
63
65
|
|
|
64
66
|
class AvailableVolumeConverters:
|
|
File without changes
|
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Dict, Optional
|
|
4
|
+
|
|
5
|
+
from supervisely import (
|
|
6
|
+
Api,
|
|
7
|
+
ObjClass,
|
|
8
|
+
PointcloudAnnotation,
|
|
9
|
+
ProjectMeta,
|
|
10
|
+
logger,
|
|
11
|
+
is_development,
|
|
12
|
+
Progress,
|
|
13
|
+
PointcloudObject,
|
|
14
|
+
TagMeta,
|
|
15
|
+
TagValueType,
|
|
16
|
+
)
|
|
17
|
+
from supervisely.io import fs
|
|
18
|
+
from supervisely.convert.base_converter import AvailablePointcloudConverters
|
|
19
|
+
from supervisely.convert.pointcloud.pointcloud_converter import PointcloudConverter
|
|
20
|
+
from supervisely.geometry.cuboid_3d import Cuboid3d
|
|
21
|
+
from supervisely.convert.pointcloud.lyft import lyft_helper
|
|
22
|
+
from supervisely.api.api import ApiField
|
|
23
|
+
from datetime import datetime
|
|
24
|
+
from supervisely import TinyTimer
|
|
25
|
+
from supervisely.pointcloud_annotation.pointcloud_annotation import (
|
|
26
|
+
PointcloudFigure,
|
|
27
|
+
PointcloudObjectCollection,
|
|
28
|
+
PointcloudTagCollection,
|
|
29
|
+
)
|
|
30
|
+
from supervisely.pointcloud_annotation.pointcloud_tag import PointcloudTag
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class LyftConverter(PointcloudConverter):
|
|
34
|
+
class Item(PointcloudConverter.Item):
|
|
35
|
+
|
|
36
|
+
def __init__(
|
|
37
|
+
self,
|
|
38
|
+
item_path,
|
|
39
|
+
ann_data: str = None,
|
|
40
|
+
related_images: list = None,
|
|
41
|
+
custom_data: dict = None,
|
|
42
|
+
scene_name: str = None,
|
|
43
|
+
):
|
|
44
|
+
super().__init__(item_path, ann_data, related_images, custom_data)
|
|
45
|
+
self._type = "point_cloud"
|
|
46
|
+
self._scene_name = scene_name
|
|
47
|
+
|
|
48
|
+
def __init__(
|
|
49
|
+
self,
|
|
50
|
+
input_data: str,
|
|
51
|
+
labeling_interface: str,
|
|
52
|
+
upload_as_links: bool,
|
|
53
|
+
remote_files_map: Optional[Dict[str, str]] = None,
|
|
54
|
+
):
|
|
55
|
+
super().__init__(input_data, labeling_interface, upload_as_links, remote_files_map)
|
|
56
|
+
self._is_pcd_episode = False
|
|
57
|
+
self._lyft = None
|
|
58
|
+
|
|
59
|
+
def __str__(self) -> str:
|
|
60
|
+
return AvailablePointcloudConverters.LYFT
|
|
61
|
+
|
|
62
|
+
@property
|
|
63
|
+
def key_file_ext(self) -> str:
|
|
64
|
+
return ".bin"
|
|
65
|
+
|
|
66
|
+
def validate_format(self) -> bool:
|
|
67
|
+
try:
|
|
68
|
+
from lyft_dataset_sdk.lyftdataset import LyftDataset as Lyft
|
|
69
|
+
except ImportError:
|
|
70
|
+
logger.warn(
|
|
71
|
+
'Install "lyft_dataset_sdk" python package to import datasets in LYFT format.'
|
|
72
|
+
)
|
|
73
|
+
return False
|
|
74
|
+
|
|
75
|
+
def filter_fn(path):
|
|
76
|
+
return all([(Path(path) / name).exists() for name in lyft_helper.FOLDER_NAMES])
|
|
77
|
+
|
|
78
|
+
input_paths = [d for d in fs.dirs_filter(self._input_data, filter_fn)]
|
|
79
|
+
if len(input_paths) == 0:
|
|
80
|
+
return False
|
|
81
|
+
input_path = input_paths[0]
|
|
82
|
+
|
|
83
|
+
lidar_dir = input_path + "/lidar/"
|
|
84
|
+
json_dir = input_path + "/data/"
|
|
85
|
+
if lyft_helper.validate_ann_dir(json_dir) is False:
|
|
86
|
+
return False
|
|
87
|
+
|
|
88
|
+
bin_files = fs.list_files_recursively(
|
|
89
|
+
lidar_dir, [self.key_file_ext], ignore_valid_extensions_case=True
|
|
90
|
+
)
|
|
91
|
+
|
|
92
|
+
if len(bin_files) == 0:
|
|
93
|
+
return False
|
|
94
|
+
|
|
95
|
+
# check if pointclouds have 5 columns (x, y, z, intensity, ring)
|
|
96
|
+
pointcloud = np.fromfile(bin_files[0], dtype=np.float32)
|
|
97
|
+
if pointcloud.shape[0] % 5 != 0:
|
|
98
|
+
return False
|
|
99
|
+
|
|
100
|
+
try:
|
|
101
|
+
t = TinyTimer()
|
|
102
|
+
lyft = Lyft(data_path=input_path, json_path=json_dir, verbose=False)
|
|
103
|
+
self._lyft: Lyft = lyft
|
|
104
|
+
logger.info(f"LyftDataset initialization took {t.get_sec():.2f} sec")
|
|
105
|
+
except Exception as e:
|
|
106
|
+
logger.info(f"Failed to initialize LyftDataset: {e}")
|
|
107
|
+
return False
|
|
108
|
+
|
|
109
|
+
t = TinyTimer()
|
|
110
|
+
progress = Progress(f"Extracting annotations from available scenes...")
|
|
111
|
+
# i = 0 # for debug
|
|
112
|
+
for scene in lyft_helper.get_available_scenes(lyft):
|
|
113
|
+
scene_name = scene["name"]
|
|
114
|
+
sample_datas = lyft_helper.extract_data_from_scene(lyft, scene)
|
|
115
|
+
if sample_datas is None:
|
|
116
|
+
logger.warning(f"Failed to extract sample data from scene: {scene['name']}.")
|
|
117
|
+
continue
|
|
118
|
+
for sample_data in sample_datas:
|
|
119
|
+
item_path = sample_data["lidar_path"]
|
|
120
|
+
ann_data = sample_data["ann_data"]
|
|
121
|
+
related_images = lyft_helper.get_related_images(ann_data)
|
|
122
|
+
custom_data = sample_data.get("custom_data", {})
|
|
123
|
+
item = self.Item(item_path, ann_data, related_images, custom_data, scene_name)
|
|
124
|
+
self._items.append(item)
|
|
125
|
+
progress.iter_done_report()
|
|
126
|
+
# i += 1
|
|
127
|
+
# if i == 2:
|
|
128
|
+
# break
|
|
129
|
+
t = t.get_sec()
|
|
130
|
+
logger.info(
|
|
131
|
+
f"Lyft annotation extraction took {t:.2f} sec ({(t / self.items_count):.3f} sec per sample)"
|
|
132
|
+
)
|
|
133
|
+
|
|
134
|
+
return self.items_count > 0
|
|
135
|
+
|
|
136
|
+
def to_supervisely(
|
|
137
|
+
self,
|
|
138
|
+
item: PointcloudConverter.Item,
|
|
139
|
+
meta: ProjectMeta,
|
|
140
|
+
renamed_classes: dict = {},
|
|
141
|
+
renamed_tags: dict = {},
|
|
142
|
+
) -> PointcloudAnnotation:
|
|
143
|
+
"""
|
|
144
|
+
Converts a point cloud item and its annotations to the supervisely formats.
|
|
145
|
+
|
|
146
|
+
Args:
|
|
147
|
+
item (PointcloudConverter.Item): The point cloud item to convert.
|
|
148
|
+
meta (ProjectMeta): The project meta.
|
|
149
|
+
|
|
150
|
+
Returns:
|
|
151
|
+
PointcloudAnnotation: The converted point cloud annotation.
|
|
152
|
+
"""
|
|
153
|
+
import open3d as o3d # pylint: disable=import-error
|
|
154
|
+
|
|
155
|
+
if getattr(item, "ann_data", None) is None:
|
|
156
|
+
return PointcloudAnnotation()
|
|
157
|
+
|
|
158
|
+
data = item.ann_data
|
|
159
|
+
|
|
160
|
+
# * Convert annotation to json
|
|
161
|
+
boxes = data["gt_boxes"]
|
|
162
|
+
names = data["names"]
|
|
163
|
+
|
|
164
|
+
objects = []
|
|
165
|
+
for name, box in zip(names, boxes):
|
|
166
|
+
center = [float(box[0]), float(box[1]), float(box[2])]
|
|
167
|
+
size = [float(box[3]), float(box[5]), float(box[4])]
|
|
168
|
+
ry = float(box[6])
|
|
169
|
+
|
|
170
|
+
yaw = ry - np.pi
|
|
171
|
+
yaw = yaw - np.floor(yaw / (2 * np.pi) + 0.5) * 2 * np.pi
|
|
172
|
+
world_cam = None
|
|
173
|
+
objects.append(o3d.ml.datasets.utils.BEVBox3D(center, size, yaw, name, -1.0, world_cam))
|
|
174
|
+
objects[-1].yaw = ry
|
|
175
|
+
|
|
176
|
+
geoms = [lyft_helper._convert_BEVBox3D_to_geometry(box) for box in objects]
|
|
177
|
+
|
|
178
|
+
figures = []
|
|
179
|
+
objs = []
|
|
180
|
+
for l, geometry, token in zip(
|
|
181
|
+
objects, geoms, data["instance_tokens"]
|
|
182
|
+
): # by object in point cloud
|
|
183
|
+
class_name = renamed_classes.get(l.label_class, l.label_class)
|
|
184
|
+
tag_names = [
|
|
185
|
+
self._lyft.get("attribute", attr_token).get("name", None)
|
|
186
|
+
for attr_token in token["attribute_tokens"]
|
|
187
|
+
]
|
|
188
|
+
tag_col = None
|
|
189
|
+
if len(tag_names) > 0 and all([tag_name is not None for tag_name in tag_names]):
|
|
190
|
+
tag_meta_names = [renamed_tags.get(name, name) for name in tag_names]
|
|
191
|
+
tag_metas = [meta.get_tag_meta(tag_meta_name) for tag_meta_name in tag_meta_names]
|
|
192
|
+
tag_col = PointcloudTagCollection([PointcloudTag(meta, None) for meta in tag_metas])
|
|
193
|
+
pcobj = PointcloudObject(meta.get_obj_class(class_name), tag_col)
|
|
194
|
+
figures.append(PointcloudFigure(pcobj, geometry))
|
|
195
|
+
objs.append(pcobj)
|
|
196
|
+
return PointcloudAnnotation(PointcloudObjectCollection(objs), figures)
|
|
197
|
+
|
|
198
|
+
def upload_dataset(self, api: Api, dataset_id: int, batch_size: int = 1, log_progress=True):
|
|
199
|
+
unique_names = {name for item in self._items for name in item.ann_data["names"]}
|
|
200
|
+
tag_names = {tag["name"] for tag in self._lyft.attribute}
|
|
201
|
+
self._meta = ProjectMeta(
|
|
202
|
+
[ObjClass(name, Cuboid3d) for name in unique_names],
|
|
203
|
+
[TagMeta(tag, TagValueType.NONE) for tag in tag_names],
|
|
204
|
+
)
|
|
205
|
+
meta, renamed_classes, renamed_tags = self.merge_metas_with_conflicts(api, dataset_id)
|
|
206
|
+
|
|
207
|
+
scene_names = set([item._scene_name for item in self._items])
|
|
208
|
+
dataset_info = api.dataset.get_info_by_id(dataset_id)
|
|
209
|
+
scene_name_to_dataset = {}
|
|
210
|
+
|
|
211
|
+
multiple_scenes = len(scene_names) > 1
|
|
212
|
+
if multiple_scenes:
|
|
213
|
+
logger.info(
|
|
214
|
+
f"Found {len(scene_names)} scenes ({self.items_count} pointclouds) in the input data."
|
|
215
|
+
)
|
|
216
|
+
# * Create a nested dataset for each scene
|
|
217
|
+
for name in scene_names:
|
|
218
|
+
ds = api.dataset.create(
|
|
219
|
+
dataset_info.project_id,
|
|
220
|
+
name,
|
|
221
|
+
change_name_if_conflict=True,
|
|
222
|
+
parent_id=dataset_id,
|
|
223
|
+
)
|
|
224
|
+
scene_name_to_dataset[name] = ds
|
|
225
|
+
else:
|
|
226
|
+
scene_name_to_dataset[list(scene_names)[0]] = dataset_info
|
|
227
|
+
|
|
228
|
+
if log_progress:
|
|
229
|
+
progress, progress_cb = self.get_progress(self.items_count, "Converting pointclouds...")
|
|
230
|
+
else:
|
|
231
|
+
progress_cb = None
|
|
232
|
+
|
|
233
|
+
for item in self._items:
|
|
234
|
+
# * Get the current dataset for the scene
|
|
235
|
+
current_dataset = scene_name_to_dataset.get(item._scene_name, None)
|
|
236
|
+
if current_dataset is None:
|
|
237
|
+
raise RuntimeError(f"Dataset not found for scene name: {item._scene_name}")
|
|
238
|
+
current_dataset_id = current_dataset.id
|
|
239
|
+
|
|
240
|
+
# * Convert timestamp to ISO format
|
|
241
|
+
iso_time = datetime.utcfromtimestamp(item.ann_data["timestamp"] / 1e6).isoformat() + "Z"
|
|
242
|
+
item.ann_data["timestamp"] = iso_time
|
|
243
|
+
|
|
244
|
+
# * Convert pointcloud from ".bin" to ".pcd"
|
|
245
|
+
pcd_path = str(Path(item.path).with_suffix(".pcd"))
|
|
246
|
+
if fs.file_exists(pcd_path):
|
|
247
|
+
logger.warning(f"Overwriting file with path: {pcd_path}")
|
|
248
|
+
lyft_helper.convert_bin_to_pcd(item.path, pcd_path)
|
|
249
|
+
|
|
250
|
+
# * Upload pointcloud
|
|
251
|
+
pcd_name = fs.get_file_name(pcd_path)
|
|
252
|
+
info = api.pointcloud.upload_path(current_dataset_id, pcd_name, pcd_path, {})
|
|
253
|
+
pcd_id = info.id
|
|
254
|
+
|
|
255
|
+
# * Convert annotation and upload
|
|
256
|
+
ann = self.to_supervisely(item, meta, renamed_classes, renamed_tags)
|
|
257
|
+
api.pointcloud.annotation.append(pcd_id, ann)
|
|
258
|
+
|
|
259
|
+
# * Upload related images
|
|
260
|
+
image_jsons = []
|
|
261
|
+
camera_names = []
|
|
262
|
+
for img_path, rimage_info in lyft_helper.generate_rimage_infos(
|
|
263
|
+
item._related_images, item.ann_data
|
|
264
|
+
):
|
|
265
|
+
img = api.pointcloud.upload_related_image(img_path)
|
|
266
|
+
image_jsons.append(
|
|
267
|
+
{
|
|
268
|
+
ApiField.ENTITY_ID: pcd_id,
|
|
269
|
+
ApiField.NAME: rimage_info[ApiField.NAME],
|
|
270
|
+
ApiField.HASH: img,
|
|
271
|
+
ApiField.META: rimage_info[ApiField.META],
|
|
272
|
+
}
|
|
273
|
+
)
|
|
274
|
+
camera_names.append(rimage_info[ApiField.META]["deviceId"])
|
|
275
|
+
if len(image_jsons) > 0:
|
|
276
|
+
api.pointcloud.add_related_images(image_jsons, camera_names)
|
|
277
|
+
|
|
278
|
+
# * Clean up
|
|
279
|
+
fs.silent_remove(pcd_path)
|
|
280
|
+
if log_progress:
|
|
281
|
+
progress_cb(1)
|
|
282
|
+
|
|
283
|
+
logger.info(f"Dataset ID:{current_dataset_id} has been successfully uploaded.")
|
|
284
|
+
|
|
285
|
+
if log_progress:
|
|
286
|
+
if is_development():
|
|
287
|
+
progress.close()
|
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from typing import List, Tuple
|
|
3
|
+
import numpy as np
|
|
4
|
+
from supervisely import logger, fs
|
|
5
|
+
from supervisely.geometry.cuboid_3d import Cuboid3d, Vector3d
|
|
6
|
+
|
|
7
|
+
TABLE_NAMES = [
|
|
8
|
+
"category",
|
|
9
|
+
"attribute",
|
|
10
|
+
"visibility",
|
|
11
|
+
"instance",
|
|
12
|
+
"sensor",
|
|
13
|
+
"calibrated_sensor",
|
|
14
|
+
"ego_pose",
|
|
15
|
+
"log",
|
|
16
|
+
"scene",
|
|
17
|
+
"sample",
|
|
18
|
+
"sample_data",
|
|
19
|
+
"sample_annotation",
|
|
20
|
+
"map",
|
|
21
|
+
]
|
|
22
|
+
FOLDER_NAMES = ["data", "lidar", "images", "maps"]
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def get_available_scenes(lyft):
|
|
26
|
+
for scene in lyft.scene:
|
|
27
|
+
token = scene["token"]
|
|
28
|
+
scene_rec = lyft.get("scene", token)
|
|
29
|
+
sample_rec = lyft.get("sample", scene_rec["first_sample_token"])
|
|
30
|
+
sample_data = lyft.get("sample_data", sample_rec["data"]["LIDAR_TOP"])
|
|
31
|
+
|
|
32
|
+
lidar_path, boxes, _ = lyft.get_sample_data(sample_data["token"])
|
|
33
|
+
if not os.path.exists(str(lidar_path)):
|
|
34
|
+
continue
|
|
35
|
+
yield scene
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def extract_data_from_scene(lyft, scene):
|
|
39
|
+
try:
|
|
40
|
+
from pyquaternion import Quaternion
|
|
41
|
+
from lyft_dataset_sdk.utils.geometry_utils import transform_matrix
|
|
42
|
+
except ImportError as ie:
|
|
43
|
+
logger.warn(f"Lazy import failed. Error: {ie}")
|
|
44
|
+
return
|
|
45
|
+
|
|
46
|
+
new_token = scene["first_sample_token"]
|
|
47
|
+
dataset_data = []
|
|
48
|
+
num_samples = scene["nbr_samples"]
|
|
49
|
+
for i in range(num_samples):
|
|
50
|
+
my_sample = lyft.get("sample", new_token)
|
|
51
|
+
|
|
52
|
+
data = {}
|
|
53
|
+
data["ann_data"] = {}
|
|
54
|
+
data["ann_data"]["timestamp"] = my_sample["timestamp"]
|
|
55
|
+
data["custom_data"] = lyft.get("log", scene["log_token"])
|
|
56
|
+
|
|
57
|
+
sensor_token = my_sample["data"]["LIDAR_TOP"]
|
|
58
|
+
lidar_path, boxes, _ = lyft.get_sample_data(sensor_token)
|
|
59
|
+
if not os.path.exists(str(lidar_path)):
|
|
60
|
+
logger.debug(f"Skipping sample {new_token} - lidar file doesn't exist")
|
|
61
|
+
continue
|
|
62
|
+
data["lidar_path"] = str(lidar_path)
|
|
63
|
+
|
|
64
|
+
sd_record_lid = lyft.get("sample_data", sensor_token)
|
|
65
|
+
cs_record_lid = lyft.get("calibrated_sensor", sd_record_lid["calibrated_sensor_token"])
|
|
66
|
+
ego_record_lid = lyft.get("ego_pose", sd_record_lid["ego_pose_token"])
|
|
67
|
+
|
|
68
|
+
locs = np.array([b.center for b in boxes]).reshape(-1, 3)
|
|
69
|
+
dims = np.array([b.wlh for b in boxes]).reshape(-1, 3)
|
|
70
|
+
rots = np.array([b.orientation.yaw_pitch_roll[0] for b in boxes]).reshape(-1, 1)
|
|
71
|
+
|
|
72
|
+
gt_boxes = np.concatenate([locs, dims, -rots + np.pi / 2], axis=1)
|
|
73
|
+
names = np.array([b.name for b in boxes])
|
|
74
|
+
data["ann_data"]["names"] = names
|
|
75
|
+
data["ann_data"]["gt_boxes"] = gt_boxes
|
|
76
|
+
instance_tokens = [lyft.get("sample_annotation", box.token) for box in boxes]
|
|
77
|
+
data["ann_data"]["instance_tokens"] = instance_tokens
|
|
78
|
+
|
|
79
|
+
for sensor, sensor_token in my_sample["data"].items():
|
|
80
|
+
if "CAM" in sensor:
|
|
81
|
+
img_path, boxes, cam_intrinsic = lyft.get_sample_data(sensor_token)
|
|
82
|
+
if not os.path.exists(img_path):
|
|
83
|
+
logger.debug(f"pass {sensor} - image doesn't exist")
|
|
84
|
+
continue
|
|
85
|
+
data["ann_data"][sensor] = str(img_path)
|
|
86
|
+
|
|
87
|
+
sd_record_cam = lyft.get("sample_data", sensor_token)
|
|
88
|
+
cs_record_cam = lyft.get(
|
|
89
|
+
"calibrated_sensor", sd_record_cam["calibrated_sensor_token"]
|
|
90
|
+
)
|
|
91
|
+
ego_record_cam = lyft.get("ego_pose", sd_record_cam["ego_pose_token"])
|
|
92
|
+
cam_height = sd_record_cam["height"]
|
|
93
|
+
cam_width = sd_record_cam["width"]
|
|
94
|
+
|
|
95
|
+
lid_to_ego = transform_matrix(
|
|
96
|
+
cs_record_lid["translation"],
|
|
97
|
+
Quaternion(cs_record_lid["rotation"]),
|
|
98
|
+
inverse=False,
|
|
99
|
+
)
|
|
100
|
+
lid_ego_to_world = transform_matrix(
|
|
101
|
+
ego_record_lid["translation"],
|
|
102
|
+
Quaternion(ego_record_lid["rotation"]),
|
|
103
|
+
inverse=False,
|
|
104
|
+
)
|
|
105
|
+
world_to_cam_ego = transform_matrix(
|
|
106
|
+
ego_record_cam["translation"],
|
|
107
|
+
Quaternion(ego_record_cam["rotation"]),
|
|
108
|
+
inverse=True,
|
|
109
|
+
)
|
|
110
|
+
ego_to_cam = transform_matrix(
|
|
111
|
+
cs_record_cam["translation"],
|
|
112
|
+
Quaternion(cs_record_cam["rotation"]),
|
|
113
|
+
inverse=True,
|
|
114
|
+
)
|
|
115
|
+
|
|
116
|
+
velo_to_cam = np.dot(
|
|
117
|
+
ego_to_cam, np.dot(world_to_cam_ego, np.dot(lid_ego_to_world, lid_to_ego))
|
|
118
|
+
)
|
|
119
|
+
velo_to_cam_rot = velo_to_cam[:3, :3]
|
|
120
|
+
velo_to_cam_trans = velo_to_cam[:3, 3]
|
|
121
|
+
|
|
122
|
+
data["ann_data"][f"{sensor}_extrinsic"] = np.hstack(
|
|
123
|
+
(velo_to_cam_rot, velo_to_cam_trans.reshape(3, 1))
|
|
124
|
+
)
|
|
125
|
+
data["ann_data"][f"{sensor}_intrinsic"] = np.asarray(
|
|
126
|
+
cs_record_cam["camera_intrinsic"]
|
|
127
|
+
)
|
|
128
|
+
data["ann_data"][f"{sensor}_imsize"] = (cam_width, cam_height)
|
|
129
|
+
else:
|
|
130
|
+
logger.debug(f"pass {sensor} - isn't a camera")
|
|
131
|
+
dataset_data.append(data)
|
|
132
|
+
|
|
133
|
+
new_token = my_sample.get("next")
|
|
134
|
+
if not new_token:
|
|
135
|
+
break
|
|
136
|
+
|
|
137
|
+
return dataset_data
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def generate_rimage_infos(related_images: List[Tuple[str, str]], ann_data):
|
|
141
|
+
sensors_to_skip = ["_intrinsic", "_extrinsic", "_imsize"]
|
|
142
|
+
for sensor, image_path in related_images:
|
|
143
|
+
if not any([sensor.endswith(s) for s in sensors_to_skip]):
|
|
144
|
+
image_name = fs.get_file_name_with_ext(image_path)
|
|
145
|
+
sly_path_img = os.path.join(os.path.dirname(image_path), image_name)
|
|
146
|
+
img_info = {
|
|
147
|
+
"name": image_name,
|
|
148
|
+
"meta": {
|
|
149
|
+
"deviceId": sensor,
|
|
150
|
+
"timestamp": ann_data["timestamp"],
|
|
151
|
+
"sensorsData": {
|
|
152
|
+
"extrinsicMatrix": list(
|
|
153
|
+
ann_data[f"{sensor}_extrinsic"].flatten().astype(float)
|
|
154
|
+
),
|
|
155
|
+
"intrinsicMatrix": list(
|
|
156
|
+
ann_data[f"{sensor}_intrinsic"].flatten().astype(float)
|
|
157
|
+
),
|
|
158
|
+
},
|
|
159
|
+
},
|
|
160
|
+
}
|
|
161
|
+
yield sly_path_img, img_info
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _convert_BEVBox3D_to_geometry(box):
|
|
165
|
+
bbox = box.to_xyzwhlr()
|
|
166
|
+
dim = bbox[[3, 5, 4]]
|
|
167
|
+
pos = bbox[:3] + [0, 0, dim[1] / 2]
|
|
168
|
+
yaw = bbox[-1]
|
|
169
|
+
position = Vector3d(float(pos[0]), float(pos[1]), float(pos[2]))
|
|
170
|
+
rotation = Vector3d(0, 0, float(-yaw))
|
|
171
|
+
|
|
172
|
+
dimension = Vector3d(float(dim[0]), float(dim[2]), float(dim[1]))
|
|
173
|
+
geometry = Cuboid3d(position, rotation, dimension)
|
|
174
|
+
return geometry
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def convert_bin_to_pcd(bin_file, save_filepath):
    """Convert a Lyft ``.bin`` lidar sweep into a ``.pcd`` file.

    Each record in the binary file holds five float32 values: x, y, z,
    intensity and ring index. Intensity (reflectivity) and ring index
    (laser id, 0-31) are stashed in the red and green color channels
    respectively, since the point cloud has no dedicated fields for them.

    :param bin_file: path to the Lyft binary lidar file.
    :param save_filepath: destination ``.pcd`` path.
    """
    import open3d as o3d  # pylint: disable=import-error

    raw = np.fromfile(bin_file, dtype=np.float32).reshape(-1, 5)
    xyz = raw[:, :3]

    # Pack intensity / ring index into a fake RGB array; blue stays zero.
    fake_rgb = np.zeros((raw.shape[0], 3))
    fake_rgb[:, 0] = raw[:, 3]  # red: reflectivity of the hit surface
    fake_rgb[:, 1] = raw[:, 4]  # green: laser ring index (0..31)

    cloud = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(xyz))
    cloud.colors = o3d.utility.Vector3dVector(fake_rgb)
    o3d.io.write_point_cloud(save_filepath, cloud)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def get_related_images(ann_data):
    """Return ``(sensor, image_path)`` pairs for every camera entry.

    NOTE: keys such as ``CAM_FRONT_intrinsic`` also contain ``"CAM"`` and are
    therefore returned as well; consumers (e.g. ``generate_rimage_infos``)
    are expected to filter those suffixed entries out themselves.
    """
    cameras = []
    for sensor, img_path in ann_data.items():
        if "CAM" in sensor:
            cameras.append((sensor, img_path))
    return cameras
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def validate_ann_dir(ann_dir):
    """Check that every required Lyft table JSON exists inside *ann_dir*.

    :param ann_dir: directory expected to hold ``<table>.json`` for every
        table listed in ``TABLE_NAMES``.
    :returns: True when all table files are present, False otherwise.
    """
    return all(fs.file_exists(f"{ann_dir}/{table}.json") for table in TABLE_NAMES)
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def lyft_annotation_to_BEVBox3D(data):
    """Build Open3D ``BEVBox3D`` objects from a Lyft sample annotation dict.

    :param data: dict with ``gt_boxes`` (7-vectors: x, y, z and extents plus
        yaw) and ``names`` (class name per box).
    :returns: list of ``BEVBox3D`` instances, one per ground-truth box.
    """
    import open3d as o3d  # pylint: disable=import-error

    objects = []
    for name, box in zip(data["names"], data["gt_boxes"]):
        center = [float(box[0]), float(box[1]), float(box[2])]
        # Extents are reordered (indices 3, 5, 4) into the layout the
        # BEVBox3D constructor expects.
        size = [float(box[3]), float(box[5]), float(box[4])]
        ry = float(box[6])

        # Normalize ry - pi into the [-pi, pi) range for the constructor.
        yaw = ry - np.pi
        yaw = yaw - np.floor(yaw / (2 * np.pi) + 0.5) * 2 * np.pi
        bev_box = o3d.ml.datasets.utils.BEVBox3D(center, size, yaw, name, -1.0, None)
        # Restore the raw yaw on the created object; only the normalized
        # value was handed to the constructor.
        bev_box.yaw = ry
        objects.append(bev_box)

    return objects
|
|
File without changes
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
from typing import Dict, Optional, Union
|
|
2
|
+
|
|
3
|
+
from supervisely.convert.base_converter import AvailablePointcloudEpisodesConverters
|
|
4
|
+
from supervisely.convert.pointcloud.lyft.lyft_converter import LyftConverter
|
|
5
|
+
from supervisely.convert.pointcloud_episodes.pointcloud_episodes_converter import (
|
|
6
|
+
PointcloudEpisodeConverter,
|
|
7
|
+
)
|
|
8
|
+
from supervisely.pointcloud_annotation.pointcloud_episode_annotation import (
|
|
9
|
+
PointcloudEpisodeAnnotation,
|
|
10
|
+
PointcloudEpisodeObjectCollection,
|
|
11
|
+
PointcloudEpisodeFrameCollection,
|
|
12
|
+
PointcloudEpisodeTagCollection,
|
|
13
|
+
PointcloudFigure,
|
|
14
|
+
)
|
|
15
|
+
from supervisely.pointcloud_annotation.pointcloud_episode_tag import (
|
|
16
|
+
PointcloudEpisodeTag,
|
|
17
|
+
)
|
|
18
|
+
from supervisely.project.project_settings import LabelingInterface
|
|
19
|
+
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from supervisely import (
|
|
22
|
+
Api,
|
|
23
|
+
ObjClass,
|
|
24
|
+
ProjectMeta,
|
|
25
|
+
logger,
|
|
26
|
+
is_development,
|
|
27
|
+
PointcloudObject,
|
|
28
|
+
PointcloudEpisodeObject,
|
|
29
|
+
PointcloudFigure,
|
|
30
|
+
PointcloudEpisodeFrame,
|
|
31
|
+
TagMeta,
|
|
32
|
+
TagValueType,
|
|
33
|
+
VideoTagCollection,
|
|
34
|
+
VideoTag,
|
|
35
|
+
)
|
|
36
|
+
from supervisely.io import fs
|
|
37
|
+
from supervisely.convert.pointcloud.lyft import lyft_helper
|
|
38
|
+
from supervisely.api.api import ApiField
|
|
39
|
+
from datetime import datetime
|
|
40
|
+
from supervisely.geometry.cuboid_3d import Cuboid3d
|
|
41
|
+
from collections import defaultdict
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class LyftEpisodesConverter(LyftConverter, PointcloudEpisodeConverter):
    """Converter for LYFT pointcloud episodes format.

    Reuses the item parsing of :class:`LyftConverter` and groups the parsed
    samples by scene; every scene is uploaded as one pointcloud episode.
    """

    def __init__(
        self,
        input_data: str,
        labeling_interface: Optional[Union[LabelingInterface, str]],
        upload_as_links: bool,
        remote_files_map: Optional[Dict[str, str]] = None,
    ):
        super().__init__(input_data, labeling_interface, upload_as_links, remote_files_map)

        self._type = "point_cloud_episode"
        self._is_pcd_episode = True

    def __str__(self) -> str:
        return AvailablePointcloudEpisodesConverters.LYFT

    def to_supervisely(
        self,
        items,
        meta: ProjectMeta,
        renamed_classes: dict = {},  # read-only; never mutated, safe as a default
        renamed_tags: dict = {},  # read-only; currently unused
    ):
        """Convert parsed Lyft items into episode annotations, one per scene.

        :param items: parsed Lyft items (each exposes ``_scene_name`` and ``ann_data``).
        :param meta: project meta holding the object classes to assign.
        :param renamed_classes: mapping of original -> renamed class names.
        :param renamed_tags: mapping of original -> renamed tag names.
        :returns: dict mapping scene name -> :class:`PointcloudEpisodeAnnotation`.
        """
        scene_name_to_pcd_ep_ann = {}

        # * Group items by scene name
        scene_name_to_items = defaultdict(list)
        for item in items:
            scene_name_to_items[item._scene_name].append(item)

        # * Iterate over each scene
        for scene_name, scene_items in scene_name_to_items.items():
            # Maps instance token -> episode object, so figures of the same
            # physical object across frames share a single parent object.
            token_to_obj = {}
            frames = []
            tags = []  # todo tags that belong to the whole scene if any
            # * Iterate over each sample (frame) in the scene
            for frame_idx, item in enumerate(scene_items):
                ann = item.ann_data
                objs = lyft_helper.lyft_annotation_to_BEVBox3D(ann)
                figures = []
                for obj, instance_token in zip(objs, ann["instance_tokens"]):
                    parent_obj_token = instance_token["prev"]
                    if parent_obj_token == "":
                        # * First appearance of the instance: create a new object
                        class_name = instance_token["category_name"]
                        obj_class_name = renamed_classes.get(class_name, class_name)
                        obj_class = meta.get_obj_class(obj_class_name)
                        # todo: object-level attribute tags (disabled upstream)
                        obj_tags = None
                        parent_object = PointcloudEpisodeObject(obj_class, obj_tags)
                        token_to_obj[instance_token["token"]] = parent_object
                    else:
                        # * The instance continues from a previous frame:
                        # chain the current token to the already created object.
                        parent_object = token_to_obj[parent_obj_token]
                        token_to_obj[instance_token["token"]] = parent_object

                    geom = lyft_helper._convert_BEVBox3D_to_geometry(obj)
                    figures.append(PointcloudFigure(parent_object, geom, frame_idx))
                frames.append(PointcloudEpisodeFrame(frame_idx, figures))

            tag_collection = PointcloudEpisodeTagCollection(tags) if len(tags) > 0 else None
            scene_name_to_pcd_ep_ann[scene_name] = PointcloudEpisodeAnnotation(
                len(frames),
                PointcloudEpisodeObjectCollection(list(set(token_to_obj.values()))),
                PointcloudEpisodeFrameCollection(frames),
                tag_collection,
            )
        return scene_name_to_pcd_ep_ann

    def upload_dataset(self, api: Api, dataset_id: int, batch_size: int = 1, log_progress=True):
        """Upload every scene as a pointcloud episode.

        For multiple scenes a nested dataset is created per scene under
        ``dataset_id``; a single scene is uploaded into ``dataset_id`` itself.

        :param api: Supervisely API instance.
        :param dataset_id: destination dataset id.
        :param batch_size: kept for interface compatibility (upload is per item).
        :param log_progress: show a progress bar while converting/uploading.
        """
        # Build the project meta from every class/tag name seen in the data.
        unique_names = {name for item in self._items for name in item.ann_data["names"]}
        tag_names = {tag["name"] for tag in self._lyft.attribute}
        self._meta = ProjectMeta(
            [ObjClass(name, Cuboid3d) for name in unique_names],
            [TagMeta(tag, TagValueType.NONE) for tag in tag_names],
        )
        meta, renamed_classes, renamed_tags = self.merge_metas_with_conflicts(api, dataset_id)

        scene_names = set([item._scene_name for item in self._items])

        dataset_info = api.dataset.get_info_by_id(dataset_id)
        scene_name_to_dataset = {}

        multiple_scenes = len(scene_names) > 1
        if multiple_scenes:
            logger.info(
                f"Found {len(scene_names)} scenes ({self.items_count} pointclouds) in the input data."
            )
            # * Create a nested dataset for each scene
            for name in scene_names:
                ds = api.dataset.create(
                    dataset_info.project_id,
                    name,
                    change_name_if_conflict=True,
                    parent_id=dataset_id,
                )
                scene_name_to_dataset[name] = ds
        else:
            scene_name_to_dataset[list(scene_names)[0]] = dataset_info

        if log_progress:
            progress, progress_cb = self.get_progress(self.items_count, "Converting pointclouds...")
        else:
            progress_cb = None

        scene_name_to_ann = self.to_supervisely(self._items, meta, renamed_classes, renamed_tags)
        scene_name_to_item = defaultdict(list)
        for item in self._items:
            scene_name_to_item[item._scene_name].append(item)

        for scene, items in scene_name_to_item.items():
            # * Get the annotation for the scene
            ann_episode = scene_name_to_ann[scene]
            # FIX: index with the current scene key. The previous code used a
            # leftover loop variable (`item._scene_name`), which pointed at the
            # scene of the LAST parsed item — in multi-scene uploads every
            # scene's pointclouds and annotations landed in the wrong dataset.
            current_dataset_id = scene_name_to_dataset[scene].id
            frame_to_pointcloud_ids = {}
            for idx, item in enumerate(items):
                # * Convert timestamp (microseconds) to ISO 8601 UTC
                iso_time = (
                    datetime.utcfromtimestamp(item.ann_data["timestamp"] / 1e6).isoformat() + "Z"
                )
                item.ann_data["timestamp"] = iso_time

                # * Convert pointcloud from ".bin" to ".pcd"
                pcd_path = str(Path(item.path).with_suffix(".pcd"))
                if fs.file_exists(pcd_path):
                    logger.warning(f"Overwriting file with path: {pcd_path}")
                lyft_helper.convert_bin_to_pcd(item.path, pcd_path)

                # * Upload pointcloud; "frame" links it to its episode frame index
                pcd_meta = {"frame": idx}

                pcd_name = fs.get_file_name(pcd_path)
                info = api.pointcloud_episode.upload_path(
                    current_dataset_id, pcd_name, pcd_path, pcd_meta
                )
                pcd_id = info.id
                frame_to_pointcloud_ids[idx] = pcd_id

                # * Upload related (camera) images
                image_jsons = []
                camera_names = []
                for img_path, rimage_info in lyft_helper.generate_rimage_infos(
                    item._related_images, item.ann_data
                ):
                    img = api.pointcloud_episode.upload_related_image(img_path)
                    image_jsons.append(
                        {
                            ApiField.ENTITY_ID: pcd_id,
                            ApiField.NAME: rimage_info[ApiField.NAME],
                            ApiField.HASH: img,
                            ApiField.META: rimage_info[ApiField.META],
                        }
                    )
                    camera_names.append(rimage_info[ApiField.META]["deviceId"])
                if len(image_jsons) > 0:
                    api.pointcloud_episode.add_related_images(image_jsons, camera_names)

                # * Clean up the temporary .pcd file
                fs.silent_remove(pcd_path)
                if log_progress:
                    progress_cb(1)

            try:
                api.pointcloud_episode.annotation.append(
                    current_dataset_id, ann_episode, frame_to_pointcloud_ids
                )
            except Exception as e:
                # Best effort: log the server message and continue with the
                # remaining scenes rather than aborting the whole upload.
                error_msg = getattr(getattr(e, "response", e), "text", str(e))
                logger.warn(
                    f"Failed to upload annotation for scene: {scene}. Message: {error_msg}"
                )
            logger.info(f"Dataset ID:{current_dataset_id} has been successfully uploaded.")

        if log_progress:
            if is_development():
                progress.close()
|
|
@@ -808,7 +808,8 @@ def handle_exception(exception: Exception) -> Union[HandleException, None]:
|
|
|
808
808
|
for frame in stack[::-1]:
|
|
809
809
|
if re.match(pattern, frame.line):
|
|
810
810
|
return handler(exception, stack)
|
|
811
|
-
|
|
811
|
+
arg = next(iter(exception.args), None)
|
|
812
|
+
if isinstance(arg, str) and re.match(pattern, arg):
|
|
812
813
|
return handler(exception, stack)
|
|
813
814
|
if isinstance(exception, HTTPError):
|
|
814
815
|
msg = exception.response.text
|
supervisely/io/fs.py
CHANGED
|
@@ -167,23 +167,28 @@ def list_dir_recursively(
|
|
|
167
167
|
|
|
168
168
|
|
|
169
169
|
def list_files_recursively(
|
|
170
|
-
dir: str,
|
|
170
|
+
dir: str,
|
|
171
|
+
valid_extensions: Optional[List[str]] = None,
|
|
172
|
+
filter_fn=None,
|
|
173
|
+
ignore_valid_extensions_case: Optional[bool] = False,
|
|
171
174
|
) -> List[str]:
|
|
172
175
|
"""
|
|
173
176
|
Recursively walks through directory and returns list with all file paths.
|
|
174
177
|
Can be filtered by valid extensions and filter function.
|
|
175
178
|
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
179
|
+
:param dir: Target dir path.
|
|
180
|
+
:param dir: str
|
|
181
|
+
:param valid_extensions: List with valid file extensions.
|
|
182
|
+
:type valid_extensions: List[str], optional
|
|
183
|
+
:param filter_fn: Function with a single argument. Argument is a file path. Function determines whether to keep a given file path. Must return True or False.
|
|
184
|
+
:type filter_fn: Callable, optional
|
|
185
|
+
:param ignore_valid_extensions_case: If True, validation of file extensions will be case insensitive.
|
|
186
|
+
:type ignore_valid_extensions_case: bool
|
|
187
|
+
:returns: List with file paths
|
|
188
|
+
:rtype: :class:`List[str]`
|
|
189
|
+
:Usage example:
|
|
185
190
|
|
|
186
|
-
|
|
191
|
+
.. code-block:: python
|
|
187
192
|
|
|
188
193
|
import supervisely as sly
|
|
189
194
|
|
|
@@ -198,12 +203,17 @@ def list_files_recursively(
|
|
|
198
203
|
for filename in file_names:
|
|
199
204
|
yield os.path.join(dir_name, filename)
|
|
200
205
|
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
206
|
+
valid_extensions = valid_extensions if ignore_valid_extensions_case is False else [ext.lower() for ext in valid_extensions]
|
|
207
|
+
files = []
|
|
208
|
+
for file_path in file_path_generator():
|
|
209
|
+
file_ext = get_file_ext(file_path)
|
|
210
|
+
if ignore_valid_extensions_case:
|
|
211
|
+
file_ext.lower()
|
|
212
|
+
if (
|
|
213
|
+
valid_extensions is None or file_ext in valid_extensions
|
|
214
|
+
) and (filter_fn is None or filter_fn(file_path)):
|
|
215
|
+
files.append(file_path)
|
|
216
|
+
return files
|
|
207
217
|
|
|
208
218
|
|
|
209
219
|
def list_files(
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
supervisely/README.md,sha256=XM-DiMC6To3I9RjQZ0c61905EFRR_jnCUx2q3uNR-X8,3331
|
|
2
|
-
supervisely/__init__.py,sha256=
|
|
2
|
+
supervisely/__init__.py,sha256=x83gx4W-dVuR8-6vthw5nZptfR0k_1FLQfXs7H36c3Q,10800
|
|
3
3
|
supervisely/_utils.py,sha256=I4nZ0L7NS6144r-CQ2VJvLeUJZ1bCi4pYXH4Gxo3-D4,15763
|
|
4
4
|
supervisely/function_wrapper.py,sha256=R5YajTQ0GnRp2vtjwfC9hINkzQc0JiyGsu8TER373xY,1912
|
|
5
5
|
supervisely/sly_logger.py,sha256=LG1wTyyctyEKuCuKM2IKf_SMPH7BzkTsFdO-0tnorzg,6225
|
|
@@ -23,7 +23,7 @@ supervisely/api/advanced_api.py,sha256=Nd5cCnHFWc3PSUrCtENxTGtDjS37_lCHXsgXvUI3T
|
|
|
23
23
|
supervisely/api/agent_api.py,sha256=ShWAIlXcWXcyI9fqVuP5GZVCigCMJmjnvdGUfLspD6Y,8890
|
|
24
24
|
supervisely/api/annotation_api.py,sha256=kB9l0NhQEkunGDC9fWjNzf5DdhqRF1tv-RRnIbkV2k0,64941
|
|
25
25
|
supervisely/api/api.py,sha256=0dgPx_eizoCEFzfT8YH9uh1kq-OJwjrV5fBGD7uZ7E4,65840
|
|
26
|
-
supervisely/api/app_api.py,sha256
|
|
26
|
+
supervisely/api/app_api.py,sha256=RsbVej8WxWVn9cNo5s3Fqd1symsCdsfOaKVBKEUapRY,71927
|
|
27
27
|
supervisely/api/dataset_api.py,sha256=eovT6l62jgjlRyCZ6IvoudUBfDxv9Hjj3Ap8IuCLd7I,41290
|
|
28
28
|
supervisely/api/file_api.py,sha256=7yWt8lRQ4UfLmnMZ9T18UXzu8jihrtHtcqi6GZJG-0w,83414
|
|
29
29
|
supervisely/api/github_api.py,sha256=NIexNjEer9H5rf5sw2LEZd7C1WR-tK4t6IZzsgeAAwQ,623
|
|
@@ -32,7 +32,7 @@ supervisely/api/image_api.py,sha256=2cki-IzA5jnN3QqqdSIbIbHJhDWxFGYxXY94WqBOoio,
|
|
|
32
32
|
supervisely/api/import_storage_api.py,sha256=BDCgmR0Hv6OoiRHLCVPKt3iDxSVlQp1WrnKhAK_Zl84,460
|
|
33
33
|
supervisely/api/issues_api.py,sha256=BqDJXmNoTzwc3xe6_-mA7FDFC5QQ-ahGbXk_HmpkSeQ,17925
|
|
34
34
|
supervisely/api/labeling_job_api.py,sha256=odnzZjp29yM16Gq-FYkv-OA4WFMNJCLFo4qSikW2A7c,56280
|
|
35
|
-
supervisely/api/module_api.py,sha256=
|
|
35
|
+
supervisely/api/module_api.py,sha256=8z7K6K77fa9oijnix4vnCADJwe5nZtsDiWKZTWc_yuI,43273
|
|
36
36
|
supervisely/api/neural_network_api.py,sha256=ktPVRO4Jeulougio8F0mioJJHwRJcX250Djp1wBoQ9c,7620
|
|
37
37
|
supervisely/api/object_class_api.py,sha256=-rQcKwhBw3iL9KNH9c1ROgoimgWM1ls6Wi_tb1R-MzY,7683
|
|
38
38
|
supervisely/api/plugin_api.py,sha256=TlfrosdRuYG4NUxk92QiQoVaOdztFspPpygyVa3M3zk,5283
|
|
@@ -560,12 +560,12 @@ supervisely/cli/teamfiles/teamfiles_upload.py,sha256=xnsW2rvdq1e-KGjF1tMBu7Oxh3n
|
|
|
560
560
|
supervisely/collection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
561
561
|
supervisely/collection/key_indexed_collection.py,sha256=x2UVlkprspWhhae9oLUzjTWBoIouiWY9UQSS_MozfH0,37643
|
|
562
562
|
supervisely/collection/str_enum.py,sha256=Zp29yFGvnxC6oJRYNNlXhO2lTSdsriU1wiGHj6ahEJE,1250
|
|
563
|
-
supervisely/convert/__init__.py,sha256=
|
|
564
|
-
supervisely/convert/base_converter.py,sha256=
|
|
563
|
+
supervisely/convert/__init__.py,sha256=fgJPpXP8jOt4lMbG0lYqWLOTDNcw695vINCqvZ0i6BA,2742
|
|
564
|
+
supervisely/convert/base_converter.py,sha256=Y_uOUFJtqC44y_J6VsFB90hao8rrXkffV1ilxdj2T5o,18352
|
|
565
565
|
supervisely/convert/converter.py,sha256=tWxTDfFv7hwzQhUQrBxzfr6WP8FUGFX_ewg5T2HbUYo,8959
|
|
566
566
|
supervisely/convert/image/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
567
567
|
supervisely/convert/image/image_converter.py,sha256=r-qdhuwOsk727mXIM26ucQhkoIKigu1M0BF-tw9IfGg,10321
|
|
568
|
-
supervisely/convert/image/image_helper.py,sha256=
|
|
568
|
+
supervisely/convert/image/image_helper.py,sha256=fdV0edQD6hVGQ8TXn2JGDzsnrAXPDMacHBQsApzOME8,3677
|
|
569
569
|
supervisely/convert/image/cityscapes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
570
570
|
supervisely/convert/image/cityscapes/cityscapes_converter.py,sha256=msmsR2W-Xiod06dwn-MzmkbrEmQQqlKh7zyfTrW6YQw,7854
|
|
571
571
|
supervisely/convert/image/cityscapes/cityscapes_helper.py,sha256=in5nR7__q_u5dCkVtZmynfZ_ZuvsIAHrTzyTG4EvNgU,2988
|
|
@@ -616,6 +616,9 @@ supervisely/convert/pointcloud/bag/bag_helper.py,sha256=2TFe49isZTxMhya-PApqLPxr
|
|
|
616
616
|
supervisely/convert/pointcloud/las/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
617
617
|
supervisely/convert/pointcloud/las/las_converter.py,sha256=prbvApwrjQ2cuDD9nAFBDkTLxtsN4jdjCZdNnkNow-g,1722
|
|
618
618
|
supervisely/convert/pointcloud/las/las_helper.py,sha256=1gQ3OZLpe6D25CY_jXWDsrLBiS7nWfCgl3Zq--9TU14,1296
|
|
619
|
+
supervisely/convert/pointcloud/lyft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
620
|
+
supervisely/convert/pointcloud/lyft/lyft_converter.py,sha256=bM9R3LLt4l5frK1Lhmy_WnhGUYCC7rH94v94sqmLxjY,11010
|
|
621
|
+
supervisely/convert/pointcloud/lyft/lyft_helper.py,sha256=bTe7ryLPfSkW0MjzFP-6AMyDMBtPu8Xk9cx0g0MomoQ,8521
|
|
619
622
|
supervisely/convert/pointcloud/ply/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
620
623
|
supervisely/convert/pointcloud/ply/ply_converter.py,sha256=2ZCYkhJQzUev-sWGsBwCPtj1TGjdcx8o-Q--RAHavp8,2698
|
|
621
624
|
supervisely/convert/pointcloud/ply/ply_helper.py,sha256=YfLiV9m6a4NNEMs0J32dmMTLffMLX4-JPTThMHOEK4w,268
|
|
@@ -626,6 +629,8 @@ supervisely/convert/pointcloud_episodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5J
|
|
|
626
629
|
supervisely/convert/pointcloud_episodes/pointcloud_episodes_converter.py,sha256=ynqF1PSn3zWCD4SS7nvLE5eerAG1Nvj_StsoJ5JrNPg,6703
|
|
627
630
|
supervisely/convert/pointcloud_episodes/bag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
628
631
|
supervisely/convert/pointcloud_episodes/bag/bag_converter.py,sha256=jzWKXoFUWu11d5WlPfT1hphCubYpq_lhQZmhh07xZdQ,1659
|
|
632
|
+
supervisely/convert/pointcloud_episodes/lyft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
633
|
+
supervisely/convert/pointcloud_episodes/lyft/lyft_converter.py,sha256=wdq_dO5Vejj0Df5Fuo-oQBWkEF1IGBr89XuBBHilGmM,10384
|
|
629
634
|
supervisely/convert/pointcloud_episodes/sly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
630
635
|
supervisely/convert/pointcloud_episodes/sly/sly_pointcloud_episodes_converter.py,sha256=7ONcZMOUJCNpmc0tmMX6FnNG0lu8Nj9K2SSTQHaSXFM,6188
|
|
631
636
|
supervisely/convert/pointcloud_episodes/sly/sly_pointcloud_episodes_helper.py,sha256=h4WvNH6cEHtjxxhCnU7Hs2vkyJMye0qwabqXNYVTywE,3570
|
|
@@ -690,8 +695,8 @@ supervisely/imaging/image.py,sha256=1KNc4qRbP9OlI4Yta07Kc2ohAgSBJ_9alF9Jag74w30,
|
|
|
690
695
|
supervisely/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
691
696
|
supervisely/io/docker_utils.py,sha256=hb_HXGM8IYB0PF-nD7NxMwaHgzaxIFxofsUzQ_RCUZI,7935
|
|
692
697
|
supervisely/io/env.py,sha256=rKLLw1XQqM3s3X3k3ke9Skyy5hPK0LE_xVUBq3Qko0Q,17284
|
|
693
|
-
supervisely/io/exception_handlers.py,sha256=
|
|
694
|
-
supervisely/io/fs.py,sha256=
|
|
698
|
+
supervisely/io/exception_handlers.py,sha256=_nAgMFeE94bCxEvWakR82hMtdOJUyn7Gc7OymMxI9WI,36484
|
|
699
|
+
supervisely/io/fs.py,sha256=Z92fU-UC7XHf-yQq431SwB7kyFODH3OqpxsfwowkuCY,51799
|
|
695
700
|
supervisely/io/fs_cache.py,sha256=985gvBGzveLcDudgz10E4EWVjP9jxdU1Pa0GFfCBoCA,6520
|
|
696
701
|
supervisely/io/github_utils.py,sha256=jGmvQJ5bjtACuSFABzrxL0jJdh14SezovrHp8T-9y8g,1779
|
|
697
702
|
supervisely/io/json.py,sha256=VvyqXZl22nb6_DJK3TUOPetd5xq9xwRFKumWqsGs7iI,8679
|
|
@@ -1057,9 +1062,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
|
|
|
1057
1062
|
supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
|
|
1058
1063
|
supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
|
|
1059
1064
|
supervisely_lib/__init__.py,sha256=7-3QnN8Zf0wj8NCr2oJmqoQWMKKPKTECvjH9pd2S5vY,159
|
|
1060
|
-
supervisely-6.73.
|
|
1061
|
-
supervisely-6.73.
|
|
1062
|
-
supervisely-6.73.
|
|
1063
|
-
supervisely-6.73.
|
|
1064
|
-
supervisely-6.73.
|
|
1065
|
-
supervisely-6.73.
|
|
1065
|
+
supervisely-6.73.264.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
|
|
1066
|
+
supervisely-6.73.264.dist-info/METADATA,sha256=E6LgmO5fqJR74uJ6_FaxDXbPmh9pDGtQa4eegsSlkNI,33573
|
|
1067
|
+
supervisely-6.73.264.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
|
|
1068
|
+
supervisely-6.73.264.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
|
|
1069
|
+
supervisely-6.73.264.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
|
|
1070
|
+
supervisely-6.73.264.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|