scale-nucleus 0.17.5__py3-none-any.whl → 0.17.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nucleus/dataset.py +45 -2
- {scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/METADATA +1 -1
- {scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/RECORD +6 -6
- {scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/LICENSE +0 -0
- {scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/WHEEL +0 -0
- {scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/entry_points.txt +0 -0
nucleus/dataset.py
CHANGED
@@ -1449,6 +1449,49 @@ class Dataset:
         )
         return convert_export_payload(api_payload[EXPORTED_ROWS])

+    def scene_and_annotation_generator(self, page_size=10):
+        """Provides a generator of all DatasetItems and Annotations in the dataset grouped by scene.
+
+
+        Returns:
+            Generator where each element is a nested dict (representing a JSON) structured in the following way:
+
+            Iterable[{
+                "file_location": str,
+                "metadata": Dict[str, Any],
+                "annotations": {
+                    "{trackId}": {
+                        "label": str,
+                        "name": str,
+                        "frames": List[{
+                            "left": int,
+                            "top": int,
+                            "width": int,
+                            "height": int,
+                            "key": str,  # frame key
+                            "metadata": Dict[str, Any]
+                        }]
+                    }
+                }
+            }]
+
+        This is similar to how the Scale API returns task data
+        """
+
+        if page_size > 30:
+            raise ValueError("Page size must be less than or equal to 30")
+
+        endpoint_name = "exportForTrainingByScene"
+        json_generator = paginate_generator(
+            client=self._client,
+            endpoint=f"dataset/{self.id}/{endpoint_name}",
+            result_key=EXPORT_FOR_TRAINING_KEY,
+            page_size=page_size,
+        )
+
+        for data in json_generator:
+            yield data
+
     def items_and_annotation_generator(
         self,
         query: Optional[str] = None,
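The new `scene_and_annotation_generator` method is the only functional addition in this release. A minimal usage sketch (not part of the diff) follows; the API key and dataset ID are placeholders, and the field access simply mirrors the nested layout documented in the docstring above.

```python
from nucleus import NucleusClient

client = NucleusClient("YOUR_SCALE_API_KEY")      # placeholder credentials
dataset = client.get_dataset("YOUR_DATASET_ID")   # placeholder dataset id

# Each yielded element follows the documented layout: one dict per scene,
# with annotations keyed by track id. page_size above 30 raises ValueError.
for scene in dataset.scene_and_annotation_generator(page_size=10):
    print(scene["file_location"], scene["metadata"])
    for track_id, track in scene["annotations"].items():
        for frame in track["frames"]:
            box = (frame["left"], frame["top"], frame["width"], frame["height"])
            print(track_id, track["label"], frame["key"], box)
```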
@@ -2310,9 +2353,9 @@ class Dataset:
         pointcloud_ref_id: str,
         predictions_s3_path: str,
     ):
-        """Upload Lidar Semantic Segmentation predictions for a given
+        """Upload Lidar Semantic Segmentation predictions for a given point-cloud.

-        Assuming a
+        Assuming a point-cloud with only 4 points (three labeled as Car, one labeled as Person),
         the contents of the predictions s3 object should be formatted as such:

        .. code-block:: json
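The remainder of this hunk is a docstring clarification for the point-cloud prediction upload method. A hypothetical call is sketched below for context only; the diff shows just the `pointcloud_ref_id` and `predictions_s3_path` parameters, so the method name and the S3 path used here are illustrative assumptions, not confirmed by this diff.

```python
from nucleus import NucleusClient

dataset = NucleusClient("YOUR_SCALE_API_KEY").get_dataset("YOUR_DATASET_ID")  # placeholders

dataset.upload_lidar_semseg_predictions(                          # assumed method name
    pointcloud_ref_id="scene_1_pointcloud",                       # reference ID of the target point-cloud
    predictions_s3_path="s3://my-bucket/predictions/scene_1.json",  # placeholder S3 path
)
```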
{scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/RECORD
CHANGED
@@ -26,7 +26,7 @@ nucleus/data_transfer_object/dataset_info.py,sha256=5P_gpvAyaqXxj2ZQuzLkGN2XROaN
 nucleus/data_transfer_object/dataset_size.py,sha256=oe-dXaMLpsQRDcJQRZ9Ja8JTagYz4dviZuTognEylp0,111
 nucleus/data_transfer_object/job_status.py,sha256=hxvyNdrdVdj3UpEfwvryKC_QCJQEC9ru6IPjhPFcK44,2038
 nucleus/data_transfer_object/scenes_list.py,sha256=iTHE6vA47bRB6ciyEU4LArUXEXco4ArnGvZTGTeK8xs,432
-nucleus/dataset.py,sha256=
+nucleus/dataset.py,sha256=ekYreXpUY2kUyKJLJEopNXfezc0u9EzQyWlAYAtt3-8,92751
 nucleus/dataset_item.py,sha256=y9ia47i31lX2wvw6EkVAxeHburMrrZpuyjEGlstWa2A,10166
 nucleus/dataset_item_uploader.py,sha256=BD0FTgimEFYmDbnOLIaQZS3OLDfLe5wumADDmgMX598,6684
 nucleus/deprecation_warning.py,sha256=5C9dVusR5UkUQnW2MrRkIXCfbc8ULc7xOaB134agNKk,976
@@ -85,8 +85,8 @@ nucleus/validate/scenario_test.py,sha256=pCmM157dblSciZCDTw-f47Fpy3OUZFgXmokdhIL
 nucleus/validate/scenario_test_evaluation.py,sha256=Q0WzaEE9uUbPVc4EHlCoKjhJcqMNt4QbyiiJx12VOR0,4075
 nucleus/validate/scenario_test_metric.py,sha256=AhVFOB1ULwBqlZ2X_Au1TXy4iQELljtzR4ZpeLB35So,1209
 nucleus/validate/utils.py,sha256=VjdIJj9Pii4z4L6xbvClAc7ra_J7cX0vWB_J2X6yrGE,185
-scale_nucleus-0.17.
-scale_nucleus-0.17.
-scale_nucleus-0.17.
-scale_nucleus-0.17.
-scale_nucleus-0.17.
+scale_nucleus-0.17.6.dist-info/LICENSE,sha256=jaTGyQSQIZeWMo5iyYqgbAYHR9Bdy7nOzgE-Up3m_-g,1075
+scale_nucleus-0.17.6.dist-info/METADATA,sha256=HHN0a8wl3wVYBELMNFiG5uQ1-59ke402OCTm_dkty6Q,7920
+scale_nucleus-0.17.6.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+scale_nucleus-0.17.6.dist-info/entry_points.txt,sha256=fmqEzh6NZQyg9eFMILnWabKT8OWQTMSCdDzMiVq2zYs,32
+scale_nucleus-0.17.6.dist-info/RECORD,,
{scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/LICENSE
File without changes
{scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/WHEEL
File without changes
{scale_nucleus-0.17.5.dist-info → scale_nucleus-0.17.6.dist-info}/entry_points.txt
File without changes