scale-nucleus 0.17.7__tar.gz → 0.17.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/PKG-INFO +1 -1
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/annotation.py +28 -4
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/annotation_uploader.py +26 -9
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/constants.py +1 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/dataset.py +32 -10
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/slice.py +15 -3
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/pyproject.toml +1 -1
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/LICENSE +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/README.md +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/client.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/datasets.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/helpers/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/helpers/nucleus_url.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/helpers/web_helper.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/install_completion.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/jobs.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/models.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/nu.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/reference.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/slices.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/cli/tests.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/async_job.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/async_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/autocurate.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/camera_params.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/chip_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/connection.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/data_transfer_object/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/data_transfer_object/dataset_details.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/data_transfer_object/dataset_info.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/data_transfer_object/dataset_size.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/data_transfer_object/job_status.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/data_transfer_object/scenes_list.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/dataset_item.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/dataset_item_uploader.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/deprecation_warning.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/embedding_index.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/errors.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/evaluation_match.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/job.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/logger.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metadata_manager.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/base.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/categorization_metrics.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/cuboid_metrics.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/cuboid_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/custom_types.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/errors.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/filtering.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/filters.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/metric_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/polygon_metrics.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/polygon_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/segmentation_loader.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/segmentation_metrics.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/segmentation_to_poly_metrics.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/metrics/segmentation_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/model.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/model_run.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/package_not_installed.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/payload_constructor.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/prediction.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/pydantic_base.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/quaternion.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/retry_strategy.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/scene.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/test_launch_integration.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/track.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/upload_response.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/url_utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/utils.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/client.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/constants.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/data_transfer_objects/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/data_transfer_objects/eval_function.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/data_transfer_objects/scenario_test.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/data_transfer_objects/scenario_test_evaluations.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/data_transfer_objects/scenario_test_metric.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/errors.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/eval_functions/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/eval_functions/available_eval_functions.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/eval_functions/base_eval_function.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/eval_functions/config_classes/__init__.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/eval_functions/config_classes/segmentation.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/scenario_test.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/scenario_test_evaluation.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/scenario_test_metric.py +0 -0
- {scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/validate/utils.py +0 -0
{scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/annotation.py

@@ -33,6 +33,7 @@ from .constants import (
     POLYGON_TYPE,
     POSITION_KEY,
     REFERENCE_ID_KEY,
+    TASK_ID_KEY,
     TAXONOMY_NAME_KEY,
     TRACK_REFERENCE_ID_KEY,
     TYPE_KEY,
@@ -158,6 +159,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
     metadata: Optional[Dict] = None
     embedding_vector: Optional[list] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata if self.metadata else {}
@@ -178,6 +180,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
             metadata=payload.get(METADATA_KEY, {}),
             embedding_vector=payload.get(EMBEDDING_VECTOR_KEY, None),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -195,6 +198,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
             METADATA_KEY: self.metadata,
             EMBEDDING_VECTOR_KEY: self.embedding_vector,
             TRACK_REFERENCE_ID_KEY: self.track_reference_id,
+            TASK_ID_KEY: self._task_id,
         }
 
     def __eq__(self, other):
@@ -209,6 +213,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
             and sorted(self.metadata.items()) == sorted(other.metadata.items())
             and self.embedding_vector == other.embedding_vector
             and self.track_reference_id == other.track_reference_id
+            and self._task_id == other._task_id
         )
 
 
@@ -275,6 +280,7 @@ class LineAnnotation(Annotation):
     annotation_id: Optional[str] = None
     metadata: Optional[Dict] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata if self.metadata else {}
@@ -304,6 +310,7 @@ class LineAnnotation(Annotation):
             annotation_id=payload.get(ANNOTATION_ID_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -317,6 +324,7 @@ class LineAnnotation(Annotation):
             ANNOTATION_ID_KEY: self.annotation_id,
             METADATA_KEY: self.metadata,
             TRACK_REFERENCE_ID_KEY: self.track_reference_id,
+            TASK_ID_KEY: self._task_id,
         }
         return payload
 
@@ -367,6 +375,7 @@ class PolygonAnnotation(Annotation):
     metadata: Optional[Dict] = None
     embedding_vector: Optional[list] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata if self.metadata else {}
@@ -397,6 +406,7 @@ class PolygonAnnotation(Annotation):
             metadata=payload.get(METADATA_KEY, {}),
             embedding_vector=payload.get(EMBEDDING_VECTOR_KEY, None),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -411,6 +421,7 @@ class PolygonAnnotation(Annotation):
             METADATA_KEY: self.metadata,
             EMBEDDING_VECTOR_KEY: self.embedding_vector,
             TRACK_REFERENCE_ID_KEY: self.track_reference_id,
+            TASK_ID_KEY: self._task_id,
         }
         return payload
 
@@ -507,6 +518,7 @@ class KeypointsAnnotation(Annotation):
     annotation_id: Optional[str] = None
     metadata: Optional[Dict] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata or {}
@@ -559,6 +571,7 @@ class KeypointsAnnotation(Annotation):
             annotation_id=payload.get(ANNOTATION_ID_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -574,6 +587,7 @@ class KeypointsAnnotation(Annotation):
             ANNOTATION_ID_KEY: self.annotation_id,
             METADATA_KEY: self.metadata,
             TRACK_REFERENCE_ID_KEY: self.track_reference_id,
+            TASK_ID_KEY: self._task_id,
         }
         return payload
 
@@ -678,6 +692,7 @@ class CuboidAnnotation(Annotation): # pylint: disable=R0902
     annotation_id: Optional[str] = None
     metadata: Optional[Dict] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata if self.metadata else {}
@@ -694,6 +709,7 @@ class CuboidAnnotation(Annotation): # pylint: disable=R0902
             annotation_id=payload.get(ANNOTATION_ID_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -713,7 +729,8 @@ class CuboidAnnotation(Annotation): # pylint: disable=R0902
             payload[METADATA_KEY] = self.metadata
         if self.track_reference_id:
             payload[TRACK_REFERENCE_ID_KEY] = self.track_reference_id
-
+        if self._task_id:
+            payload[TASK_ID_KEY] = self._task_id
         return payload
 
 
@@ -926,6 +943,7 @@ class CategoryAnnotation(Annotation):
     taxonomy_name: Optional[str] = None
     metadata: Optional[Dict] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata if self.metadata else {}
@@ -938,6 +956,7 @@ class CategoryAnnotation(Annotation):
             taxonomy_name=payload.get(TAXONOMY_NAME_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -948,6 +967,7 @@ class CategoryAnnotation(Annotation):
             REFERENCE_ID_KEY: self.reference_id,
             METADATA_KEY: self.metadata,
             TRACK_REFERENCE_ID_KEY: self.track_reference_id,
+            TASK_ID_KEY: self._task_id,
         }
         if self.taxonomy_name is not None:
             payload[TAXONOMY_NAME_KEY] = self.taxonomy_name
@@ -963,6 +983,7 @@ class MultiCategoryAnnotation(Annotation):
     taxonomy_name: Optional[str] = None
     metadata: Optional[Dict] = None
     track_reference_id: Optional[str] = None
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     def __post_init__(self):
         self.metadata = self.metadata if self.metadata else {}
@@ -975,6 +996,7 @@ class MultiCategoryAnnotation(Annotation):
             taxonomy_name=payload.get(TAXONOMY_NAME_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
             track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
+            _task_id=payload.get(TASK_ID_KEY, None),
         )
 
     def to_payload(self) -> dict:
@@ -985,6 +1007,7 @@ class MultiCategoryAnnotation(Annotation):
             REFERENCE_ID_KEY: self.reference_id,
             METADATA_KEY: self.metadata,
             TRACK_REFERENCE_ID_KEY: self.track_reference_id,
+            TASK_ID_KEY: self._task_id,
         }
         if self.taxonomy_name is not None:
             payload[TAXONOMY_NAME_KEY] = self.taxonomy_name
@@ -1023,6 +1046,7 @@ class SceneCategoryAnnotation(Annotation):
     reference_id: str
     taxonomy_name: Optional[str] = None
     metadata: Optional[Dict] = field(default_factory=dict)
+    _task_id: Optional[str] = field(default=None, repr=False)
 
     @classmethod
     def from_json(cls, payload: dict):
@@ -1031,6 +1055,7 @@ class SceneCategoryAnnotation(Annotation):
             reference_id=payload[REFERENCE_ID_KEY],
             taxonomy_name=payload.get(TAXONOMY_NAME_KEY, None),
             metadata=payload.get(METADATA_KEY, {}),
+            _task_id=payload.get(TASK_ID_KEY, None),
        )
 
     def to_payload(self) -> dict:
@@ -1040,6 +1065,7 @@ class SceneCategoryAnnotation(Annotation):
             GEOMETRY_KEY: {},
             REFERENCE_ID_KEY: self.reference_id,
             METADATA_KEY: self.metadata,
+            TASK_ID_KEY: self._task_id,
         }
         if self.taxonomy_name is not None:
             payload[TAXONOMY_NAME_KEY] = self.taxonomy_name
@@ -1057,9 +1083,7 @@ class AnnotationList:
         default_factory=list
     )
     cuboid_annotations: List[CuboidAnnotation] = field(default_factory=list)
-    category_annotations: List[CategoryAnnotation] = field(
-        default_factory=list
-    )
+    category_annotations: List[CategoryAnnotation] = field(default_factory=list)
     multi_category_annotations: List[MultiCategoryAnnotation] = field(
         default_factory=list
    )
{scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/annotation_uploader.py

@@ -176,9 +176,7 @@ class AnnotationUploader:
         """
 
         def fn():
-            request_json = construct_segmentation_payload(
-                segmentations, update
-            )
+            request_json = construct_segmentation_payload(segmentations, update)
             form_data = [
                 FileFormField(
                     name=SERIALIZED_REQUEST_KEY,
@@ -212,13 +210,15 @@ class AnnotationUploader:
 
         return fn
 
-
-
-        """Do not allow annotations to have the same (annotation_id, reference_id) tuple"""
+    def check_for_duplicate_ids(self, annotations: Iterable[Annotation]):
+        """Do not allow annotations to have the same (annotation_id, reference_id, task_id) tuple"""
 
-        # some annotations like CategoryAnnotation do not have annotation_id attribute, and as such, we allow duplicates
         tuple_ids = [
-            (ann.annotation_id, ann.reference_id)  # type: ignore
+            (
+                ann.reference_id,
+                ann.annotation_id,
+                getattr(ann, "_task_id", None),
+            )
             for ann in annotations
             if hasattr(ann, "annotation_id")
         ]
@@ -226,7 +226,7 @@ class AnnotationUploader:
         duplicates = {key for key, value in tuple_count.items() if value > 1}
         if len(duplicates) > 0:
             raise DuplicateIDError(
-                f"Duplicate annotations with the same (reference_id, annotation_id) properties found.\n"
+                f"Duplicate annotations with the same (reference_id, annotation_id, task_id) properties found.\n"
                 f"Duplicates: {duplicates}\n"
                 f"To fix this, avoid duplicate annotations, or specify a different annotation_id attribute "
                 f"for the failing items."
@@ -255,3 +255,20 @@ class PredictionUploader(AnnotationUploader):
         self._route = (
             f"dataset/{dataset_id}/model/{model_id}/uploadPredictions"
         )
+
+    def check_for_duplicate_ids(self, annotations: Iterable[Annotation]):
+        """Do not allow predictions to have the same (annotation_id, reference_id) tuple"""
+        tuple_ids = [
+            (pred.annotation_id, pred.reference_id)  # type: ignore
+            for pred in annotations
+            if hasattr(pred, "annotation_id") and hasattr(pred, "reference_id")
+        ]
+        tuple_count = Counter(tuple_ids)
+        duplicates = {key for key, value in tuple_count.items() if value > 1}
+        if len(duplicates) > 0:
+            raise DuplicateIDError(
+                f"Duplicate predictions with the same (annotation_id, reference_id) properties found.\n"
+                f"Duplicates: {duplicates}\n"
+                f"To fix this, avoid duplicate predictions, or specify a different annotation_id attribute "
+                f"for the failing items."
+            )
{scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/constants.py

@@ -148,6 +148,7 @@ STATUS_KEY = "status"
 SUCCESS_STATUS_CODES = [200, 201, 202]
 SLICE_TAGS_KEY = "slice_tags"
 TAXONOMY_NAME_KEY = "taxonomy_name"
+TASK_ID_KEY = "task_id"
 TRACK_REFERENCE_ID_KEY = "track_reference_id"
 TRACK_REFERENCE_IDS_KEY = "track_reference_ids"
 TRACKS_KEY = "tracks"
{scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/dataset.py

@@ -332,8 +332,7 @@ class Dataset:
         dataset_item_jsons = response.get(DATASET_ITEMS_KEY, None)
 
         return [
-            DatasetItem.from_json(item_json)
-            for item_json in dataset_item_jsons
+            DatasetItem.from_json(item_json) for item_json in dataset_item_jsons
         ]
 
     @property
@@ -699,9 +698,7 @@ class Dataset:
                 asynchronous
             ), "In order to avoid timeouts, you must set asynchronous=True when uploading videos."
 
-            return self._append_video_scenes(
-                video_scenes, update, asynchronous
-            )
+            return self._append_video_scenes(video_scenes, update, asynchronous)
 
         if len(dataset_items) > WARN_FOR_LARGE_UPLOAD and not asynchronous:
             print(
@@ -1450,13 +1447,14 @@ class Dataset:
         return convert_export_payload(api_payload[EXPORTED_ROWS])
 
     def scene_and_annotation_generator(
-        self, slice_id=None, page_size: int = 10
+        self, slice_id=None, page_size: int = 10, only_most_recent_tasks=True
     ):
         """Provides a generator of all Scenes and Annotations in the dataset grouped by scene.
 
         Args:
             slice_id: Optional slice ID to filter the scenes and annotations.
             page_size: Number of scenes to fetch per page. Default is 10.
+            only_most_recent_tasks: If True, only the annotations corresponding to the most recent task for each item is returned.
 
         Returns:
             Generator where each element is a nested dict containing scene and annotation information of the dataset structured as a JSON.
@@ -1509,6 +1507,7 @@ class Dataset:
             result_key=EXPORT_FOR_TRAINING_KEY,
             page_size=page_size,
             sliceId=slice_id,
+            onlyMostRecentTask=only_most_recent_tasks,
         )
 
         for data in json_generator:
@@ -1518,12 +1517,14 @@ class Dataset:
         self,
         query: Optional[str] = None,
         use_mirrored_images: bool = False,
+        only_most_recent_tasks: bool = True,
     ) -> Iterable[Dict[str, Union[DatasetItem, Dict[str, List[Annotation]]]]]:
         """Provides a generator of all DatasetItems and Annotations in the dataset.
 
         Args:
             query: Structured query compatible with the `Nucleus query language <https://nucleus.scale.com/docs/query-language-reference>`_.
             use_mirrored_images: If True, returns the location of the mirrored image hosted in Scale S3. Useful when the original image is no longer available.
+            only_most_recent_tasks: If True, only the annotations corresponding to the most recent task for each item is returned.
 
         Returns:
             Generator where each element is a dict containing the DatasetItem
@@ -1550,6 +1551,7 @@ class Dataset:
             page_size=10000, # max ES page size
             query=query,
             chip=use_mirrored_images,
+            onlyMostRecentTask=only_most_recent_tasks,
         )
         for data in json_generator:
             for ia in convert_export_payload([data], has_predictions=False):
@@ -2356,10 +2358,7 @@ class Dataset:
         )
 
         if len(items) > 0:
-            if (
-                len(items) > GLOB_SIZE_THRESHOLD_CHECK
-                and not skip_size_warning
-            ):
+            if len(items) > GLOB_SIZE_THRESHOLD_CHECK and not skip_size_warning:
                 raise Exception(
                     f"Found over {GLOB_SIZE_THRESHOLD_CHECK} items in {dirname}. If this is intended,"
                     f" set skip_size_warning=True when calling this function."
@@ -2406,3 +2405,26 @@ class Dataset:
             route=f"dataset/{self.id}/model/{model.id}/pointcloud/{pointcloud_ref_id}/uploadLSSPrediction",
             requests_command=requests.post,
         )
+
+    def export_class_labels(self, slice_id: Optional[str] = None):
+        """Fetches a list of class labels for the dataset.
+
+        Args:
+            slice_id (str | None): The ID of the slice to export class labels for. If None, export class labels for the entire dataset.
+
+        Returns:
+            A list of class labels for the dataset.
+        """
+        if slice_id:
+            api_payload = self._client.make_request(
+                payload=None,
+                route=f"slice/{slice_id}/class_labels",
+                requests_command=requests.get,
+            )
+        else:
+            api_payload = self._client.make_request(
+                payload=None,
+                route=f"dataset/{self.id}/class_labels",
+                requests_command=requests.get,
+            )
+        return api_payload.get("data", [])
{scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/nucleus/slice.py

@@ -168,9 +168,7 @@ class Slice:
     @property
     def pending_job_count(self) -> Optional[int]:
         if self._pending_job_count is None:
-            self._pending_job_count = self.info().get(
-                "pending_job_count", None
-            )
+            self._pending_job_count = self.info().get("pending_job_count", None)
         return self._pending_job_count
 
     @classmethod
@@ -410,9 +408,13 @@ class Slice:
 
     def items_and_annotation_generator(
         self,
+        use_mirrored_images: bool = False,
     ) -> Iterable[Dict[str, Union[DatasetItem, Dict[str, List[Annotation]]]]]:
         """Provides a generator of all DatasetItems and Annotations in the slice.
 
+        Args:
+            use_mirrored_images: If True, returns the location of the mirrored image hosted in Scale S3. Useful when the original image is no longer available.
+
         Returns:
             Generator where each element is a dict containing the DatasetItem
             and all of its associated Annotations, grouped by type (e.g. box).
@@ -436,6 +438,7 @@ class Slice:
             endpoint=f"slice/{self.id}/exportForTrainingPage",
             result_key=EXPORT_FOR_TRAINING_KEY,
             page_size=10000, # max ES page size
+            chip=use_mirrored_images,
         )
         for data in json_generator:
             for ia in convert_export_payload([data], has_predictions=False):
@@ -700,6 +703,15 @@ class Slice:
         )
         return api_payload
 
+    def export_class_labels(self):
+        """Fetches a list of class labels for the slice."""
+        api_payload = self._client.make_request(
+            payload=None,
+            route=f"slice/{self.id}/class_labels",
+            requests_command=requests.get,
+        )
+        return api_payload.get("data", [])
+
 
 def check_annotations_are_in_slice(
     annotations: List[Annotation], slice_to_check: Slice
{scale_nucleus-0.17.7 → scale_nucleus-0.17.9}/pyproject.toml

@@ -25,7 +25,7 @@ ignore = ["E501", "E741", "E731", "F401"] # Easy ignore for getting it running
 
 [tool.poetry]
 name = "scale-nucleus"
-version = "0.17.7"
+version = "0.17.9"
 description = "The official Python client library for Nucleus, the Data Platform for AI"
 license = "MIT"
 authors = ["Scale AI Nucleus Team <nucleusapi@scaleapi.com>"]