scale-nucleus 0.12b1__py3-none-any.whl → 0.14.14b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (42)
  1. cli/slices.py +14 -28
  2. nucleus/__init__.py +211 -18
  3. nucleus/annotation.py +28 -5
  4. nucleus/connection.py +9 -1
  5. nucleus/constants.py +9 -3
  6. nucleus/dataset.py +197 -59
  7. nucleus/dataset_item.py +11 -1
  8. nucleus/job.py +1 -1
  9. nucleus/metrics/__init__.py +2 -1
  10. nucleus/metrics/base.py +34 -56
  11. nucleus/metrics/categorization_metrics.py +6 -2
  12. nucleus/metrics/cuboid_utils.py +4 -6
  13. nucleus/metrics/errors.py +4 -0
  14. nucleus/metrics/filtering.py +369 -19
  15. nucleus/metrics/polygon_utils.py +3 -3
  16. nucleus/metrics/segmentation_loader.py +30 -0
  17. nucleus/metrics/segmentation_metrics.py +256 -195
  18. nucleus/metrics/segmentation_to_poly_metrics.py +229 -105
  19. nucleus/metrics/segmentation_utils.py +239 -8
  20. nucleus/model.py +66 -10
  21. nucleus/model_run.py +1 -1
  22. nucleus/{shapely_not_installed.py → package_not_installed.py} +3 -3
  23. nucleus/payload_constructor.py +4 -0
  24. nucleus/prediction.py +6 -3
  25. nucleus/scene.py +7 -0
  26. nucleus/slice.py +160 -16
  27. nucleus/utils.py +51 -12
  28. nucleus/validate/__init__.py +1 -0
  29. nucleus/validate/client.py +57 -8
  30. nucleus/validate/constants.py +1 -0
  31. nucleus/validate/data_transfer_objects/eval_function.py +22 -0
  32. nucleus/validate/data_transfer_objects/scenario_test_evaluations.py +13 -5
  33. nucleus/validate/eval_functions/available_eval_functions.py +33 -20
  34. nucleus/validate/eval_functions/config_classes/segmentation.py +2 -46
  35. nucleus/validate/scenario_test.py +71 -13
  36. nucleus/validate/scenario_test_evaluation.py +21 -21
  37. nucleus/validate/utils.py +1 -1
  38. {scale_nucleus-0.12b1.dist-info → scale_nucleus-0.14.14b0.dist-info}/LICENSE +0 -0
  39. {scale_nucleus-0.12b1.dist-info → scale_nucleus-0.14.14b0.dist-info}/METADATA +13 -11
  40. {scale_nucleus-0.12b1.dist-info → scale_nucleus-0.14.14b0.dist-info}/RECORD +42 -41
  41. {scale_nucleus-0.12b1.dist-info → scale_nucleus-0.14.14b0.dist-info}/WHEEL +1 -1
  42. {scale_nucleus-0.12b1.dist-info → scale_nucleus-0.14.14b0.dist-info}/entry_points.txt +0 -0
nucleus/metrics/segmentation_utils.py CHANGED
@@ -1,16 +1,29 @@
+ import logging
+ from collections import defaultdict
+ from typing import List, Sequence, Tuple, Union
+
  import numpy as np
- from rasterio import features
+ from scipy.optimize import linear_sum_assignment
+
+ from nucleus import Point, PolygonPrediction, Segment
+ from nucleus.metrics.custom_types import BoxOrPolygonAnnotation
+ from nucleus.metrics.polygon_utils import polygon_annotation_to_shape
+ from nucleus.package_not_installed import (  # pylint: disable=ungrouped-imports
+     PackageNotInstalled,
+ )
 
- from nucleus import Point, PolygonPrediction
+ FALSE_POSITIVES = "__non_max_false_positive"
 
  try:
      from shapely import geometry
- except ModuleNotFoundError:
-     from nucleus.shapely_not_installed import (  # pylint: disable=ungrouped-imports
-         ShapelyNotInstalled,
-     )
+ except (ModuleNotFoundError, OSError):
+     geometry = PackageNotInstalled
 
-     geometry = ShapelyNotInstalled
+
+ try:
+     from rasterio import features
+ except (ModuleNotFoundError, OSError):
+     rasterio = PackageNotInstalled
 
 
  def instance_mask_to_polys(instance_mask: np.ndarray, background_code=None):
@@ -20,7 +33,7 @@ def instance_mask_to_polys(instance_mask: np.ndarray, background_code=None):
          (instance_mask != background_code) if background_code else None
      )
      for shape, value in features.shapes(
-         instance_mask.astype(np.int16),
+         instance_mask.astype(np.int32),
          mask=not_background_mask,
      ):
          poly = geometry.shape(shape)
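
The int16 → int32 cast above matters once instance indices grow: rasterio's features.shapes accepts int32 rasters, while int16 saturates past 32767 instances. A minimal sketch of the call pattern, assuming rasterio is installed (the toy mask below is illustrative only):

    import numpy as np
    from rasterio import features

    # Toy instance mask: background is 0, two instances labeled 1 and 2.
    instance_mask = np.zeros((64, 64), dtype=np.int64)
    instance_mask[8:24, 8:24] = 1
    instance_mask[32:48, 32:48] = 2

    # features.shapes yields one (GeoJSON-like geometry, value) pair per
    # connected region; int32 keeps large instance counts representable.
    for shape, value in features.shapes(instance_mask.astype(np.int32)):
        print(int(value), shape["type"])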
@@ -46,3 +59,221 @@ def transform_poly_codes_to_poly_preds(
          )
          polygon_predictions.append(pred)
      return polygon_predictions
+
+
+ def max_iou_match_from_confusion(confusion):
+     """Calculate IOU from the confusion matrix and run linear sum assignment to get the strongest candidate for each GT.
+
+     Returns:
+         iou_matrix with same dims as confusion and 1-d best match rows, 1-d best match cols
+     """
+     iou = np.zeros(confusion.shape, dtype=np.float)
+     with np.errstate(divide="ignore", invalid="ignore"):
+         for i in range(confusion.shape[0]):
+             for j in range(confusion.shape[1]):
+                 intersection = confusion[i, j]
+                 union = (
+                     confusion[i, :].sum()
+                     + confusion[:, j].sum()
+                     - intersection
+                 )
+                 if union > 0:
+                     iou[i, j] = intersection / union
+     iou = np.nan_to_num(iou)
+     iou_match_row, iou_match_col = linear_sum_assignment(-iou)
+     return iou, iou_match_row, iou_match_col
+
+
+ def fast_confusion_matrix(
+     label_true: np.ndarray, label_pred: np.ndarray, n_class: int
+ ) -> np.ndarray:
+     """Calculates the confusion matrix - fast!
+
+     Outputs a confusion matrix where each row is GT confusion and each column is prediction confusion.
+     Example:
+         fast_confusion_matrix(np.array([0, 1, 2, 3], dtype=np.int32), np.array([0, 1, 1, 1], dtype=np.int32), n_class=4)
+         array([[1, 0, 0, 0],
+                [0, 1, 0, 0],
+                [0, 1, 0, 0],
+                [0, 1, 0, 0]])
+     """
+     mask = (label_true >= 0) & (label_true < n_class)
+     hist = np.bincount(
+         n_class * label_true[mask].astype(int) + label_pred[mask],
+         minlength=n_class ** 2,
+     ).reshape(n_class, n_class)
+     return hist
+
+
+ def non_max_suppress_confusion(confusion: np.ndarray, iou_threshold):
+     """Uses linear sum assignment to find the biggest pixel-wise IOU match. Secondary matches are moved to the
+     last column as false positives (since they are outside of instance boundaries).
+
+     TODO(gunnar): Change logic to only move suppressed TP to FP so we can maintain the original confusion
+
+     Arguments:
+         confusion: Confusion matrix
+         iou_threshold: Detections under the IOU threshold are considered false positives
+
+     Returns:
+         Non-max-suppressed confusion matrix: an NxN input grows to (N+1)x(N+1), where the last column holds
+         the suppressed positives
+     """
+     original_count = confusion.sum()
+     iou, max_iou_row, max_iou_col = max_iou_match_from_confusion(confusion)
+     # Prepare the new confusion with +1 added to the shape
+     non_max_suppressed = np.zeros(np.add(confusion.shape, 1), dtype=np.int64)
+
+     # ---- IOU filtering from diagonal
+     keep_diagonal = iou.diagonal() >= iou_threshold
+     # Move false positives from diag to new false_positive class
+     move_indexes = np.where(~keep_diagonal)
+
+     # log iou suppressed as FPs
+     non_max_suppressed[:, -1].put(
+         move_indexes, confusion.diagonal().take(move_indexes)
+     )
+     # Zero false positives on diagonal
+     keep_indexes = np.where(keep_diagonal)
+     new_diagonal = np.zeros(len(confusion.diagonal()))
+     new_diagonal.put(keep_indexes, confusion.diagonal()[keep_indexes])
+     np.fill_diagonal(confusion, new_diagonal)
+     # ----
+
+     # -- move max over
+     non_max_suppressed[max_iou_row, max_iou_col] = confusion[
+         max_iou_row, max_iou_col
+     ]
+     confusion[max_iou_row, max_iou_col] = np.zeros(len(max_iou_col))
+     # --
+
+     # -- move left on diagonal to FPs
+     non_max_suppressed[:, -1] = (
+         np.r_[confusion.diagonal(), np.zeros(1)] + non_max_suppressed[:, -1]
+     )
+     np.fill_diagonal(confusion, np.zeros(len(confusion.diagonal())))
+     # --
+
+     # -- move valid confusions over
+     valid_confusion = confusion > 0
+     valid_row, valid_col = np.where(confusion > 0)
+     flat_idxs = valid_col + valid_row * non_max_suppressed.shape[1]
+     non_max_suppressed.put(flat_idxs, confusion[valid_confusion])
+     # --
+     assert original_count == non_max_suppressed.sum()
+     return non_max_suppressed
+
+
+ def rasterize_polygons_to_segmentation_mask(
+     annotations: Sequence[BoxOrPolygonAnnotation], shape: Tuple
+ ) -> Tuple[np.ndarray, List[Segment]]:
+     polys = [polygon_annotation_to_shape(a) for a in annotations]
+     segments = [
+         Segment(ann.label, index=idx + 1, metadata=ann.metadata)
+         for idx, ann in enumerate(annotations)
+     ]
+     poly_vals = [
+         (poly, segment.index) for poly, segment in zip(polys, segments)
+     ]
+     rasterized = features.rasterize(
+         poly_vals,
+         out_shape=shape,
+         fill=0,
+         out=None,
+         all_touched=False,
+         dtype=None,
+     )
+     return rasterized, segments
+
+
+ def convert_to_instance_seg_confusion(confusion, annotation, prediction):
+     pred_index_to_label = {s.index: s.label for s in prediction.annotations}
+
+     gt_label_to_old_indexes = defaultdict(set)
+     for segment in annotation.annotations:
+         gt_label_to_old_indexes[segment.label].add(segment.index)
+
+     pr_label_to_old_indexes = defaultdict(set)
+     for segment in prediction.annotations:
+         pr_label_to_old_indexes[segment.label].add(segment.index)
+
+     new_labels = list(
+         dict.fromkeys(
+             list(pr_label_to_old_indexes)[:-1]
+             + list(gt_label_to_old_indexes)[:-1]
+         )
+     )
+     # NOTE: We make sure that FALSE_POSITIVES are at the back
+     false_positive_label = list(pr_label_to_old_indexes.keys())[-1]
+     new_labels.append(false_positive_label)
+     non_taxonomy_classes = {len(new_labels) - 1}
+
+     num_classes = len(new_labels)
+     new_confusion = np.zeros(
+         (num_classes, num_classes),
+         dtype=np.int32,
+     )
+
+     for gt_class_idx, from_label in enumerate(new_labels):
+         from_indexes = gt_label_to_old_indexes[from_label]
+         tp, fp = 0, 0
+         if len(from_indexes) == 0:
+             logging.warning(
+                 "No annotations with label '%s', interpreted as false positives.",
+                 from_label,
+             )
+             non_taxonomy_classes.add(gt_class_idx)
+             # NOTE: If the index is not in the gt segments it comes from the predictions; we get the
+             # "old_indexes" from there even though they are all FPs
+             from_indexes = pr_label_to_old_indexes[from_label]
+         for gt_instance_idx in from_indexes:
+             max_col = np.argmax(
+                 confusion[gt_instance_idx, :]
+             )  # TODO: Get from IOU
+             if confusion[gt_instance_idx, max_col] == 0:
+                 continue
+
+             for pred_class_idx, to_label in enumerate(new_labels):
+                 to_indexes = pr_label_to_old_indexes[to_label]
+                 if from_label == to_label:
+                     if pred_index_to_label.get(max_col, None) == from_label:
+                         tp += confusion[gt_instance_idx, max_col]
+                         fp_indexes = to_indexes - {max_col}
+                     else:
+                         fp_indexes = to_indexes
+                     fp += (
+                         confusion[gt_instance_idx, :]
+                         .take(list(fp_indexes))
+                         .sum()
+                         + confusion[gt_instance_idx, -1]
+                     )
+                 else:
+                     new_confusion[gt_class_idx, pred_class_idx] += (
+                         confusion[gt_instance_idx, :]
+                         .take(list(to_indexes))
+                         .sum()
+                     )
+
+         new_confusion[gt_class_idx, gt_class_idx] = tp
+         new_confusion[gt_class_idx, -1] = fp
+
+     assert confusion.sum() == new_confusion.sum()
+     return new_confusion, new_labels, non_taxonomy_classes
+
+
+ def setup_iou_thresholds(iou_thresholds: Union[Sequence[float], str] = "coco"):
+     supported_iou_setups = {"coco"}
+     if isinstance(iou_thresholds, (list, np.ndarray)):
+         return np.array(iou_thresholds, np.float_)
+     elif isinstance(iou_thresholds, str):
+         if iou_thresholds in supported_iou_setups:
+             return np.arange(0.5, 1.0, 0.05)
+         else:
+             raise RuntimeError(
+                 f"Got invalid configuration value: {iou_thresholds}, expected one of: {supported_iou_setups}"
+             )
+     else:
+         raise RuntimeError(
+             f"Got invalid configuration: {iou_thresholds}. Expected a list of floats or one of: {supported_iou_setups}"
+         )
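
Taken together, the new helpers compose into a small instance-segmentation evaluation pipeline: build a pixel-level confusion matrix, derive per-pair IOU, and match GT to predictions. A hedged sketch of that composition (synthetic labels; only the signatures visible in this diff are assumed):

    import numpy as np
    from nucleus.metrics.segmentation_utils import (
        fast_confusion_matrix,
        max_iou_match_from_confusion,
        setup_iou_thresholds,
    )

    gt = np.array([0, 1, 2, 3], dtype=np.int32)    # ground-truth label per pixel
    pred = np.array([0, 1, 1, 1], dtype=np.int32)  # predicted label per pixel

    confusion = fast_confusion_matrix(gt, pred, n_class=4)
    iou, match_rows, match_cols = max_iou_match_from_confusion(confusion)
    # match_rows[i] (GT) is paired with match_cols[i] (prediction), with
    # strength iou[match_rows[i], match_cols[i]].

    thresholds = setup_iou_thresholds("coco")  # array([0.5, 0.55, ..., 0.95])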
nucleus/model.py CHANGED
@@ -2,7 +2,7 @@ from typing import Dict, List, Optional, Union
 
  import requests
 
- from .constants import METADATA_KEY, NAME_KEY, REFERENCE_ID_KEY
+ from .constants import METADATA_KEY, MODEL_TAGS_KEY, NAME_KEY, REFERENCE_ID_KEY
  from .dataset import Dataset
  from .job import AsyncJob
  from .model_run import ModelRun
@@ -27,7 +27,7 @@ class Model:
 
      Within Nucleus, Models work in the following way:
 
-     1. You first :meth:`create a Model<NucleusClient.add_model>`. You can do this
+     1. You first :meth:`create a Model<NucleusClient.create_model>`. You can do this
         just once and reuse the model on multiple datasets.
      2. You then :meth:`upload predictions <Dataset.upload_predictions>` to a dataset.
      3. Trigger :meth:`calculation of metrics <Dataset.calculate_evaluation_metrics>`
@@ -68,7 +68,7 @@ class Model:
              class_pdf={"label": 0.2, "other_label": 0.8},
          )
 
-         model = client.add_model(
+         model = client.create_model(
              name="My Model", reference_id="My-CNN", metadata={"timestamp": "121012401"}
          )
 
@@ -77,7 +77,7 @@ class Model:
 
          # For large ingestions, we recommend asynchronous ingestion
          job = dataset.upload_predictions(
-             [prediction_1, prediction_2], asynchronous=True
+             model, [prediction_1, prediction_2], asynchronous=True
          )
          # Check current status
          job.status()
@@ -89,21 +89,29 @@ class Model:
          dataset.calculate_evaluation_metrics(model)
 
      Models cannot be instantiated directly and instead must be created via API
-     endpoint, using :meth:`NucleusClient.add_model`.
+     endpoint, using :meth:`NucleusClient.create_model`.
      """
 
      def __init__(
-         self, model_id, name, reference_id, metadata, client, bundle_name=None
+         self,
+         model_id,
+         name,
+         reference_id,
+         metadata,
+         client,
+         bundle_name=None,
+         tags=None,
      ):
          self.id = model_id
          self.name = name
          self.reference_id = reference_id
          self.metadata = metadata
          self.bundle_name = bundle_name
+         self.tags = tags if tags else []
          self._client = client
 
      def __repr__(self):
-         return f"Model(model_id='{self.id}', name='{self.name}', reference_id='{self.reference_id}', metadata={self.metadata}, bundle_name={self.bundle_name}, client={self._client})"
+         return f"Model(model_id='{self.id}', name='{self.name}', reference_id='{self.reference_id}', metadata={self.metadata}, bundle_name={self.bundle_name}, tags={self.tags}, client={self._client})"
 
      def __eq__(self, other):
          return (
@@ -202,9 +210,9 @@ class Model:
          model.run("ds_123456")
 
          Args:
-             dataset_id: id of dataset to run inference on
-             job_id: nucleus job used to track async job progress
-             slice_id: (optional) id of slice of the dataset to run inference on
+             dataset_id: The ID of the dataset to run inference on.
+             job_id: The ID of the :class:`AsyncJob` used to track job progress.
+             slice_id: The ID of the slice of the dataset to run inference on.
          """
          response = self._client.make_request(
              {"dataset_id": dataset_id, "slice_id": slice_id},
@@ -213,3 +221,51 @@ class Model:
          )
 
          return response
+
+     def add_tags(self, tags: List[str]):
+         """Tag the model with custom tag names. ::
+
+             import nucleus
+             client = nucleus.NucleusClient("YOUR_SCALE_API_KEY")
+             model = client.list_models()[0]
+
+             model.add_tags(["tag_A", "tag_B"])
+
+         Args:
+             tags: list of tag names
+         """
+         response: requests.Response = self._client.make_request(
+             {MODEL_TAGS_KEY: tags},
+             f"model/{self.id}/tag",
+             requests_command=requests.post,
+             return_raw_response=True,
+         )
+
+         if response.ok:
+             self.tags.extend(tags)
+
+         return response.json()
+
+     def remove_tags(self, tags: List[str]):
+         """Remove tag(s) from the model. ::
+
+             import nucleus
+             client = nucleus.NucleusClient("YOUR_SCALE_API_KEY")
+             model = client.list_models()[0]
+
+             model.remove_tags(["tag_x"])
+
+         Args:
+             tags: list of tag names to remove
+         """
+         response: requests.Response = self._client.make_request(
+             {MODEL_TAGS_KEY: tags},
+             f"model/{self.id}/tag",
+             requests_command=requests.delete,
+             return_raw_response=True,
+         )
+
+         if response.ok:
+             self.tags = list(filter(lambda t: t not in tags, self.tags))
+
+         return response.json()
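
The tag endpoints above pair with the tags parameter threaded through model creation (see the payload_constructor.py change below). A sketch of the round trip, assuming create_model forwards tags into the creation payload:

    import nucleus

    client = nucleus.NucleusClient("YOUR_SCALE_API_KEY")
    model = client.create_model(
        name="My Model",
        reference_id="My-CNN",
        metadata={"timestamp": "121012401"},
        tags=["experiment-a"],  # assumption: forwarded via construct_model_creation_payload
    )

    model.add_tags(["tag_A", "tag_B"])  # POST model/{model_id}/tag
    model.remove_tags(["tag_A"])        # DELETE model/{model_id}/tag
    print(model.tags)  # the local cache is updated only on 2xx responses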
nucleus/model_run.py CHANGED
@@ -8,7 +8,7 @@ For example::
 
      client = nucleus.NucleusClient(YOUR_SCALE_API_KEY)
      prediction_1 = nucleus.BoxPrediction(label="label", x=0, y=0, width=10, height=10, reference_id="1", confidence=0.9, class_pdf={'label': 0.9, 'other_label': 0.1})
      prediction_2 = nucleus.BoxPrediction(label="label", x=0, y=0, width=10, height=10, reference_id="2", confidence=0.2, class_pdf={'label': 0.2, 'other_label': 0.8})
-     model = client.add_model(name="My Model", reference_id="My-CNN", metadata={"timestamp": "121012401"})
+     model = client.create_model(name="My Model", reference_id="My-CNN", metadata={"timestamp": "121012401"})
      response = dataset.upload_predictions(model, [prediction_1, prediction_2])
  """
 
nucleus/{shapely_not_installed.py → package_not_installed.py} RENAMED
@@ -1,7 +1,7 @@
  import sys
 
 
- class ShapelyNotInstalled:
+ class PackageNotInstalled:
      def __init__(self, *args, **kwargs):
          self.raise_error_msg()
 
@@ -23,6 +23,6 @@ class ShapelyNotInstalled:
          else:
              platform_specific_msg = "GEOS package will need to be installed see (https://trac.osgeo.org/geos/)"
          raise ModuleNotFoundError(
-             f"Module 'shapely' not found. Install optionally with `scale-nucleus[shapely]` or when developing "
-             f"`poetry install -E shapely`. {platform_specific_msg}"
+             f"Module 'shapely' not found. Install optionally with `scale-nucleus[metrics]` or when developing "
+             f"`poetry install -E metrics`. {platform_specific_msg}"
          )
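
The rename generalizes the lazy-failure sentinel beyond shapely: optional imports bind the class in place of the missing module, so imports always succeed and the install hint only surfaces on first use. A minimal sketch of the pattern, mirroring the segmentation_utils.py change above:

    try:
        from shapely import geometry
    except (ModuleNotFoundError, OSError):  # OSError covers a broken GEOS install
        from nucleus.package_not_installed import PackageNotInstalled

        geometry = PackageNotInstalled

    # Module import succeeds either way; touching the sentinel raises
    # ModuleNotFoundError with the `scale-nucleus[metrics]` install hint.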
nucleus/payload_constructor.py CHANGED
@@ -17,6 +17,7 @@ from .constants import (
      METADATA_KEY,
      MODEL_BUNDLE_NAME_KEY,
      MODEL_ID_KEY,
+     MODEL_TAGS_KEY,
      NAME_KEY,
      REFERENCE_ID_KEY,
      SCENES_KEY,
@@ -127,6 +128,7 @@ def construct_model_creation_payload(
      reference_id: str,
      metadata: Optional[Dict],
      bundle_name: Optional[str],
+     tags: Optional[List[str]],
  ) -> dict:
      payload = {
          NAME_KEY: name,
@@ -136,6 +138,8 @@ def construct_model_creation_payload(
 
      if bundle_name:
          payload[MODEL_BUNDLE_NAME_KEY] = bundle_name
+     if tags:
+         payload[MODEL_TAGS_KEY] = tags
 
      return payload
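
Assuming MODEL_TAGS_KEY resolves to a "tags" field (the constant's value is not shown in this diff), the extended payload looks roughly like:

    from nucleus.payload_constructor import construct_model_creation_payload

    payload = construct_model_creation_payload(
        name="My Model",
        reference_id="My-CNN",
        metadata=None,
        bundle_name=None,
        tags=["experiment-a"],
    )
    # Roughly: {"name": "My Model", "reference_id": "My-CNN",
    #           "metadata": None, "tags": ["experiment-a"]}
    # bundle_name and tags are only included when truthy.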
 
nucleus/prediction.py CHANGED
@@ -75,7 +75,7 @@ class SegmentationPrediction(SegmentationAnnotation):
          from nucleus import SegmentationPrediction
 
          segmentation = SegmentationPrediction(
-             mask_url="s3://your-bucket-name/pred-seg-masks/image_2_pred_mask_id1.png",
+             mask_url="s3://your-bucket-name/pred-seg-masks/image_2_pred_mask_id_1.png",
              annotations=[
                  Segment(label="grass", index="1"),
                  Segment(label="road", index="2"),
@@ -88,7 +88,7 @@ class SegmentationPrediction(SegmentationAnnotation):
 
      Parameters:
          mask_url (str): A URL pointing to the segmentation prediction mask which is
-             accessible to Scale. This URL can be a path to a local file.
+             accessible to Scale. This "URL" can also be a path to a local file.
              The mask is an HxW int8 array saved in PNG format,
              with each pixel value ranging from [0, N), where N is the number of
              possible classes (for semantic segmentation) or instances (for instance
@@ -226,7 +226,7 @@ class LinePrediction(LineAnnotation):
 
      Parameters:
          label (str): The label for this prediction (e.g. car, pedestrian, bicycle).
-         vertices List[:class:`Point`]: The list of points making up the line.
+         vertices (List[:class:`Point`]): The list of points making up the line.
          reference_id (str): User-defined ID of the image to which to apply this
              annotation.
          confidence: 0-1 indicating the confidence of the prediction.
@@ -600,6 +600,9 @@ class PredictionList:
          default_factory=list
      )
 
+     def items(self):
+         return self.__dict__.items()
+
      def add_predictions(self, predictions: List[Prediction]):
          for prediction in predictions:
              if isinstance(prediction, BoxPrediction):
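
The new items() passthrough exposes PredictionList's per-type fields, making it easy to iterate predictions grouped by type. A short sketch (field names such as box_predictions are assumed from the dataclass layout):

    from nucleus import BoxPrediction
    from nucleus.prediction import PredictionList

    box = BoxPrediction(
        label="car", x=0, y=0, width=10, height=10, reference_id="image_1"
    )

    prediction_list = PredictionList()
    prediction_list.add_predictions([box])

    for field_name, predictions in prediction_list.items():
        if predictions:
            print(field_name, len(predictions))  # e.g. box_predictions 1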
nucleus/scene.py CHANGED
@@ -1,4 +1,5 @@
  import json
+ import warnings
  from abc import ABC
  from dataclasses import dataclass, field
  from typing import Any, Dict, List, Optional, Union
@@ -470,8 +471,14 @@ class VideoScene(ABC):
      items: List[DatasetItem] = field(default_factory=list)
      metadata: Optional[dict] = field(default_factory=dict)
      upload_to_scale: Optional[bool] = True
+     attachment_type: Optional[str] = None
 
      def __post_init__(self):
+         if self.attachment_type:
+             warnings.warn(
+                 "The attachment_type parameter is no longer required and will be deprecated soon.",
+                 DeprecationWarning,
+             )
          if self.metadata is None:
              self.metadata = {}
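
A sketch of the new behavior: passing attachment_type is now a no-op apart from the warning (constructor fields other than those visible in this hunk, e.g. reference_id, are assumed from the existing VideoScene API):

    import warnings

    from nucleus.scene import VideoScene

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        scene = VideoScene(reference_id="scene_1", attachment_type="image")
        assert any(w.category is DeprecationWarning for w in caught)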