scale-nucleus 0.17.8__py3-none-any.whl → 0.17.9__py3-none-any.whl

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
nucleus/annotation.py CHANGED
@@ -159,7 +159,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
 metadata: Optional[Dict] = None
 embedding_vector: Optional[list] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata if self.metadata else {}
@@ -180,7 +180,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
 metadata=payload.get(METADATA_KEY, {}),
 embedding_vector=payload.get(EMBEDDING_VECTOR_KEY, None),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -198,7 +198,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
 METADATA_KEY: self.metadata,
 EMBEDDING_VECTOR_KEY: self.embedding_vector,
 TRACK_REFERENCE_ID_KEY: self.track_reference_id,
- TASK_ID_KEY: self.task_id,
+ TASK_ID_KEY: self._task_id,
 }

 def __eq__(self, other):
@@ -213,7 +213,7 @@ class BoxAnnotation(Annotation): # pylint: disable=R0902
 and sorted(self.metadata.items()) == sorted(other.metadata.items())
 and self.embedding_vector == other.embedding_vector
 and self.track_reference_id == other.track_reference_id
- and self.task_id == other.task_id
+ and self._task_id == other._task_id
 )


@@ -280,7 +280,7 @@ class LineAnnotation(Annotation):
 annotation_id: Optional[str] = None
 metadata: Optional[Dict] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata if self.metadata else {}
@@ -310,7 +310,7 @@ class LineAnnotation(Annotation):
 annotation_id=payload.get(ANNOTATION_ID_KEY, None),
 metadata=payload.get(METADATA_KEY, {}),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -324,7 +324,7 @@ class LineAnnotation(Annotation):
 ANNOTATION_ID_KEY: self.annotation_id,
 METADATA_KEY: self.metadata,
 TRACK_REFERENCE_ID_KEY: self.track_reference_id,
- TASK_ID_KEY: self.task_id,
+ TASK_ID_KEY: self._task_id,
 }
 return payload

@@ -375,7 +375,7 @@ class PolygonAnnotation(Annotation):
 metadata: Optional[Dict] = None
 embedding_vector: Optional[list] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata if self.metadata else {}
@@ -406,7 +406,7 @@ class PolygonAnnotation(Annotation):
 metadata=payload.get(METADATA_KEY, {}),
 embedding_vector=payload.get(EMBEDDING_VECTOR_KEY, None),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -421,7 +421,7 @@ class PolygonAnnotation(Annotation):
 METADATA_KEY: self.metadata,
 EMBEDDING_VECTOR_KEY: self.embedding_vector,
 TRACK_REFERENCE_ID_KEY: self.track_reference_id,
- TASK_ID_KEY: self.task_id,
+ TASK_ID_KEY: self._task_id,
 }
 return payload

@@ -518,7 +518,7 @@ class KeypointsAnnotation(Annotation):
 annotation_id: Optional[str] = None
 metadata: Optional[Dict] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata or {}
@@ -571,7 +571,7 @@ class KeypointsAnnotation(Annotation):
 annotation_id=payload.get(ANNOTATION_ID_KEY, None),
 metadata=payload.get(METADATA_KEY, {}),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -587,7 +587,7 @@ class KeypointsAnnotation(Annotation):
 ANNOTATION_ID_KEY: self.annotation_id,
 METADATA_KEY: self.metadata,
 TRACK_REFERENCE_ID_KEY: self.track_reference_id,
- TASK_ID_KEY: self.task_id,
+ TASK_ID_KEY: self._task_id,
 }
 return payload

@@ -692,7 +692,7 @@ class CuboidAnnotation(Annotation): # pylint: disable=R0902
 annotation_id: Optional[str] = None
 metadata: Optional[Dict] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata if self.metadata else {}
@@ -709,7 +709,7 @@ class CuboidAnnotation(Annotation): # pylint: disable=R0902
 annotation_id=payload.get(ANNOTATION_ID_KEY, None),
 metadata=payload.get(METADATA_KEY, {}),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -729,7 +729,8 @@ class CuboidAnnotation(Annotation): # pylint: disable=R0902
 payload[METADATA_KEY] = self.metadata
 if self.track_reference_id:
 payload[TRACK_REFERENCE_ID_KEY] = self.track_reference_id
-
+ if self._task_id:
+ payload[TASK_ID_KEY] = self._task_id
 return payload


@@ -942,7 +943,7 @@ class CategoryAnnotation(Annotation):
 taxonomy_name: Optional[str] = None
 metadata: Optional[Dict] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata if self.metadata else {}
@@ -955,7 +956,7 @@ class CategoryAnnotation(Annotation):
 taxonomy_name=payload.get(TAXONOMY_NAME_KEY, None),
 metadata=payload.get(METADATA_KEY, {}),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -966,7 +967,7 @@ class CategoryAnnotation(Annotation):
 REFERENCE_ID_KEY: self.reference_id,
 METADATA_KEY: self.metadata,
 TRACK_REFERENCE_ID_KEY: self.track_reference_id,
- TASK_ID_KEY: self.task_id,
+ TASK_ID_KEY: self._task_id,
 }
 if self.taxonomy_name is not None:
 payload[TAXONOMY_NAME_KEY] = self.taxonomy_name
@@ -982,7 +983,7 @@ class MultiCategoryAnnotation(Annotation):
 taxonomy_name: Optional[str] = None
 metadata: Optional[Dict] = None
 track_reference_id: Optional[str] = None
- task_id: Optional[str] = None
+ _task_id: Optional[str] = field(default=None, repr=False)

 def __post_init__(self):
 self.metadata = self.metadata if self.metadata else {}
@@ -995,7 +996,7 @@ class MultiCategoryAnnotation(Annotation):
 taxonomy_name=payload.get(TAXONOMY_NAME_KEY, None),
 metadata=payload.get(METADATA_KEY, {}),
 track_reference_id=payload.get(TRACK_REFERENCE_ID_KEY, None),
- task_id=payload.get(TASK_ID_KEY, None),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -1006,7 +1007,7 @@ class MultiCategoryAnnotation(Annotation):
 REFERENCE_ID_KEY: self.reference_id,
 METADATA_KEY: self.metadata,
 TRACK_REFERENCE_ID_KEY: self.track_reference_id,
- TASK_ID_KEY: self.task_id,
+ TASK_ID_KEY: self._task_id,
 }
 if self.taxonomy_name is not None:
 payload[TAXONOMY_NAME_KEY] = self.taxonomy_name
@@ -1045,6 +1046,7 @@ class SceneCategoryAnnotation(Annotation):
 reference_id: str
 taxonomy_name: Optional[str] = None
 metadata: Optional[Dict] = field(default_factory=dict)
+ _task_id: Optional[str] = field(default=None, repr=False)

 @classmethod
 def from_json(cls, payload: dict):
@@ -1053,6 +1055,7 @@ class SceneCategoryAnnotation(Annotation):
 reference_id=payload[REFERENCE_ID_KEY],
 taxonomy_name=payload.get(TAXONOMY_NAME_KEY, None),
 metadata=payload.get(METADATA_KEY, {}),
+ _task_id=payload.get(TASK_ID_KEY, None),
 )

 def to_payload(self) -> dict:
@@ -1062,6 +1065,7 @@ class SceneCategoryAnnotation(Annotation):
 GEOMETRY_KEY: {},
 REFERENCE_ID_KEY: self.reference_id,
 METADATA_KEY: self.metadata,
+ TASK_ID_KEY: self._task_id,
 }
 if self.taxonomy_name is not None:
 payload[TAXONOMY_NAME_KEY] = self.taxonomy_name
@@ -1079,9 +1083,7 @@ class AnnotationList:
 default_factory=list
 )
 cuboid_annotations: List[CuboidAnnotation] = field(default_factory=list)
- category_annotations: List[CategoryAnnotation] = field(
- default_factory=list
- )
+ category_annotations: List[CategoryAnnotation] = field(default_factory=list)
 multi_category_annotations: List[MultiCategoryAnnotation] = field(
 default_factory=list
 )
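The recurring change in nucleus/annotation.py replaces the public task_id attribute on each annotation class with a private _task_id dataclass field (default None, repr=False), while the JSON round trip through TASK_ID_KEY stays intact. A minimal sketch of the observable effect, assuming the package's documented BoxAnnotation constructor (the IDs below are placeholders, not values from this diff):

import nucleus

box = nucleus.BoxAnnotation(
    label="car",
    x=10,
    y=20,
    width=30,
    height=40,
    reference_id="image_1",  # placeholder reference ID
)
print(box)              # repr() no longer lists a task-ID field (repr=False)
payload = box.to_payload()
print(sorted(payload))  # the task-ID key is still serialized; its value stays None
                        # unless populated from a server payload via from_json()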
nucleus/annotation_uploader.py CHANGED
@@ -176,9 +176,7 @@ class AnnotationUploader:
 """

 def fn():
- request_json = construct_segmentation_payload(
- segmentations, update
- )
+ request_json = construct_segmentation_payload(segmentations, update)
 form_data = [
 FileFormField(
 name=SERIALIZED_REQUEST_KEY,
@@ -212,15 +210,17 @@ class AnnotationUploader:

 return fn

- @staticmethod
- def check_for_duplicate_ids(annotations: Iterable[Annotation]):
+ def check_for_duplicate_ids(self, annotations: Iterable[Annotation]):
 """Do not allow annotations to have the same (annotation_id, reference_id, task_id) tuple"""

- # some annotations like CategoryAnnotation do not have annotation_id attribute, and as such, we allow duplicates
 tuple_ids = [
- (ann.reference_id, ann.annotation_id, ann.task_id) # type: ignore
+ (
+ ann.reference_id,
+ ann.annotation_id,
+ getattr(ann, "_task_id", None),
+ )
 for ann in annotations
- if hasattr(ann, "annotation_id") and hasattr(ann, "task_id")
+ if hasattr(ann, "annotation_id")
 ]
 tuple_count = Counter(tuple_ids)
 duplicates = {key for key, value in tuple_count.items() if value > 1}
@@ -255,3 +255,20 @@ class PredictionUploader(AnnotationUploader):
 self._route = (
 f"dataset/{dataset_id}/model/{model_id}/uploadPredictions"
 )
+
+ def check_for_duplicate_ids(self, annotations: Iterable[Annotation]):
+ """Do not allow predictions to have the same (annotation_id, reference_id) tuple"""
+ tuple_ids = [
+ (pred.annotation_id, pred.reference_id) # type: ignore
+ for pred in annotations
+ if hasattr(pred, "annotation_id") and hasattr(pred, "reference_id")
+ ]
+ tuple_count = Counter(tuple_ids)
+ duplicates = {key for key, value in tuple_count.items() if value > 1}
+ if len(duplicates) > 0:
+ raise DuplicateIDError(
+ f"Duplicate predictions with the same (annotation_id, reference_id) properties found.\n"
+ f"Duplicates: {duplicates}\n"
+ f"To fix this, avoid duplicate predictions, or specify a different annotation_id attribute "
+ f"for the failing items."
+ )
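The uploader change above turns check_for_duplicate_ids into an instance method and gives PredictionUploader its own override: annotations are de-duplicated on (reference_id, annotation_id, _task_id) via getattr, predictions on (annotation_id, reference_id). A standalone sketch of the same counting pattern (not the library's code):

from collections import Counter

def find_duplicates(keys):
    """Return the keys that occur more than once, mirroring the uploader's check."""
    counts = Counter(keys)
    return {key for key, value in counts.items() if value > 1}

# Two predictions sharing (annotation_id, reference_id) would trigger DuplicateIDError.
prediction_keys = [("ann_1", "img_1"), ("ann_1", "img_1"), ("ann_2", "img_1")]
print(find_duplicates(prediction_keys))  # {('ann_1', 'img_1')}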
nucleus/dataset.py CHANGED
@@ -332,8 +332,7 @@ class Dataset:
 dataset_item_jsons = response.get(DATASET_ITEMS_KEY, None)

 return [
- DatasetItem.from_json(item_json)
- for item_json in dataset_item_jsons
+ DatasetItem.from_json(item_json) for item_json in dataset_item_jsons
 ]

 @property
@@ -699,9 +698,7 @@ class Dataset:
 asynchronous
 ), "In order to avoid timeouts, you must set asynchronous=True when uploading videos."

- return self._append_video_scenes(
- video_scenes, update, asynchronous
- )
+ return self._append_video_scenes(video_scenes, update, asynchronous)

 if len(dataset_items) > WARN_FOR_LARGE_UPLOAD and not asynchronous:
 print(
@@ -2361,10 +2358,7 @@ class Dataset:
 )

 if len(items) > 0:
- if (
- len(items) > GLOB_SIZE_THRESHOLD_CHECK
- and not skip_size_warning
- ):
+ if len(items) > GLOB_SIZE_THRESHOLD_CHECK and not skip_size_warning:
 raise Exception(
 f"Found over {GLOB_SIZE_THRESHOLD_CHECK} items in {dirname}. If this is intended,"
 f" set skip_size_warning=True when calling this function."
@@ -2411,3 +2405,26 @@ class Dataset:
 route=f"dataset/{self.id}/model/{model.id}/pointcloud/{pointcloud_ref_id}/uploadLSSPrediction",
 requests_command=requests.post,
 )
+
+ def export_class_labels(self, slice_id: Optional[str] = None):
+ """Fetches a list of class labels for the dataset.
+
+ Args:
+ slice_id (str | None): The ID of the slice to export class labels for. If None, export class labels for the entire dataset.
+
+ Returns:
+ A list of class labels for the dataset.
+ """
+ if slice_id:
+ api_payload = self._client.make_request(
+ payload=None,
+ route=f"slice/{slice_id}/class_labels",
+ requests_command=requests.get,
+ )
+ else:
+ api_payload = self._client.make_request(
+ payload=None,
+ route=f"dataset/{self.id}/class_labels",
+ requests_command=requests.get,
+ )
+ return api_payload.get("data", [])
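Dataset gains an export_class_labels helper that calls slice/{slice_id}/class_labels when a slice ID is given and dataset/{dataset_id}/class_labels otherwise. A usage sketch, assuming an existing API key, dataset ID, and slice ID (all placeholders):

import nucleus

client = nucleus.NucleusClient("YOUR_SCALE_API_KEY")  # placeholder API key
dataset = client.get_dataset("ds_placeholder_id")     # placeholder dataset ID

all_labels = dataset.export_class_labels()            # class labels across the whole dataset
slice_labels = dataset.export_class_labels(slice_id="slc_placeholder_id")  # restricted to one slice
print(all_labels, slice_labels)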
nucleus/slice.py CHANGED
@@ -168,9 +168,7 @@ class Slice:
 @property
 def pending_job_count(self) -> Optional[int]:
 if self._pending_job_count is None:
- self._pending_job_count = self.info().get(
- "pending_job_count", None
- )
+ self._pending_job_count = self.info().get("pending_job_count", None)
 return self._pending_job_count

 @classmethod
@@ -705,6 +703,15 @@ class Slice:
 )
 return api_payload

+ def export_class_labels(self):
+ """Fetches a list of class labels for the slice."""
+ api_payload = self._client.make_request(
+ payload=None,
+ route=f"slice/{self.id}/class_labels",
+ requests_command=requests.get,
+ )
+ return api_payload.get("data", [])
+


 def check_annotations_are_in_slice(
scale_nucleus-0.17.8.dist-info/METADATA → scale_nucleus-0.17.9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: scale-nucleus
- Version: 0.17.8
+ Version: 0.17.9
 Summary: The official Python client library for Nucleus, the Data Platform for AI
 Home-page: https://scale.com/nucleus
 License: MIT
scale_nucleus-0.17.8.dist-info/RECORD → scale_nucleus-0.17.9.dist-info/RECORD CHANGED
@@ -11,8 +11,8 @@ cli/reference.py,sha256=RuHVhmGTZNe0MfwpL96YjJdaH0OJzg98rz4xeIu4hJU,256
 cli/slices.py,sha256=nxq_Zg1m5oXuhz0ibyHkElvyVWt1AcE9tG-fN4CQxF8,1397
 cli/tests.py,sha256=NiwEVGuF08_jlCiKEIjKhwq55NvyU4xvPEJW5MJmdZg,4590
 nucleus/__init__.py,sha256=RSXlW0oL5ThX9LdRb5Eng6W9Fn_H9bqNZhAQrFQWMx8,49712
- nucleus/annotation.py,sha256=s79v-BmI3MjJiRoZCEoAeFUKn43Pghl63ZJI3P7mHc4,43803
- nucleus/annotation_uploader.py,sha256=CXvIjDNuQczGk8poNLimP7s2owRwEG7xxycMjFm0pYI,9639
+ nucleus/annotation.py,sha256=0JpBMl3UA5CXL4mdiDoBK6yTpLpY7B-TRY1nYopEcNI,44229
+ nucleus/annotation_uploader.py,sha256=LvK5vcP2dVWglPZZ36kofrm14DE0XGaXpNOhXBnMNn8,10400
 nucleus/async_job.py,sha256=yjPDwyyLIrF0K67anGB40xux1AMhWrq1X_hPvQ_ewzc,6890
 nucleus/async_utils.py,sha256=ayqajeSonX68fre3u8AoNRYT8GFGPd4_iu6YPQTvpvU,8226
 nucleus/autocurate.py,sha256=kI0vRqad_An8SN5JX6sSdGP_vNHJI2Pq4NINHuhNf2U,1080
@@ -26,7 +26,7 @@ nucleus/data_transfer_object/dataset_info.py,sha256=5P_gpvAyaqXxj2ZQuzLkGN2XROaN
 nucleus/data_transfer_object/dataset_size.py,sha256=oe-dXaMLpsQRDcJQRZ9Ja8JTagYz4dviZuTognEylp0,111
 nucleus/data_transfer_object/job_status.py,sha256=hxvyNdrdVdj3UpEfwvryKC_QCJQEC9ru6IPjhPFcK44,2038
 nucleus/data_transfer_object/scenes_list.py,sha256=iTHE6vA47bRB6ciyEU4LArUXEXco4ArnGvZTGTeK8xs,432
- nucleus/dataset.py,sha256=azWq0ftX2QOx2pxYDqFuwRPo48DK9I_F_AO_g3xPPIs,94663
+ nucleus/dataset.py,sha256=W-QR-RDvRk369m42osZjNNbdnMVk64CzWDjLma9T9fk,95412
 nucleus/dataset_item.py,sha256=y9ia47i31lX2wvw6EkVAxeHburMrrZpuyjEGlstWa2A,10166
 nucleus/dataset_item_uploader.py,sha256=BD0FTgimEFYmDbnOLIaQZS3OLDfLe5wumADDmgMX598,6684
 nucleus/deprecation_warning.py,sha256=5C9dVusR5UkUQnW2MrRkIXCfbc8ULc7xOaB134agNKk,976
@@ -61,7 +61,7 @@ nucleus/pydantic_base.py,sha256=ZBUVrf948qzaxSuTaiDWxPC_Y8AOBdLKfi52ozGpGWk,1388
 nucleus/quaternion.py,sha256=TAnwj4arQXoTeofFgZMdZsCyxAMnu23N6to0F1WFNwk,1111
 nucleus/retry_strategy.py,sha256=daKZqjZYCh87WtXoVUuR9BZu2TTE-CtOFEYZ-d6xVMY,312
 nucleus/scene.py,sha256=qZQD7QdF6Ics8kuszsl278NCowKVnAkVNGHvPr5luRo,26937
- nucleus/slice.py,sha256=DvZQZS9HvQku9Tj7SHLaE7bv5x7Z72_4T_cio-d2hdA,28312
+ nucleus/slice.py,sha256=1WZLPrfEywFY8d65ekMnduwj0RFPqWm7dgP307ajFRM,28600
 nucleus/test_launch_integration.py,sha256=oFKLZWjFGeUvwVV0XAAjP1Y_oKFkaouh_SXVPXtCvcE,10688
 nucleus/track.py,sha256=ROmOyzYZKrHVTnLBhnk-qEBtklD_EDsSnRcGYE8xG4E,3247
 nucleus/upload_response.py,sha256=wR_pfZCBju1vGiGqbVgk8zhM6GhD3ebYxyGBm8y0GvY,3287
@@ -85,8 +85,8 @@ nucleus/validate/scenario_test.py,sha256=pCmM157dblSciZCDTw-f47Fpy3OUZFgXmokdhIL
 nucleus/validate/scenario_test_evaluation.py,sha256=Q0WzaEE9uUbPVc4EHlCoKjhJcqMNt4QbyiiJx12VOR0,4075
 nucleus/validate/scenario_test_metric.py,sha256=AhVFOB1ULwBqlZ2X_Au1TXy4iQELljtzR4ZpeLB35So,1209
 nucleus/validate/utils.py,sha256=VjdIJj9Pii4z4L6xbvClAc7ra_J7cX0vWB_J2X6yrGE,185
- scale_nucleus-0.17.8.dist-info/LICENSE,sha256=jaTGyQSQIZeWMo5iyYqgbAYHR9Bdy7nOzgE-Up3m_-g,1075
- scale_nucleus-0.17.8.dist-info/METADATA,sha256=XZWcJybnh0CG0lxxioOUf7KC-s2HNpkduNqvRmxU7k0,7920
- scale_nucleus-0.17.8.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- scale_nucleus-0.17.8.dist-info/entry_points.txt,sha256=fmqEzh6NZQyg9eFMILnWabKT8OWQTMSCdDzMiVq2zYs,32
- scale_nucleus-0.17.8.dist-info/RECORD,,
+ scale_nucleus-0.17.9.dist-info/LICENSE,sha256=jaTGyQSQIZeWMo5iyYqgbAYHR9Bdy7nOzgE-Up3m_-g,1075
+ scale_nucleus-0.17.9.dist-info/METADATA,sha256=v7fMJK93-vcbjE68FfsV4ADjpgqRJDGB57qNeSoUhzA,7920
+ scale_nucleus-0.17.9.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ scale_nucleus-0.17.9.dist-info/entry_points.txt,sha256=fmqEzh6NZQyg9eFMILnWabKT8OWQTMSCdDzMiVq2zYs,32
+ scale_nucleus-0.17.9.dist-info/RECORD,,