scale-nucleus 0.16.12__py3-none-any.whl → 0.16.14__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the respective public registries.
nucleus/__init__.py CHANGED
@@ -45,13 +45,17 @@ __all__ = [
  import datetime
  import os
  import warnings
- from typing import Any, Dict, List, Optional, Tuple, Union
-
- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
-     import pydantic.v1 as pydantic
- except ImportError:
-     import pydantic
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+
+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
+     from pydantic.v1 import parse_obj_as
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import parse_obj_as
+     except ImportError:
+         from pydantic import parse_obj_as

  import requests
  import tqdm
@@ -216,7 +220,11 @@ class NucleusClient:
          List of all datasets accessible to user
      """
      response = self.make_request({}, "dataset/details", requests.get)
-     dataset_details = pydantic.parse_obj_as(List[DatasetDetails], response)
+     dataset_details = (
+         parse_obj_as(  # pylint: disable=used-before-assignment
+             List[DatasetDetails], response
+         )
+     )
      return [
          Dataset(d.id, client=self, name=d.name) for d in dataset_details
      ]
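
The conditional pydantic import above is the pattern this release applies across the codebase. A minimal standalone sketch of the idea (illustrative, not the library's code verbatim):

    from typing import TYPE_CHECKING, List

    if TYPE_CHECKING:
        # Static type checkers see exactly one import origin: the v1 API.
        from pydantic.v1 import parse_obj_as
    else:
        try:
            # pydantic v2 re-exports the v1 API under pydantic.v1 ...
            from pydantic.v1 import parse_obj_as
        except ImportError:
            # ... while on pydantic v1 the top-level module already is the v1 API.
            from pydantic import parse_obj_as

    print(parse_obj_as(List[int], ["1", 2]))  # [1, 2] on either pydantic major

At runtime exactly one branch executes, but pylint cannot prove the name is always bound, which is why the use sites throughout this diff gain `# pylint: disable=used-before-assignment` comments.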
nucleus/annotation_uploader.py CHANGED
@@ -57,6 +57,7 @@ class AnnotationUploader:
      update: bool = False,
      remote_files_per_upload_request: int = 20,
      local_files_per_upload_request: int = 10,
+     trained_slice_id: Optional[str] = None,
  ):
      """For more details on parameters and functionality, see dataset.annotate."""
      if local_files_per_upload_request > 10:
@@ -95,6 +96,7 @@ class AnnotationUploader:
              update,
              batch_size=remote_files_per_upload_request,
              segmentation=True,
+             trained_slice_id=trained_slice_id,
          )
      )
      if annotations_without_files:
@@ -104,6 +106,7 @@ class AnnotationUploader:
              update,
              batch_size=batch_size,
              segmentation=False,
+             trained_slice_id=trained_slice_id,
          )
      )

@@ -115,6 +118,7 @@
      update: bool,
      batch_size: int,
      segmentation: bool,
+     trained_slice_id: Optional[str],
  ):
      batches = [
          annotations[i : i + batch_size]
@@ -125,7 +129,9 @@
          "Segmentation batches" if segmentation else "Annotation batches"
      )
      for batch in self._client.tqdm_bar(batches, desc=progress_bar_name):
-         payload = construct_annotation_payload(batch, update)
+         payload = construct_annotation_payload(
+             batch, update, trained_slice_id
+         )
          responses.append(
              self._client.make_request(payload, route=self._route)
          )
@@ -234,9 +240,11 @@ class PredictionUploader(AnnotationUploader):
      dataset_id: Optional[str] = None,
      model_id: Optional[str] = None,
      model_run_id: Optional[str] = None,
+     trained_slice_id: Optional[str] = None,
  ):
      super().__init__(dataset_id, client)
      self._client = client
+     self.trained_slice_id = trained_slice_id
      if model_run_id is not None:
          assert model_id is None and dataset_id is None
          self._route = f"modelRun/{model_run_id}/predict"
nucleus/chip_utils.py CHANGED
@@ -116,13 +116,13 @@ def write_chip(


  def generate_offsets(w: int, h: int, chip_size: int, stride_size: int):
-     xs = np.arange(0, w - stride_size, chip_size - stride_size)
-     ys = np.arange(0, h - stride_size, chip_size - stride_size)
+     xs = np.arange(0, w - stride_size, stride_size)
+     ys = np.arange(0, h - stride_size, stride_size)
      if len(xs) > 1:
          xs = np.round(xs * (w - chip_size) / xs[-1]).astype(int)
      if len(ys) > 1:
          ys = np.round(ys * (h - chip_size) / ys[-1]).astype(int)
-     yield from product(ys, xs)
+     yield from product(xs, ys)


  def chip_annotations(data, x0: int, y0: int, x1: int, y1: int):
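
Two fixes land here: chip origins now advance by `stride_size` (the old step of `chip_size - stride_size` skipped most positions), and offsets are yielded as (x, y) instead of (y, x). A standalone check of the fixed arithmetic, with illustrative numbers:

    from itertools import product

    import numpy as np

    def generate_offsets(w: int, h: int, chip_size: int, stride_size: int):
        # Chip origins advance by stride_size ...
        xs = np.arange(0, w - stride_size, stride_size)
        ys = np.arange(0, h - stride_size, stride_size)
        # ... then get rescaled so the last chip ends flush with the image edge.
        if len(xs) > 1:
            xs = np.round(xs * (w - chip_size) / xs[-1]).astype(int)
        if len(ys) > 1:
            ys = np.round(ys * (h - chip_size) / ys[-1]).astype(int)
        # Offsets come out as (x, y) pairs.
        yield from product(xs, ys)

    offsets = list(generate_offsets(100, 100, chip_size=64, stride_size=16))
    print(len(offsets))  # 36 offsets; x (and y) advance 0, 7, 14, 22, 29, 36

With the old step of `64 - 16 = 48`, the same call produced only 4 offsets; the fix restores the expected overlap between neighboring chips.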
nucleus/constants.py CHANGED
@@ -149,6 +149,7 @@ TAXONOMY_NAME_KEY = "taxonomy_name"
  TRACK_REFERENCE_ID_KEY = "track_reference_id"
  TRACK_REFERENCE_IDS_KEY = "track_reference_ids"
  TRACKS_KEY = "tracks"
+ TRAINED_SLICE_ID_KEY = "trained_slice_id"
  TRUE_POSITIVE_KEY = "true_positive"
  TYPE_KEY = "type"
  UPDATED_ITEMS = "updated_items"
nucleus/data_transfer_object/job_status.py CHANGED
@@ -1,15 +1,19 @@
  # pylint: disable=E0213

  from datetime import datetime
- from typing import List, Optional, Union
+ from typing import TYPE_CHECKING, List, Optional, Union

  from dateutil.parser import ParserError, parse

- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
      from pydantic.v1 import validator
- except ImportError:
-     from pydantic import validator
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import validator
+     except ImportError:
+         from pydantic import validator

  from nucleus.constants import JOB_REQ_LIMIT
  from nucleus.job import CustomerJobTypes
@@ -24,7 +28,9 @@ class JobInfoRequestPayload(ImmutableModel):
      limit: Optional[int]
      show_completed: bool

-     @validator("from_date", "to_date")
+     @validator(  # pylint: disable=used-before-assignment
+         "from_date", "to_date"
+     )
      def ensure_date_format(cls, date):
          if date is None:
              return None
nucleus/dataset.py CHANGED
@@ -66,6 +66,7 @@ from .constants import (
      SLICE_ID_KEY,
      TRACK_REFERENCE_IDS_KEY,
      TRACKS_KEY,
+     TRAINED_SLICE_ID_KEY,
      UPDATE_KEY,
      VIDEO_URL_KEY,
  )
@@ -1793,6 +1794,7 @@ class Dataset:
      batch_size: int = 5000,
      remote_files_per_upload_request: int = 20,
      local_files_per_upload_request: int = 10,
+     trained_slice_id: Optional[str] = None,
  ):
      """Uploads predictions and associates them with an existing :class:`Model`.

@@ -1841,19 +1843,20 @@
              you can try lowering this batch size. This is only relevant for
              asynchronous=False
          remote_files_per_upload_request: Number of remote files to upload in each
-           request. Segmentations have either local or remote files, if you are
-           getting timeouts while uploading segmentations with remote urls, you
-           should lower this value from its default of 20. This is only relevant for
-           asynchronous=False.
+             request. Segmentations have either local or remote files, if you are
+             getting timeouts while uploading segmentations with remote urls, you
+             should lower this value from its default of 20. This is only relevant for
+             asynchronous=False.
          local_files_per_upload_request: Number of local files to upload in each
-           request. Segmentations have either local or remote files, if you are
-           getting timeouts while uploading segmentations with local files, you
-           should lower this value from its default of 10. The maximum is 10.
-           This is only relevant for asynchronous=False
+             request. Segmentations have either local or remote files, if you are
+             getting timeouts while uploading segmentations with local files, you
+             should lower this value from its default of 10. The maximum is 10.
+             This is only relevant for asynchronous=False
+         trained_slice_id: Nucleus-generated slice ID (starts with ``slc_``) which was used
+             to train the model.

      Returns:
          Payload describing the synchronous upload::
-
          {
              "dataset_id": str,
              "model_run_id": str,
@@ -1876,7 +1879,11 @@
          predictions, self.id, self._client
      )
      response = self._client.make_request(
-         payload={REQUEST_ID_KEY: request_id, UPDATE_KEY: update},
+         payload={
+             REQUEST_ID_KEY: request_id,
+             UPDATE_KEY: update,
+             TRAINED_SLICE_ID_KEY: trained_slice_id,
+         },
          route=f"dataset/{self.id}/model/{model.id}/uploadPredictions?async=1",
      )
      return AsyncJob.from_json(response, self._client)
@@ -1887,6 +1894,7 @@
          update=update,
          remote_files_per_upload_request=remote_files_per_upload_request,
          local_files_per_upload_request=local_files_per_upload_request,
+         trained_slice_id=trained_slice_id,
      )

  def predictions_iloc(self, model, index):
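
Taken together with the uploader changes above, the new argument lets a prediction upload record which slice the model was trained on. A sketch of the call (illustrative only: the API key, the `ds_`/`prj_`/`slc_` IDs, and the lookup helpers are placeholders, not values from this diff):

    from nucleus import BoxPrediction, NucleusClient

    client = NucleusClient("YOUR_SCALE_API_KEY")    # placeholder credentials
    dataset = client.get_dataset("ds_placeholder")  # placeholder dataset ID
    model = client.get_model("prj_placeholder")     # placeholder model ID

    predictions = [
        BoxPrediction(
            label="car", x=5, y=10, width=20, height=30, reference_id="img_1"
        )
    ]

    # Asynchronous path: trained_slice_id travels in the request payload
    # under TRAINED_SLICE_ID_KEY, per the hunk above.
    job = dataset.upload_predictions(
        model,
        predictions,
        asynchronous=True,
        trained_slice_id="slc_placeholder",
    )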
nucleus/payload_constructor.py CHANGED
@@ -24,6 +24,7 @@ from .constants import (
      SCENES_KEY,
      SEGMENTATIONS_KEY,
      TAXONOMY_NAME_KEY,
+     TRAINED_SLICE_ID_KEY,
      TYPE_KEY,
      UPDATE_KEY,
  )
@@ -76,6 +77,7 @@ def construct_annotation_payload(
          ]
      ],
      update: bool,
+     trained_slice_id: Optional[str],
  ) -> dict:
      annotations = [
          annotation.to_payload()
@@ -92,6 +94,8 @@
      payload[ANNOTATIONS_KEY] = annotations
      if segmentations:
          payload[SEGMENTATIONS_KEY] = segmentations
+     if trained_slice_id:
+         payload[TRAINED_SLICE_ID_KEY] = trained_slice_id
      return payload
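For reference, the request body produced by `construct_annotation_payload` now looks roughly like this when a slice ID is passed (a sketch of the shape using the literal key values from nucleus/constants.py; the key is simply omitted when `trained_slice_id` is None):

    # Shape only; segmentation annotations go under "segmentations" instead.
    payload = {
        "annotations": [],                      # ANNOTATIONS_KEY
        "update": False,                        # UPDATE_KEY
        "trained_slice_id": "slc_placeholder",  # TRAINED_SLICE_ID_KEY
    }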
 
nucleus/pydantic_base.py CHANGED
@@ -4,14 +4,20 @@ We started using pydantic during v1 and are kind of stuck with it now unless we
  As a library we want to support v1 and v2 such that we're not causing downstream problems for our users.
  This means we have to do some import shenanigans to support both v1 and v2.
  """
- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
-     from pydantic.v1 import BaseModel  # pylint: disable=no-name-in-module
- except ImportError:
-     from pydantic import BaseModel
+ from typing import TYPE_CHECKING

+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
+     from pydantic.v1 import BaseModel, Extra, ValidationError
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import BaseModel  # pylint: disable=no-name-in-module
+     except ImportError:
+         from pydantic import BaseModel

- class ImmutableModel(BaseModel):
+
+ class ImmutableModel(BaseModel):  # pylint: disable=used-before-assignment
      class Config:
          allow_mutation = False

nucleus/test_launch_integration.py CHANGED
@@ -1,24 +1,28 @@
  import io
- from typing import Any, Callable, Dict, List, Optional, Type
+ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Type

  from PIL import Image, ImageDraw

- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
      from pydantic.v1 import BaseModel, Extra, ValidationError
- except ImportError:
-     from pydantic import BaseModel, Extra, ValidationError
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import BaseModel, Extra, ValidationError
+     except ImportError:
+         from pydantic import BaseModel, Extra, ValidationError

  # From scaleapi/server/src/lib/select/api/types.ts
  # These classes specify how user models must pass output to Launch + Nucleus.


- class PointModel(BaseModel):
+ class PointModel(BaseModel):  # pylint: disable=used-before-assignment
      x: float
      y: float

      class Config:
-         extra = Extra.forbid
+         extra = Extra.forbid  # pylint: disable=used-before-assignment


  class BoxGeometryModel(BaseModel):
@@ -106,7 +110,7 @@ def verify_output(
      for annotation in annotation_list:
          try:
              model.parse_obj(annotation)
-         except ValidationError as e:
+         except ValidationError as e:  # pylint: disable=used-before-assignment
              raise ValueError("Failed validation") from e
          if annotation["type"] != annotation_type:
              raise ValueError(
nucleus/utils.py CHANGED
@@ -363,7 +363,7 @@ def upload_to_presigned_url(presigned_url: str, file_pointer: IO):
      # TODO optimize this further to deal with truly huge files and flaky internet connection.
      upload_response = requests.put(presigned_url, file_pointer)
      if not upload_response.ok:
-         raise HTTPError(
+         raise HTTPError(  # type: ignore
              f"Tried to put a file to url, but failed with status {upload_response.status_code}. The detailed error was: {upload_response.text}"
          )

nucleus/validate/data_transfer_objects/eval_function.py CHANGED
@@ -1,10 +1,14 @@
- from typing import Any, Dict, List, Optional
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional

- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
      from pydantic.v1 import validator
- except ImportError:
-     from pydantic import validator
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import validator
+     except ImportError:
+         from pydantic import validator

  from ...pydantic_base import ImmutableModel
  from ..constants import ThresholdComparison
@@ -63,7 +67,7 @@ class EvaluationCriterion(ImmutableModel):
      threshold: float
      eval_func_arguments: Dict[str, Any]

-     @validator("eval_function_id")
+     @validator("eval_function_id")  # pylint: disable=used-before-assignment
      def valid_eval_function_id(cls, v):  # pylint: disable=no-self-argument
          if not v.startswith("ef_"):
              raise ValueError(f"Expected field to start with 'ef_', got '{v}'")
nucleus/validate/data_transfer_objects/scenario_test.py CHANGED
@@ -1,10 +1,14 @@
- from typing import List
+ from typing import TYPE_CHECKING, List

- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
      from pydantic.v1 import validator
- except ImportError:
-     from pydantic import validator
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import validator
+     except ImportError:
+         from pydantic import validator

  from nucleus.pydantic_base import ImmutableModel

@@ -19,7 +23,7 @@ class CreateScenarioTestRequest(ImmutableModel):
      slice_id: str
      evaluation_functions: List[EvalFunctionListEntry]

-     @validator("slice_id")
+     @validator("slice_id")  # pylint: disable=used-before-assignment
      def startswith_slice_indicator(cls, v):  # pylint: disable=no-self-argument
          if not v.startswith("slc_"):
              raise ValueError(f"Expected field to start with 'slc_', got '{v}'")
nucleus/validate/data_transfer_objects/scenario_test_evaluations.py CHANGED
@@ -1,10 +1,14 @@
- from typing import Optional
+ from typing import TYPE_CHECKING, Optional

- try:
-     # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+ if TYPE_CHECKING:
+     # Backwards compatibility is even uglier with mypy
      from pydantic.v1 import root_validator, validator
- except ImportError:
-     from pydantic import root_validator, validator
+ else:
+     try:
+         # NOTE: we always use pydantic v1 but have to do these shenanigans to support both v1 and v2
+         from pydantic.v1 import root_validator, validator
+     except ImportError:
+         from pydantic import root_validator, validator

  from nucleus.pydantic_base import ImmutableModel

@@ -16,7 +20,7 @@ class EvaluationResult(ImmutableModel):
      score: float = 0
      weight: float = 1

-     @root_validator()
+     @root_validator()  # pylint: disable=used-before-assignment
      def is_item_or_scene_provided(
          cls, values
      ):  # pylint: disable=no-self-argument
@@ -31,7 +35,7 @@ class EvaluationResult(ImmutableModel):
          )
          return values

-     @validator("weight")
+     @validator("weight")  # pylint: disable=used-before-assignment
      def is_normalized(cls, v):  # pylint: disable=no-self-argument
          if 0 <= v <= 1:
              return v
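
The pylint annotations do not change validator behavior; the v1-style `weight` check still rejects values outside [0, 1]. A standalone sketch of the same idea (assuming pydantic v2 is installed so that `pydantic.v1` exists; on v1, import from `pydantic` directly):

    from pydantic.v1 import BaseModel, ValidationError, validator

    class WeightedResult(BaseModel):  # hypothetical stand-in for EvaluationResult
        weight: float = 1

        @validator("weight")
        def is_normalized(cls, v):  # pylint: disable=no-self-argument
            if 0 <= v <= 1:
                return v
            raise ValueError(f"weight must be in [0, 1], got {v}")

    WeightedResult(weight=0.5)  # passes validation

    try:
        WeightedResult(weight=1.5)
    except ValidationError as err:
        print(err)  # 1 validation error for WeightedResult: weight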
scale_nucleus-0.16.12.dist-info/METADATA → scale_nucleus-0.16.14.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: scale-nucleus
- Version: 0.16.12
+ Version: 0.16.14
  Summary: The official Python client library for Nucleus, the Data Platform for AI
  Home-page: https://scale.com/nucleus
  License: MIT
@@ -24,7 +24,7 @@ Requires-Dist: click (>=7.1.2,<9.0)
  Requires-Dist: nest-asyncio (>=1.5.1,<2.0.0)
  Requires-Dist: numpy (>=1.19.5) ; python_version >= "3.7" and python_version < "3.10"
  Requires-Dist: numpy (>=1.22.0) ; python_version >= "3.10"
- Requires-Dist: pydantic (>=1.8.2,<2.0.0)
+ Requires-Dist: pydantic (>=1.8.2)
  Requires-Dist: python-dateutil (>=2.8.2,<3.0.0)
  Requires-Dist: questionary (>=1.10.0,<2.0.0)
  Requires-Dist: rasterio (>=1.2.0) ; extra == "metrics"
scale_nucleus-0.16.12.dist-info/RECORD → scale_nucleus-0.16.14.dist-info/RECORD CHANGED
@@ -10,23 +10,23 @@ cli/nu.py,sha256=0f71zPq4fe3I1ghhiSRQi39ENhAzoLPdhz_vh8vxSI8,2074
  cli/reference.py,sha256=RuHVhmGTZNe0MfwpL96YjJdaH0OJzg98rz4xeIu4hJU,256
  cli/slices.py,sha256=nxq_Zg1m5oXuhz0ibyHkElvyVWt1AcE9tG-fN4CQxF8,1397
  cli/tests.py,sha256=NiwEVGuF08_jlCiKEIjKhwq55NvyU4xvPEJW5MJmdZg,4590
- nucleus/__init__.py,sha256=mpa_n1DRo_2_P0IvXo3LgVw1xW_IITyJ0g1_13RbxRY,49139
+ nucleus/__init__.py,sha256=BCGkLpOSms6DhHHQLhm4F_Ne_1ZrbH84R2PHjtqBvgo,49405
  nucleus/annotation.py,sha256=qogOLOmNwv2o13kNEZkIRbI2lkp1owNZ2OnRsFJUZwU,42904
- nucleus/annotation_uploader.py,sha256=60kszCh-_5dDcQ6saVBxpID2RqPDoXcQtRIwk8Qd-7Q,9236
+ nucleus/annotation_uploader.py,sha256=ipXw7QhJQXqaftHrbJn8oCGcm6aXyoL0GhQA9EcZWD4,9580
  nucleus/async_job.py,sha256=yjPDwyyLIrF0K67anGB40xux1AMhWrq1X_hPvQ_ewzc,6890
  nucleus/async_utils.py,sha256=ayqajeSonX68fre3u8AoNRYT8GFGPd4_iu6YPQTvpvU,8226
  nucleus/autocurate.py,sha256=kI0vRqad_An8SN5JX6sSdGP_vNHJI2Pq4NINHuhNf2U,1080
  nucleus/camera_params.py,sha256=fl17aaSAZDAJIWo6F2HFvM6HKGcQh9fXvo4t3RzGMc4,3726
- nucleus/chip_utils.py,sha256=k0P85la3bt_gMexYUYADxwgZ9B87wCwPG9N3Bz6h-TE,6767
+ nucleus/chip_utils.py,sha256=1J1NHCh0ZptW8cdeuLWFM_cXuwQVSQFtSF8kXU8s2tI,6743
  nucleus/connection.py,sha256=U7G7h_ufcTYBwqehhKWk899-TTvbRNFzXo7khtb5fy0,2900
- nucleus/constants.py,sha256=KOakfiXxJrzU1hce6MlnvtSaHzritOtbn0ArP0f6AzE,5346
+ nucleus/constants.py,sha256=AbVaenawINKjhNU5SU_y7ueVEmdAS8ENJo2WToDamhA,5388
  nucleus/data_transfer_object/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nucleus/data_transfer_object/dataset_details.py,sha256=1YGvfKkPSqDrXK_y5mBXyRThY07tU-nwOCYTkYCSl6k,214
  nucleus/data_transfer_object/dataset_info.py,sha256=5P_gpvAyaqXxj2ZQuzLkGN2XROaN9Me56OLybCmO3R4,940
  nucleus/data_transfer_object/dataset_size.py,sha256=oe-dXaMLpsQRDcJQRZ9Ja8JTagYz4dviZuTognEylp0,111
- nucleus/data_transfer_object/job_status.py,sha256=9iXssqVwE0psfVPY3jxz56mjo0YZNxbvslSXqejSnlI,1830
+ nucleus/data_transfer_object/job_status.py,sha256=hxvyNdrdVdj3UpEfwvryKC_QCJQEC9ru6IPjhPFcK44,2038
  nucleus/data_transfer_object/scenes_list.py,sha256=iTHE6vA47bRB6ciyEU4LArUXEXco4ArnGvZTGTeK8xs,432
- nucleus/dataset.py,sha256=6_t-rV_1f2ASR_lzYSS3HBvjDLupGCPf0bzsT9k5AHI,86583
+ nucleus/dataset.py,sha256=6eGiXcsBt647dS_VfAH4rBNEYM6W3BODEnDkur43ZcM,86937
  nucleus/dataset_item.py,sha256=lm09hczNuGdqVx_iz1bFqD6IEni_cAosAyiSkmDs-eE,10077
  nucleus/dataset_item_uploader.py,sha256=BD0FTgimEFYmDbnOLIaQZS3OLDfLe5wumADDmgMX598,6684
  nucleus/deprecation_warning.py,sha256=5C9dVusR5UkUQnW2MrRkIXCfbc8ULc7xOaB134agNKk,976
@@ -55,25 +55,25 @@ nucleus/metrics/segmentation_utils.py,sha256=AkqCbyim67K9DA0VQYOUpYHe8vOwSvanFqW
  nucleus/model.py,sha256=4J9MH_byHVEi8XlAZ6qFYHj_N7xc6wySs__KUwjq9og,8744
  nucleus/model_run.py,sha256=WtGy8cD86M_6aVNAp0ELZgunwbztNeOO8nv8ZSpbwhY,9280
  nucleus/package_not_installed.py,sha256=1ae0aqKAM3KrB0C-5MuPPXoz9tLWJUKtP1UZ-vw9Zik,1117
- nucleus/payload_constructor.py,sha256=-dLF03TMGk_wprpjjnnG3sOiYsijHYZX0YKm_je3saE,4574
+ nucleus/payload_constructor.py,sha256=ukoL4ivVGHr4hVuLXqMFcW_zR6JPV8VwlMT-pED8euM,4719
  nucleus/prediction.py,sha256=vLI0_ExaazVTrVhFru4mIWS_rX1xRFkg_El-5EAoaOQ,31092
- nucleus/pydantic_base.py,sha256=HgemyQIc-RfEVxGkTdApplZViAVMHBwk2G2SQzn7cMM,1151
+ nucleus/pydantic_base.py,sha256=ZBUVrf948qzaxSuTaiDWxPC_Y8AOBdLKfi52ozGpGWk,1388
  nucleus/quaternion.py,sha256=TAnwj4arQXoTeofFgZMdZsCyxAMnu23N6to0F1WFNwk,1111
  nucleus/retry_strategy.py,sha256=daKZqjZYCh87WtXoVUuR9BZu2TTE-CtOFEYZ-d6xVMY,312
  nucleus/scene.py,sha256=qZQD7QdF6Ics8kuszsl278NCowKVnAkVNGHvPr5luRo,26937
  nucleus/slice.py,sha256=C7NRGuMmrJR6uv66xKD1MLaW_K2ocSwzp17JqWdIpZE,28005
- nucleus/test_launch_integration.py,sha256=O3-no-g05epYPvqMmjJiLLfITuxUq0wxHqhmFyUbEpo,10386
+ nucleus/test_launch_integration.py,sha256=oFKLZWjFGeUvwVV0XAAjP1Y_oKFkaouh_SXVPXtCvcE,10688
  nucleus/track.py,sha256=ROmOyzYZKrHVTnLBhnk-qEBtklD_EDsSnRcGYE8xG4E,3247
  nucleus/upload_response.py,sha256=wR_pfZCBju1vGiGqbVgk8zhM6GhD3ebYxyGBm8y0GvY,3287
  nucleus/url_utils.py,sha256=EZ3vy1FYTGvXRVNyq43Wif-ypS8LFoDrYMYJre_DVuQ,790
- nucleus/utils.py,sha256=Ur_O4U0-dS0I0u6uw1pK6JaKZxZgpXkRz9AYJFNHqDE,15825
+ nucleus/utils.py,sha256=A6kLL2THcu5UOwQY-B9K9eDVO3vDFkCwPlnpBX15WBY,15841
  nucleus/validate/__init__.py,sha256=UZx1tJHCRCosPXKdjFGaeifHOIf9R6ncYMcq77Gom54,786
  nucleus/validate/client.py,sha256=c8iF-fi7CEyKSoutzwJD0rhE6DwDDKFF-xC0wITCTjE,8219
  nucleus/validate/constants.py,sha256=EoR0BLKqQroyVx6TpirbcMKunOnREdtqL9OUvpejgAk,1089
  nucleus/validate/data_transfer_objects/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nucleus/validate/data_transfer_objects/eval_function.py,sha256=qNCibIw0Za7-er_bBO6ApSxXs96DhlKwzyJnYx7Tl64,4024
- nucleus/validate/data_transfer_objects/scenario_test.py,sha256=pJ2Wc1Wr6bifs5Ig_k7ZNGepQ6XOiPWAPakkUIoq890,738
- nucleus/validate/data_transfer_objects/scenario_test_evaluations.py,sha256=zyIjPvKeeQprenBYspi3zp8fyd9El8q6v9rypp9dB6A,1289
+ nucleus/validate/data_transfer_objects/eval_function.py,sha256=3XSuw-7W3BwNR5u-wYunLj3peXLx3y-p_M4yK-UKNlM,4218
+ nucleus/validate/data_transfer_objects/scenario_test.py,sha256=auDldQTSwzxLA1QF6dN2ghU_qMakQP64MEU5iIor0nw,932
+ nucleus/validate/data_transfer_objects/scenario_test_evaluations.py,sha256=EQU3Gqirz2jwSHuIkdssn84tZ26q0NXehmis4K-18mM,1541
  nucleus/validate/data_transfer_objects/scenario_test_metric.py,sha256=jXGwPx0Y99ga0RfzwGMCG3_06NkiFsp0yHxUuGFJ2EI,208
  nucleus/validate/errors.py,sha256=UOI4oPeVzNLEmibWayfMdCMFNZvgsq-VRG42vC1cT_w,110
  nucleus/validate/eval_functions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -85,8 +85,8 @@ nucleus/validate/scenario_test.py,sha256=pCmM157dblSciZCDTw-f47Fpy3OUZFgXmokdhIL
  nucleus/validate/scenario_test_evaluation.py,sha256=Q0WzaEE9uUbPVc4EHlCoKjhJcqMNt4QbyiiJx12VOR0,4075
  nucleus/validate/scenario_test_metric.py,sha256=AhVFOB1ULwBqlZ2X_Au1TXy4iQELljtzR4ZpeLB35So,1209
  nucleus/validate/utils.py,sha256=VjdIJj9Pii4z4L6xbvClAc7ra_J7cX0vWB_J2X6yrGE,185
- scale_nucleus-0.16.12.dist-info/LICENSE,sha256=jaTGyQSQIZeWMo5iyYqgbAYHR9Bdy7nOzgE-Up3m_-g,1075
- scale_nucleus-0.16.12.dist-info/METADATA,sha256=RSNbZt4O16nJg82nCjA9URxXoENYCFXCaB2E53g-V9A,7863
- scale_nucleus-0.16.12.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- scale_nucleus-0.16.12.dist-info/entry_points.txt,sha256=fmqEzh6NZQyg9eFMILnWabKT8OWQTMSCdDzMiVq2zYs,32
- scale_nucleus-0.16.12.dist-info/RECORD,,
+ scale_nucleus-0.16.14.dist-info/LICENSE,sha256=jaTGyQSQIZeWMo5iyYqgbAYHR9Bdy7nOzgE-Up3m_-g,1075
+ scale_nucleus-0.16.14.dist-info/METADATA,sha256=6roMP1Ge2m2Clwno_PZi6nN7zb2TZpoqe2zOCxlOaUo,7856
+ scale_nucleus-0.16.14.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ scale_nucleus-0.16.14.dist-info/entry_points.txt,sha256=fmqEzh6NZQyg9eFMILnWabKT8OWQTMSCdDzMiVq2zYs,32
+ scale_nucleus-0.16.14.dist-info/RECORD,,