scale-nucleus 0.1.3__py3-none-any.whl → 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nucleus/__init__.py CHANGED
@@ -50,90 +50,83 @@ confidence | float | The optional confidence level of this annotation
 geometry | dict | Representation of the bounding box in the Box2DGeometry format.\n
 metadata | dict | An arbitrary metadata blob for the annotation.\n
 """
-__version__ = "0.1.0"
-
+import asyncio
 import json
 import logging
-import warnings
 import os
-from typing import List, Union, Dict, Callable, Any, Optional
+from typing import Any, Dict, List, Optional, Union

+import aiohttp
+import pkg_resources
+import requests
 import tqdm
 import tqdm.notebook as tqdm_notebook

-import grequests
-import requests
-from requests.adapters import HTTPAdapter
-
-# pylint: disable=E1101
-# TODO: refactor to reduce this file to under 1000 lines.
-# pylint: disable=C0302
-from requests.packages.urllib3.util.retry import Retry
-
-from .constants import REFERENCE_IDS_KEY, DATASET_ITEM_IDS_KEY
-from .dataset import Dataset
-from .dataset_item import DatasetItem
 from .annotation import (
     BoxAnnotation,
     PolygonAnnotation,
-    SegmentationAnnotation,
     Segment,
-)
-from .prediction import (
-    BoxPrediction,
-    PolygonPrediction,
-    SegmentationPrediction,
-)
-from .model_run import ModelRun
-from .slice import Slice
-from .upload_response import UploadResponse
-from .payload_constructor import (
-    construct_append_payload,
-    construct_annotation_payload,
-    construct_model_creation_payload,
-    construct_box_predictions_payload,
-    construct_segmentation_payload,
+    SegmentationAnnotation,
+    Point,
 )
 from .constants import (
-    NUCLEUS_ENDPOINT,
+    ANNOTATION_METADATA_SCHEMA_KEY,
+    ANNOTATIONS_IGNORED_KEY,
+    ANNOTATIONS_PROCESSED_KEY,
+    AUTOTAGS_KEY,
+    DATASET_ID_KEY,
+    DATASET_ITEM_IDS_KEY,
     DEFAULT_NETWORK_TIMEOUT_SEC,
-    ERRORS_KEY,
+    EMBEDDINGS_URL_KEY,
     ERROR_ITEMS,
     ERROR_PAYLOAD,
-    ITEMS_KEY,
-    ITEM_KEY,
+    ERRORS_KEY,
     IMAGE_KEY,
     IMAGE_URL_KEY,
-    DATASET_ID_KEY,
+    ITEM_METADATA_SCHEMA_KEY,
+    ITEMS_KEY,
     MODEL_RUN_ID_KEY,
-    DATASET_ITEM_ID_KEY,
-    SLICE_ID_KEY,
-    ANNOTATIONS_PROCESSED_KEY,
-    ANNOTATIONS_IGNORED_KEY,
-    PREDICTIONS_PROCESSED_KEY,
+    NAME_KEY,
+    NUCLEUS_ENDPOINT,
     PREDICTIONS_IGNORED_KEY,
+    PREDICTIONS_PROCESSED_KEY,
+    REFERENCE_IDS_KEY,
+    SLICE_ID_KEY,
     STATUS_CODE_KEY,
-    SUCCESS_STATUS_CODES,
-    DATASET_NAME_KEY,
-    DATASET_MODEL_RUNS_KEY,
-    DATASET_SLICES_KEY,
-    DATASET_LENGTH_KEY,
-    NAME_KEY,
-    ANNOTATIONS_KEY,
-    AUTOTAGS_KEY,
-    ANNOTATION_METADATA_SCHEMA_KEY,
-    ITEM_METADATA_SCHEMA_KEY,
-    FORCE_KEY,
-    EMBEDDINGS_URL_KEY,
+    UPDATE_KEY,
 )
-from .model import Model
+from .dataset import Dataset
+from .dataset_item import DatasetItem
 from .errors import (
+    DatasetItemRetrievalError,
     ModelCreationError,
     ModelRunCreationError,
-    DatasetItemRetrievalError,
     NotFoundError,
     NucleusAPIError,
 )
+from .model import Model
+from .model_run import ModelRun
+from .payload_constructor import (
+    construct_annotation_payload,
+    construct_append_payload,
+    construct_box_predictions_payload,
+    construct_model_creation_payload,
+    construct_segmentation_payload,
+)
+from .prediction import (
+    BoxPrediction,
+    PolygonPrediction,
+    SegmentationPrediction,
+)
+from .slice import Slice
+from .upload_response import UploadResponse
+
+# pylint: disable=E1101
+# TODO: refactor to reduce this file to under 1000 lines.
+# pylint: disable=C0302
+
+
+__version__ = pkg_resources.get_distribution("scale-nucleus").version

 logger = logging.getLogger(__name__)
 logging.basicConfig()
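Two things happen in this hunk: the grequests transport (and its requests/urllib3 retry plumbing) is dropped in favor of asyncio plus aiohttp, and the version string now comes from package metadata, fixing the hardcoded `__version__ = "0.1.0"` that had drifted from the released 0.1.3. A minimal sketch of the new lookup:

```python
import pkg_resources

# Reads the version from the installed distribution's metadata, so it can
# no longer drift from the value baked into the release artifacts.
print(pkg_resources.get_distribution("scale-nucleus").version)  # e.g. "0.1.10"
```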
@@ -151,11 +144,16 @@ class NucleusClient:
         self,
         api_key: str,
         use_notebook: bool = False,
-        endpoint=NUCLEUS_ENDPOINT,
+        endpoint: str = None,
     ):
         self.api_key = api_key
         self.tqdm_bar = tqdm.tqdm
-        self.endpoint = endpoint
+        if endpoint is None:
+            self.endpoint = os.environ.get(
+                "NUCLEUS_ENDPOINT", NUCLEUS_ENDPOINT
+            )
+        else:
+            self.endpoint = endpoint
         self._use_notebook = use_notebook
         if use_notebook:
             self.tqdm_bar = tqdm_notebook.tqdm
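The endpoint can now be overridden without touching code. A minimal sketch of the resolution order, using a hypothetical URL and a placeholder API key:

```python
import os
from nucleus import NucleusClient

# Fallback chain: explicit argument > NUCLEUS_ENDPOINT env var > packaged default.
os.environ["NUCLEUS_ENDPOINT"] = "https://nucleus.example.internal/v1"  # hypothetical
client = NucleusClient(api_key="live_xxx")  # placeholder key
print(client.endpoint)  # -> https://nucleus.example.internal/v1
```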
@@ -228,13 +226,15 @@ class NucleusClient:
         """
         return Dataset(dataset_id, self)

-    def get_model_run(self, model_run_id: str) -> ModelRun:
+    def get_model_run(self, model_run_id: str, dataset_id: str) -> ModelRun:
         """
         Fetches a model_run for given id
         :param model_run_id: internally controlled model_run_id
+        :param dataset_id: the dataset id which may determine the prediction schema
+            for this model run if present on the dataset.
         :return: model_run
         """
-        return ModelRun(model_run_id, self)
+        return ModelRun(model_run_id, dataset_id, self)

     def delete_model_run(self, model_run_id: str):
         """
@@ -324,13 +324,13 @@ class NucleusClient:
         dataset_id: str,
         dataset_items: List[DatasetItem],
         batch_size: int = 100,
-        force: bool = False,
+        update: bool = False,
     ):
         """
         Appends images to a dataset with given dataset_id.
-        Overwrites images on collision if forced.
+        Overwrites images on collision if updated.
         :param dataset_id: id of a dataset
-        :param payload: { "items": List[DatasetItem], "force": bool }
+        :param payload: { "items": List[DatasetItem], "update": bool }
         :param local: flag if images are stored locally
         :param batch_size: size of the batch for long payload
         :return:
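The `force` keyword is renamed to `update` throughout the append path (along with `FORCE_KEY` becoming `UPDATE_KEY` in the request payload below). A sketch of the call-site migration; the method name `populate_dataset` is assumed from the parameter list shown here, and the ids and paths are illustrative:

```python
from nucleus import DatasetItem, NucleusClient

client = NucleusClient(api_key="live_xxx")  # placeholder key
items = [DatasetItem(image_location="s3://bucket/img.jpg", reference_id="img-1")]
# Before: client.populate_dataset("ds_def456", items, force=True)
response = client.populate_dataset("ds_def456", items, update=True)
```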
@@ -373,21 +373,24 @@ class NucleusClient:
         async_responses: List[Any] = []

         for batch in tqdm_local_batches:
-            payload = construct_append_payload(batch, force)
+            payload = construct_append_payload(batch, update)
             responses = self._process_append_requests_local(
-                dataset_id, payload, force
+                dataset_id, payload, update
             )
             async_responses.extend(responses)

         for batch in tqdm_remote_batches:
-            payload = construct_append_payload(batch, force)
+            payload = construct_append_payload(batch, update)
             responses = self._process_append_requests(
-                dataset_id, payload, force, batch_size, batch_size
+                dataset_id=dataset_id,
+                payload=payload,
+                update=update,
+                batch_size=batch_size,
             )
             async_responses.extend(responses)

         for response in async_responses:
-            agg_response.update_response(response.json())
+            agg_response.update_response(response)

         return agg_response

@@ -395,25 +398,19 @@ class NucleusClient:
         self,
         dataset_id: str,
         payload: dict,
-        update: bool,
+        update: bool,  # TODO: understand how to pass this in.
         local_batch_size: int = 10,
-        size: int = 10,
     ):
-        def error(batch_items: dict) -> UploadResponse:
-            return UploadResponse(
-                {
-                    DATASET_ID_KEY: dataset_id,
-                    ERROR_ITEMS: len(batch_items),
-                    ERROR_PAYLOAD: batch_items,
-                }
-            )
-
-        def exception_handler(request, exception):
-            logger.error(exception)
-
-        def preprocess_payload(batch):
+        def get_files(batch):
             request_payload = [
-                (ITEMS_KEY, (None, json.dumps(batch), "application/json"))
+                (
+                    ITEMS_KEY,
+                    (
+                        None,
+                        json.dumps(batch, allow_nan=False),
+                        "application/json",
+                    ),
+                )
             ]
             for item in batch:
                 image = open(  # pylint: disable=R1732
@@ -430,27 +427,19 @@ class NucleusClient:

         items = payload[ITEMS_KEY]
         responses: List[Any] = []
-        request_payloads = []
+        files_per_request = []
         payload_items = []
         for i in range(0, len(items), local_batch_size):
             batch = items[i : i + local_batch_size]
-            batch_payload = preprocess_payload(batch)
-            request_payloads.append(batch_payload)
+            files_per_request.append(get_files(batch))
             payload_items.append(batch)

-        async_requests = [
-            self._make_grequest(
-                payload,
+        loop = asyncio.get_event_loop()
+        responses = loop.run_until_complete(
+            self.make_many_files_requests_asynchronously(
+                files_per_request,
                 f"dataset/{dataset_id}/append",
-                local=True,
             )
-            for payload in request_payloads
-        ]
-
-        async_responses = grequests.map(
-            async_requests,
-            exception_handler=exception_handler,
-            size=size,
         )

         def close_files(request_items):
@@ -460,69 +449,106 @@ class NucleusClient:
                     item[1][1].close()

         # don't forget to close all open files
-        for p in request_payloads:
+        for p in files_per_request:
             close_files(p)

-        # response object will be None if an error occurred
-        async_responses = [
-            response
-            if (response and response.status_code == 200)
-            else error(request_items)
-            for response, request_items in zip(async_responses, payload_items)
-        ]
-        responses.extend(async_responses)
-
         return responses

+    async def make_many_files_requests_asynchronously(
+        self, files_per_request, route
+    ):
+        """
+        Makes an async post request with files to a Nucleus endpoint.
+
+        :param files_per_request: A list of lists of tuples (name, (filename, file_pointer, content_type))
+            name will become the name by which the multer can build an array.
+        :param route: route for the request
+        :return: awaitable list(response)
+        """
+        async with aiohttp.ClientSession() as session:
+            tasks = [
+                asyncio.ensure_future(
+                    self._make_files_request(
+                        files=files, route=route, session=session
+                    )
+                )
+                for files in files_per_request
+            ]
+            return await asyncio.gather(*tasks)
+
+    async def _make_files_request(
+        self,
+        files,
+        route: str,
+        session: aiohttp.ClientSession,
+    ):
+        """
+        Makes an async post request with files to a Nucleus endpoint.
+
+        :param files: A list of tuples (name, (filename, file_pointer, file_type))
+        :param route: route for the request
+        :param session: Session to use for post.
+        :return: response
+        """
+        endpoint = f"{self.endpoint}/{route}"
+
+        logger.info("Posting to %s", endpoint)
+
+        form = aiohttp.FormData()
+
+        for file in files:
+            form.add_field(
+                name=file[0],
+                filename=file[1][0],
+                value=file[1][1],
+                content_type=file[1][2],
+            )
+
+        async with session.post(
+            endpoint,
+            data=form,
+            auth=aiohttp.BasicAuth(self.api_key, ""),
+            timeout=DEFAULT_NETWORK_TIMEOUT_SEC,
+        ) as response:
+            logger.info("API request has response code %s", response.status)
+
+            try:
+                data = await response.json()
+            except aiohttp.client_exceptions.ContentTypeError:
+                # In case of 404, the server returns text
+                data = await response.text()
+
+            if not response.ok:
+                self.handle_bad_response(
+                    endpoint,
+                    session.post,
+                    aiohttp_response=(response.status, response.reason, data),
+                )
+
+            return data
+
     def _process_append_requests(
         self,
         dataset_id: str,
         payload: dict,
         update: bool,
         batch_size: int = 20,
-        size: int = 10,
     ):
-        def default_error(payload: dict) -> UploadResponse:
-            return UploadResponse(
-                {
-                    DATASET_ID_KEY: dataset_id,
-                    ERROR_ITEMS: len(payload[ITEMS_KEY]),
-                    ERROR_PAYLOAD: payload[ITEMS_KEY],
-                }
-            )
-
-        def exception_handler(request, exception):
-            logger.error(exception)
-
         items = payload[ITEMS_KEY]
         payloads = [
             # batch_size images per request
-            {ITEMS_KEY: items[i : i + batch_size], FORCE_KEY: update}
+            {ITEMS_KEY: items[i : i + batch_size], UPDATE_KEY: update}
             for i in range(0, len(items), batch_size)
         ]

-        async_requests = [
-            self._make_grequest(
+        return [
+            self.make_request(
                 payload,
                 f"dataset/{dataset_id}/append",
-                local=False,
             )
             for payload in payloads
         ]

-        async_responses = grequests.map(
-            async_requests, exception_handler=exception_handler, size=size
-        )
-
-        async_responses = [
-            response
-            if (response and response.status_code == 200)
-            else default_error(payload)
-            for response, payload in zip(async_responses, payloads)
-        ]
-
-        return async_responses
-
     def annotate_dataset(
         self,
         dataset_id: str,
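Local uploads now fan out over a single `aiohttp.ClientSession` instead of a grequests pool, and the per-batch `UploadResponse` error objects are gone: `_make_files_request` raises through `handle_bad_response` instead of swallowing failures. A sketch of driving the new coroutine directly, with the files-tuple shape taken from the docstring above; the field names, ids, and paths are illustrative, and the image file is assumed to exist locally:

```python
import asyncio
from nucleus import NucleusClient

client = NucleusClient(api_key="live_xxx")  # placeholder key
files = [
    ("items", (None, '[{"reference_id": "img-1"}]', "application/json")),
    ("image", ("img.jpg", open("img.jpg", "rb"), "image/jpeg")),
]
responses = asyncio.get_event_loop().run_until_complete(
    client.make_many_files_requests_asynchronously(
        [files], "dataset/ds_def456/append"  # one files list per request
    )
)
```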
@@ -672,7 +698,9 @@ class NucleusClient:
         if response.get(STATUS_CODE_KEY, None):
             raise ModelRunCreationError(response.get("error"))

-        return ModelRun(response[MODEL_RUN_ID_KEY], self)
+        return ModelRun(
+            response[MODEL_RUN_ID_KEY], dataset_id=dataset_id, client=self
+        )

     def predict(
         self,
@@ -1068,61 +1096,20 @@ class NucleusClient:
             requests_command=requests.delete,
         )

-    def _make_grequest(
-        self,
-        payload: dict,
-        route: str,
-        session=None,
-        requests_command: Callable = grequests.post,
-        local=True,
-    ):
-        """
-        makes a grequest to Nucleus endpoint
-        :param payload: file dict for multipart-formdata
-        :param route: route for the request
-        :param session: requests.session
-        :param requests_command: grequests.post, grequests.get, grequests.delete
-        :return: An async grequest object
-        """
-        adapter = HTTPAdapter(max_retries=Retry(total=3))
-        sess = requests.Session()
-        sess.mount("https://", adapter)
-        sess.mount("http://", adapter)
-
-        endpoint = f"{self.endpoint}/{route}"
-        logger.info("Posting to %s", endpoint)
-
-        if local:
-            post = requests_command(
-                endpoint,
-                session=sess,
-                files=payload,
-                auth=(self.api_key, ""),
-                timeout=DEFAULT_NETWORK_TIMEOUT_SEC,
-            )
-        else:
-            post = requests_command(
-                endpoint,
-                session=sess,
-                json=payload,
-                headers={"Content-Type": "application/json"},
-                auth=(self.api_key, ""),
-                timeout=DEFAULT_NETWORK_TIMEOUT_SEC,
-            )
-        return post
-
-    def _make_request_raw(
-        self, payload: dict, endpoint: str, requests_command=requests.post
-    ):
+    def make_request(
+        self, payload: dict, route: str, requests_command=requests.post
+    ) -> dict:
         """
-        Makes a request to Nucleus endpoint. This method returns the raw
-        requests.Response object which is useful for unit testing.
+        Makes a request to Nucleus endpoint and logs a warning if not
+        successful.

         :param payload: given payload
-        :param endpoint: endpoint + route for the request
+        :param route: route for the request
         :param requests_command: requests.post, requests.get, requests.delete
-        :return: response
+        :return: response JSON
         """
+        endpoint = f"{self.endpoint}/{route}"
+
         logger.info("Posting to %s", endpoint)

         response = requests_command(
@@ -1134,28 +1121,18 @@ class NucleusClient:
         )
         logger.info("API request has response code %s", response.status_code)

-        return response
-
-    def make_request(
-        self, payload: dict, route: str, requests_command=requests.post
-    ) -> dict:
-        """
-        Makes a request to Nucleus endpoint and logs a warning if not
-        successful.
-
-        :param payload: given payload
-        :param route: route for the request
-        :param requests_command: requests.post, requests.get, requests.delete
-        :return: response JSON
-        """
-        endpoint = f"{self.endpoint}/{route}"
-
-        response = self._make_request_raw(payload, endpoint, requests_command)
-
         if not response.ok:
             self.handle_bad_response(endpoint, requests_command, response)

         return response.json()

-    def handle_bad_response(self, endpoint, requests_command, response):
-        raise NucleusAPIError(endpoint, requests_command, response)
+    def handle_bad_response(
+        self,
+        endpoint,
+        requests_command,
+        requests_response=None,
+        aiohttp_response=None,
+    ):
+        raise NucleusAPIError(
+            endpoint, requests_command, requests_response, aiohttp_response
+        )
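The `_make_grequest` helper and the `_make_request_raw`/`make_request` split collapse into a single synchronous `make_request` that returns decoded JSON, while `handle_bad_response` gains keyword slots so the same error path serves both the requests and aiohttp transports. Typical use, with an illustrative route:

```python
import requests
from nucleus import NucleusClient

client = NucleusClient(api_key="live_xxx")  # placeholder key
# Returns parsed JSON on success; raises NucleusAPIError otherwise.
info = client.make_request(
    {}, "dataset/ds_def456/info", requests_command=requests.get  # route illustrative
)
```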
nucleus/annotation.py CHANGED
@@ -1,7 +1,8 @@
 import json
 from dataclasses import dataclass
 from enum import Enum
-from typing import Any, Dict, List, Optional, Union
+from typing import Dict, List, Optional, Sequence, Union
+from nucleus.dataset_item import is_local_path

 from .constants import (
     ANNOTATION_ID_KEY,
@@ -13,6 +14,7 @@ from .constants import (
     INDEX_KEY,
     ITEM_ID_KEY,
     LABEL_KEY,
+    MASK_TYPE,
     MASK_URL_KEY,
     METADATA_KEY,
     POLYGON_TYPE,
@@ -51,7 +53,7 @@ class Annotation:
         )

     def to_json(self) -> str:
-        return json.dumps(self.to_payload())
+        return json.dumps(self.to_payload(), allow_nan=False)


 @dataclass
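`allow_nan=False` makes serialization fail fast: Python's json module would otherwise emit bare `NaN`/`Infinity` tokens, which are not valid JSON and would only fail later, server-side. A quick demonstration with contrived field values:

```python
from nucleus import BoxAnnotation

box = BoxAnnotation(
    label="car", x=float("nan"), y=0, width=10, height=10, reference_id="img-1"
)
box.to_json()  # now raises ValueError instead of producing invalid JSON
```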
@@ -108,6 +110,7 @@ class SegmentationAnnotation(Annotation):

     def to_payload(self) -> dict:
         payload = {
+            TYPE_KEY: MASK_TYPE,
             MASK_URL_KEY: self.mask_url,
             ANNOTATIONS_KEY: [ann.to_payload() for ann in self.annotations],
             ANNOTATION_ID_KEY: self.annotation_id,
@@ -171,11 +174,23 @@ class BoxAnnotation(Annotation):  # pylint: disable=R0902
         }


-# TODO: Add Generic type for 2D point
+@dataclass
+class Point:
+    x: float
+    y: float
+
+    @classmethod
+    def from_json(cls, payload: Dict[str, float]):
+        return cls(payload[X_KEY], payload[Y_KEY])
+
+    def to_payload(self) -> dict:
+        return {X_KEY: self.x, Y_KEY: self.y}
+
+
 @dataclass
 class PolygonAnnotation(Annotation):
     label: str
-    vertices: List[Any]
+    vertices: List[Point]
     reference_id: Optional[str] = None
     item_id: Optional[str] = None
     annotation_id: Optional[str] = None
@@ -184,13 +199,28 @@ class PolygonAnnotation(Annotation):
     def __post_init__(self):
         self._check_ids()
         self.metadata = self.metadata if self.metadata else {}
+        if len(self.vertices) > 0:
+            if not hasattr(self.vertices[0], X_KEY) or not hasattr(
+                self.vertices[0], "to_payload"
+            ):
+                try:
+                    self.vertices = [
+                        Point(x=vertex[X_KEY], y=vertex[Y_KEY])
+                        for vertex in self.vertices
+                    ]
+                except KeyError as ke:
+                    raise ValueError(
+                        "Use a point object to pass in vertices. For example, vertices=[nucleus.Point(x=1, y=2)]"
+                    ) from ke

     @classmethod
     def from_json(cls, payload: dict):
         geometry = payload.get(GEOMETRY_KEY, {})
         return cls(
             label=payload.get(LABEL_KEY, 0),
-            vertices=geometry.get(VERTICES_KEY, []),
+            vertices=[
+                Point.from_json(_) for _ in geometry.get(VERTICES_KEY, [])
+            ],
             reference_id=payload.get(REFERENCE_ID_KEY, None),
             item_id=payload.get(DATASET_ITEM_ID_KEY, None),
             annotation_id=payload.get(ANNOTATION_ID_KEY, None),
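Taken together, the new `Point` dataclass and the `__post_init__` coercion tighten `vertices` from `List[Any]` to typed points: a list of plain `{"x": ..., "y": ...}` dicts is converted in place, and anything else raises the `ValueError` above. A small sketch with an illustrative label and reference id:

```python
from nucleus import Point, PolygonAnnotation

triangle = PolygonAnnotation(
    label="roof",
    vertices=[Point(0, 0), Point(10, 0), Point(5, 8)],
    reference_id="img-1",
)
print(triangle.to_json())  # vertices serialize via Point.to_payload()
```

Note that the coercion only inspects the first vertex, so the list should be homogeneous: all `Point`s or all dicts.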
@@ -198,11 +228,25 @@ class PolygonAnnotation(Annotation):
         )

     def to_payload(self) -> dict:
-        return {
+        payload = {
             LABEL_KEY: self.label,
             TYPE_KEY: POLYGON_TYPE,
-            GEOMETRY_KEY: {VERTICES_KEY: self.vertices},
+            GEOMETRY_KEY: {
+                VERTICES_KEY: [_.to_payload() for _ in self.vertices]
+            },
             REFERENCE_ID_KEY: self.reference_id,
             ANNOTATION_ID_KEY: self.annotation_id,
             METADATA_KEY: self.metadata,
         }
+        return payload
+
+
+def check_all_annotation_paths_remote(
+    annotations: Sequence[Union[Annotation]],
+):
+    for annotation in annotations:
+        if hasattr(annotation, MASK_URL_KEY):
+            if is_local_path(getattr(annotation, MASK_URL_KEY)):
+                raise ValueError(
+                    f"Found an annotation with a local path, which cannot be uploaded asynchronously. Use a remote path instead. {annotation}"
+                )
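The new `check_all_annotation_paths_remote` guards the asynchronous upload path: any annotation whose `mask_url` points at local disk is rejected up front. A sketch, assuming the `SegmentationAnnotation` and `Segment` constructor fields in this release; the paths and ids are illustrative:

```python
from nucleus import Segment, SegmentationAnnotation
from nucleus.annotation import check_all_annotation_paths_remote

ann = SegmentationAnnotation(
    mask_url="/tmp/mask.png",  # local path, so the check below rejects it
    annotations=[Segment(label="car", index=1)],
    reference_id="img-1",
)
check_all_annotation_paths_remote([ann])  # raises ValueError
```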