unitlab 2.1.3__tar.gz → 2.1.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: unitlab
-Version: 2.1.3
+Version: 2.1.5
 Home-page: https://github.com/teamunitlab/unitlab-sdk
 Author: Unitlab Inc.
 Author-email: team@unitlab.ai
@@ -10,7 +10,6 @@ Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
@@ -2,7 +2,7 @@ from setuptools import find_packages, setup
 
 setup(
     name="unitlab",
-    version="2.1.3",
+    version="2.1.5",
     license="MIT",
     author="Unitlab Inc.",
     author_email="team@unitlab.ai",
@@ -14,7 +14,6 @@ setup(
         "Intended Audience :: Developers",
         "License :: OSI Approved :: MIT License",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
@@ -244,38 +244,13 @@ class UnitlabClient:
 
         asyncio.run(main())
 
-    def create_dataset(self, name, annotation_type, categories, license_id=None):
-        response = self._post(
-            "/api/sdk/datasets/create/",
-            data={
-                "name": name,
-                "annotation_type": annotation_type,
-                "classes": [
-                    {"name": category["name"], "value": category["id"]}
-                    for category in categories
-                ],
-                "license": license_id,
-            },
-        )
-        return response["pk"]
-
-    def finalize_dataset(self, dataset_id):
+    def _finalize_dataset(self, dataset_id):
         return self._post(f"/api/sdk/datasets/{dataset_id}/finalize/")
 
-    def dataset_upload(
-        self,
-        name,
-        annotation_type,
-        annotation_path,
-        data_path,
-        license_id=None,
-        batch_size=15,
+    def _dataset_data_upload(
+        self, dataset_id, upload_handler: DatasetUploadHandler, batch_size=15
     ):
-        handler = DatasetUploadHandler(annotation_type, annotation_path, data_path)
-        dataset_id = self.create_dataset(
-            name, annotation_type, handler.categories, license_id=license_id
-        )
-        image_ids = handler.getImgIds()
+        image_ids = upload_handler.getImgIds()
         url = urllib.parse.urljoin(
             self.api_url, f"/api/sdk/datasets/{dataset_id}/upload/"
         )
@@ -294,7 +269,7 @@ class UnitlabClient:
                     )
                 ]:
                     tasks.append(
-                        handler.upload_image(session, url, image_id)
+                        upload_handler.upload_image(session, url, image_id)
                    )
                 for f in asyncio.as_completed(tasks):
                     try:
@@ -305,4 +280,55 @@ class UnitlabClient:
                         raise e
 
         asyncio.run(main())
-        self.finalize_dataset(dataset_id)
+
+    def dataset_upload(
+        self,
+        name,
+        annotation_type,
+        annotation_path,
+        data_path,
+        license_id=None,
+        batch_size=15,
+    ):
+        upload_handler = DatasetUploadHandler(
+            annotation_type, annotation_path, data_path
+        )
+        dataset_id = self._post(
+            "/api/sdk/datasets/create/",
+            data={
+                "name": name,
+                "annotation_type": annotation_type,
+                "classes": [
+                    {"name": category["name"], "value": category["id"]}
+                    for category in upload_handler.categories
+                ],
+                "license": license_id,
+            },
+        )["pk"]
+        self._dataset_data_upload(dataset_id, upload_handler, batch_size=batch_size)
+        self._finalize_dataset(dataset_id)
+
+    def dataset_update(self, pk, annotation_path, data_path, batch_size=15):
+        dataset = self._get(f"api/sdk/datasets/{pk}/")
+        upload_handler = DatasetUploadHandler(
+            dataset["annotation_type"], annotation_path, data_path
+        )
+        new_dataset = self._post(
+            f"/api/sdk/datasets/{pk}/update/",
+            data={
+                "classes": [
+                    {"name": category["name"], "value": category["id"]}
+                    for category in sorted(
+                        upload_handler.loadCats(upload_handler.getCatIds()),
+                        key=lambda x: x["id"],
+                    )
+                ]
+            },
+        )
+        upload_handler.original_category_referecences = {
+            int(k): v for k, v in new_dataset["original_category_referecences"].items()
+        }
+        self._dataset_data_upload(
+            new_dataset["pk"], upload_handler, batch_size=batch_size
+        )
+        self._finalize_dataset(new_dataset["pk"])
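For context on the client refactor above: dataset creation and finalization are now private helpers, dataset_upload creates, fills, and finalizes a dataset in one call, and the new dataset_update pushes additional annotated data into an existing dataset. A minimal usage sketch, not taken from the package documentation; the import path, constructor argument, annotation type, file paths, and UUID are assumptions or placeholders:

from unitlab import UnitlabClient  # assumed import path for the SDK client

client = UnitlabClient(api_key="YOUR_API_KEY")  # placeholder credential

# Create a dataset from a COCO annotation file and its image directory;
# upload and finalization happen inside dataset_upload.
client.dataset_upload(
    name="demo-dataset",                  # placeholder name
    annotation_type="img_polygon",        # assumed: a value of the AnnotationType enum
    annotation_path="annotations.json",   # placeholder COCO json path
    data_path="images/",                  # placeholder directory with the images
)

# New in 2.1.5: add more annotated images to an existing dataset by its UUID.
client.dataset_update(
    pk="00000000-0000-0000-0000-000000000000",  # placeholder dataset UUID
    annotation_path="new_annotations.json",
    data_path="new_images/",
)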
@@ -1,4 +1,5 @@
 import asyncio
+import copy
 import itertools
 import json
 import logging
@@ -79,7 +80,9 @@ class COCO:
         self.catToImgs = catToImgs
         self.imgs = imgs
         self.cats = cats
-        self.categories = sorted(self.loadCats(self.getCatIds()), key=lambda x: x["id"])
+        self.categories = sorted(
+            copy.deepcopy(self.loadCats(self.getCatIds())), key=lambda x: x["id"]
+        )
         self.classes = [cat["name"] for cat in self.categories]
         self.original_category_referecences = dict()
         for i, category in enumerate(self.categories):
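A note on the copy.deepcopy added above (the rationale is an inference, not stated anywhere in the package): loadCats returns the category dicts stored in the COCO index itself, so relabelling self.categories later would otherwise mutate self.cats in place. A standalone sketch of that aliasing problem:

import copy

cats = {1: {"id": 1, "name": "car"}}   # stands in for the COCO cats index

aliased = sorted([cats[1]], key=lambda x: x["id"])
aliased[0]["name"] = "vehicle"          # also rewrites cats[1] in place
assert cats[1]["name"] == "vehicle"

isolated = sorted(copy.deepcopy([cats[1]]), key=lambda x: x["id"])
isolated[0]["name"] = "automobile"      # the original index stays untouched
assert cats[1]["name"] == "vehicle"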
@@ -283,9 +286,6 @@ class DatasetUploadHandler(COCO):
     def get_img_point_payload(self, anns):
         return self.get_img_semantic_segmentation_payload(anns)
 
-    def get_img_skeleton_payload(self, anns):
-        logger.warning("Not implemented yet")
-
     def get_payload(self, img_id):
         image = self.imgs[img_id]
         ann_ids = self.getAnnIds(imgIds=img_id)
@@ -321,6 +321,9 @@ class DatasetUploadHandler(COCO):
                    raise SubscriptionError(
                        "You have reached the maximum number of datasources for your subscription."
                    )
+                elif response.status == 400:
+                    logger.error(await response.text())
+                    return 0
                response.raise_for_status()
                return 1
         except SubscriptionError as e:
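The second hunk above changes how upload_image reacts to an HTTP 400 response: instead of raising and aborting the batch, the server's error body is logged and the item is counted as skipped (return value 0), while 403 still raises SubscriptionError. A minimal sketch of that status handling, assuming an aiohttp session and a simplified JSON payload (the names upload_one and payload are illustrative, not the SDK's):

import logging

import aiohttp

logger = logging.getLogger(__name__)


class SubscriptionError(Exception):
    """Stand-in for the SDK's SubscriptionError."""


async def upload_one(session: aiohttp.ClientSession, url: str, payload: dict) -> int:
    # Returns 1 for an accepted item, 0 for a rejected (HTTP 400) item.
    async with session.post(url, json=payload) as response:
        if response.status == 403:
            # Plan limit reached: abort the whole upload.
            raise SubscriptionError(
                "You have reached the maximum number of datasources for your subscription."
            )
        elif response.status == 400:
            # New in 2.1.5: log the validation error and skip this item.
            logger.error(await response.text())
            return 0
        response.raise_for_status()
        return 1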
@@ -10,7 +10,6 @@ class UnitlabError(Exception):
            message: An informative message about the exception.
            detail: The detail of the exception raised by Python or another library. Defaults to :obj:`None`.
        """
-
        super().__init__(message, detail)
        self.message = message
        self.detail = detail
@@ -37,7 +37,6 @@ class AnnotationType(str, Enum):
     IMG_POLYGON = "img_polygon"
     IMG_LINE = "img_line"
     IMG_POINT = "img_point"
-    IMG_SKELETON = "img_skeleton"
 
 
 @app.command()
@@ -116,6 +115,18 @@ def dataset_upload(
     )
 
 
+@dataset_app.command(name="update", help="Update dataset")
+def dataset_update(
+    pk: UUID,
+    api_key: API_KEY,
+    annotation_path: Annotated[Path, typer.Option(help="Path to the COCO json file")],
+    data_path: Annotated[
+        Path, typer.Option(help="Directory containing the data to be uploaded")
+    ],
+):
+    get_client(api_key).dataset_update(pk, annotation_path, data_path)
+
+
 @dataset_app.command(name="download", help="Download dataset")
 def dataset_download(
     pk: UUID,
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: unitlab
-Version: 2.1.3
+Version: 2.1.5
 Home-page: https://github.com/teamunitlab/unitlab-sdk
 Author: Unitlab Inc.
 Author-email: team@unitlab.ai
@@ -10,7 +10,6 @@ Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
6 files without changes