unitlab 2.0.4__tar.gz → 2.0.6__tar.gz
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- {unitlab-2.0.4/src/unitlab.egg-info → unitlab-2.0.6}/PKG-INFO +1 -1
- {unitlab-2.0.4 → unitlab-2.0.6}/setup.py +1 -1
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/client.py +122 -155
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/dataset.py +25 -23
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/exceptions.py +6 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/main.py +9 -42
- {unitlab-2.0.4 → unitlab-2.0.6/src/unitlab.egg-info}/PKG-INFO +1 -1
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab.egg-info/SOURCES.txt +0 -1
- unitlab-2.0.4/src/unitlab/utils.py +0 -52
- {unitlab-2.0.4 → unitlab-2.0.6}/LICENSE.md +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/README.md +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/setup.cfg +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/__init__.py +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/__main__.py +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab.egg-info/dependency_links.txt +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab.egg-info/entry_points.txt +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab.egg-info/requires.txt +0 -0
- {unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab.egg-info/top_level.txt +0 -0
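The headline change in 2.0.6 is the deletion of src/unitlab/utils.py: the module-level ENDPOINTS table and send_request() helper give way to _get/_post methods on UnitlabClient guarded by a new handle_exceptions decorator, the base URL becomes a constructor argument, and the list/detail methods grow a pretty flag. A minimal usage sketch based only on the signatures visible in this diff (the IDs and export type are placeholders):

    from unitlab.client import UnitlabClient

    # api_url is new in 2.0.6; when omitted it falls back to the
    # UNITLAB_BASE_URL environment variable, then to https://api.unitlab.ai.
    client = UnitlabClient(api_key="YOUR_API_KEY", api_url="https://api.unitlab.ai")

    print(client.projects(pretty=1))
    print(client.project_members("PROJECT_UUID", pretty=1))

    # Downloads annotations and returns the absolute path of dataset-<id>.json.
    path = client.dataset_download("DATASET_UUID", export_type="EXPORT_TYPE")
    print(path)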
{unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/client.py

@@ -2,6 +2,7 @@ import asyncio
 import glob
 import logging
 import os
+import urllib.parse
 
 import aiofiles
 import aiohttp
@@ -9,12 +10,28 @@ import requests
 import tqdm
 
 from .dataset import DatasetUploadHandler
-from .exceptions import AuthenticationError
-from .utils import BASE_URL, ENDPOINTS, send_request
+from .exceptions import AuthenticationError, NetworkError
 
 logger = logging.getLogger(__name__)
 
 
+def handle_exceptions(f):
+    def throw_exception(*args, **kwargs):
+        try:
+            r = f(*args, **kwargs)
+            r_status = r.status_code
+            if r_status == 400:
+                raise AuthenticationError(
+                    "Please provide the api_key argument or set UNITLAB_API_KEY in your environment."
+                )
+            r.raise_for_status()
+            return r.json()
+        except requests.exceptions.RequestException as e:
+            raise NetworkError(str(e))
+
+    return throw_exception
+
+
 class UnitlabClient:
     """A client with a connection to the Unitlab.ai platform.
 
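The handle_exceptions decorator introduced above centralizes what every request helper used to repeat: HTTP 400 becomes AuthenticationError, other HTTP errors surface through raise_for_status(), transport failures are wrapped in NetworkError, and callers receive parsed JSON instead of a Response object. A standalone sketch of the same pattern (the fetch function and simplified messages are illustrative, not part of the package):

    import requests

    class AuthenticationError(Exception): ...
    class NetworkError(Exception): ...

    def handle_exceptions(f):
        def throw_exception(*args, **kwargs):
            try:
                r = f(*args, **kwargs)  # f must return a requests.Response
                if r.status_code == 400:
                    raise AuthenticationError("Invalid or missing API key.")
                r.raise_for_status()  # other 4xx/5xx raise HTTPError here
                return r.json()  # callers get parsed JSON, never a raw Response
            except requests.exceptions.RequestException as e:
                # HTTPError subclasses RequestException, so HTTP failures and
                # transport failures (DNS, timeouts) alike become NetworkError.
                raise NetworkError(str(e))

        return throw_exception

    @handle_exceptions
    def fetch(url):
        return requests.get(url, timeout=10)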
@@ -48,7 +65,7 @@ class UnitlabClient:
         :exc:`~unitlab.exceptions.AuthenticationError`: If an invalid API key is used or (when not passing the API key directly) if ``UNITLAB_API_KEY`` is not found in your environment.
     """
 
-    def __init__(self, api_key: str = None):
+    def __init__(self, api_key: str = None, api_url: str = None):
        if api_key is None:
            api_key = os.getenv("UNITLAB_API_KEY")
            if api_key is None:
@@ -56,7 +73,11 @@ class UnitlabClient:
                     message="Please provide the api_key argument or set UNITLAB_API_KEY in your environment."
                 )
             logger.info("Found a Unitlab API key in your environment.")
+        if api_url is None:
+            api_url = os.environ.get("UNITLAB_BASE_URL", "https://api.unitlab.ai")
+
         self.api_key = api_key
+        self.api_url = api_url
         self.api_session = requests.Session()
         adapter = requests.adapters.HTTPAdapter(max_retries=3)
         self.api_session.mount("http://", adapter)
@@ -96,152 +117,111 @@ class UnitlabClient:
     def _get_headers(self):
         return {"Authorization": f"Api-Key {self.api_key}"} if self.api_key else None
 
-    def projects(self):
-        response = send_request(
-            {
-                "method": "GET",
-                "endpoint": ENDPOINTS["projects"],
-                "headers": self._get_headers(),
-            },
-            session=self.api_session,
-        )
-        return response.json()
-
-    def project(self, project_id):
-        response = send_request(
-            {
-                "method": "GET",
-                "endpoint": ENDPOINTS["project"].format(project_id),
-                "headers": self._get_headers(),
-            },
-            session=self.api_session,
+    @handle_exceptions
+    def _get(self, endpoint):
+        return self.api_session.get(
+            urllib.parse.urljoin(self.api_url, endpoint), headers=self._get_headers()
         )
-        return response.json()
-
-    def project_members(self, project_id):
-        response = send_request(
-            {
-                "method": "GET",
-                "endpoint": ENDPOINTS["project_members"].format(project_id),
-                "headers": self._get_headers(),
-            },
-            session=self.api_session,
+
+    @handle_exceptions
+    def _post(self, endpoint, data):
+        return self.api_session.post(
+            urllib.parse.urljoin(self.api_url, endpoint),
+            json=data,
+            headers=self._get_headers(),
         )
-        return response.json()
+
+    def projects(self, pretty=0):
+        return self._get(f"/api/sdk/projects/?pretty={pretty}")
+
+    def project(self, project_id, pretty=0):
+        return self._get(f"/api/sdk/projects/{project_id}/?pretty={pretty}")
+
+    def project_members(self, project_id, pretty=0):
+        return self._get(f"/api/sdk/projects/{project_id}/members/?pretty={pretty}")
 
     def upload_data(self, project_id, directory, batch_size=100):
         if not os.path.isdir(directory):
             raise ValueError(f"Directory {directory} does not exist")
 
-
-
-
-        "
-        "
-
-
-
-
-
+        files = [
+            file
+            for files_list in (
+                glob.glob(os.path.join(directory, "") + extension)
+                for extension in ["*jpg", "*png", "*jpeg", "*webp"]
+            )
+            for file in files_list
+        ]
+        filtered_files = []
+        for file in files:
+            file_size = os.path.getsize(file) / 1024 / 1024
+            if file_size > 6:
+                logger.warning(
+                    f"File {file} is too large ({file_size:.4f} megabytes) skipping, max size is 6 MB"
+                )
+                continue
+            filtered_files.append(file)
+
+        num_files = len(filtered_files)
+        num_batches = (num_files + batch_size - 1) // batch_size
 
         async def post_file(
             session: aiohttp.ClientSession, file: str, project_id: str, retries=3
         ):
-            async with aiofiles.open(file, "rb") as f:
-                for _ in range(retries):
-                    try:
-                        await asyncio.sleep(0.1)
-                        response = await session.post(
-                            url=URL,
-                            data=aiohttp.FormData(
-                                fields={"project": project_id, "file": f}
-                            ),
-                        )
-                        response.raise_for_status()
-                        return 1 if response.status == 201 else 0
-                    except aiohttp.client_exceptions.ServerDisconnectedError as e:
-                        logger.warning(f"Error: {e}: Retrying...")
-                        await asyncio.sleep(0.1)
-                        continue
-                    except Exception as e:
-                        logger.error(f"Error uploading file {file} - {e}")
-                        return 0
-
-        async def batch_upload(
-            session: aiohttp.ClientSession,
-            batch: list,
-            project_id: str,
-            pbar: tqdm.tqdm,
-        ):
-            tasks = []
-            for file in batch:
-                tasks.append(
-                    post_file(session=session, file=file, project_id=project_id)
+            async with aiofiles.open(file, "rb") as f:
+                form_data = aiohttp.FormData()
+                form_data.add_field("project", project_id)
+                form_data.add_field(
+                    "file", await f.read(), filename=os.path.basename(file)
                 )
-            for f in asyncio.as_completed(tasks):
-                pbar.update(await f)
+            for _ in range(retries):
+                try:
+                    await asyncio.sleep(0.1)
+                    async with session.post(
+                        urllib.parse.urljoin(self.api_url, "/api/sdk/upload-data/"),
+                        data=form_data,
+                    ) as response:
+                        response.raise_for_status()
+                        return 1
+                except aiohttp.ServerDisconnectedError as e:
+                    logger.warning(f"Server disconnected: {e}, Retrying...")
+                    await asyncio.sleep(0.1)
+                    continue
+                except Exception as e:
+                    logger.error(f"Error uploading file {file} - {e}")
+                    return 0
 
         async def main():
-            files = [
-                file
-                for files_list in (
-                    glob.glob(os.path.join(directory, "") + extension)
-                    for extension in ["*jpg", "*png", "*jpeg", "*webp"]
-                )
-                for file in files_list
-            ]
-            filtered_files = []
-            for file in files:
-                file_size = os.path.getsize(file) / 1024 / 1024
-                if file_size > 6:
-                    logger.warning(
-                        f"File {file} is too large ({file_size:.4f} megabytes) skipping, max size is 6 MB"
-                    )
-                    continue
-                filtered_files.append(file)
-
-            num_files = len(filtered_files)
-            num_batches = (num_files + batch_size - 1) // batch_size
-
             logger.info(f"Uploading {num_files} files to project {project_id}")
             with tqdm.tqdm(total=num_files, ncols=80) as pbar:
                 async with aiohttp.ClientSession(
                     headers=self._get_headers()
                 ) as session:
                     for i in range(num_batches):
-                        await batch_upload(
-                            session=session,
-                            batch=filtered_files[
-                                i * batch_size : min((i + 1) * batch_size, num_files)
-                            ],
-                            project_id=project_id,
-                            pbar=pbar,
-                        )
+                        tasks = []
+                        for file in filtered_files[
+                            i * batch_size : min((i + 1) * batch_size, num_files)
+                        ]:
+                            tasks.append(
+                                post_file(
+                                    session=session, file=file, project_id=project_id
+                                )
+                            )
+                        for f in asyncio.as_completed(tasks):
+                            pbar.update(await f)
 
         asyncio.run(main())
 
-    def datasets(self):
-        response = send_request(
-            {
-                "method": "GET",
-                "endpoint": ENDPOINTS["datasets"],
-                "headers": self._get_headers(),
-            },
-            session=self.api_session,
-        )
-        return response.json()
+    def datasets(self, pretty=0):
+        return self._get(f"/api/sdk/datasets/?pretty={pretty}")
 
     def dataset_download(self, dataset_id, export_type):
-        response = send_request(
-            {
-                "method": "POST",
-                "endpoint": ENDPOINTS["dataset"].format(dataset_id),
-                "headers": self._get_headers(),
-                "json": {"download_type": "annotation", "export_type": export_type},
-            },
-            session=self.api_session,
+        response = self._post(
+            f"/api/sdk/datasets/{dataset_id}/",
+            data={"download_type": "annotation", "export_type": export_type},
         )
-        with self.api_session.get(url=response.json()["file"], stream=True) as r:
+
+        with self.api_session.get(url=response["file"], stream=True) as r:
             r.raise_for_status()
             filename = f"dataset-{dataset_id}.json"
             with open(filename, "wb") as f:
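The rewritten upload_data above batches its work rather than firing one request per file all at once: each batch becomes a list of coroutines that is drained with asyncio.as_completed, so the tqdm bar ticks as individual files finish instead of once per batch. The same pattern in isolation (upload is a dummy stand-in for the real post_file):

    import asyncio

    import tqdm

    async def upload(item):
        # Dummy stand-in for post_file(): pretend to POST one file and
        # report 1 for success (0 would mean failure).
        await asyncio.sleep(0.01)
        return 1

    async def run(items, batch_size=100):
        num_batches = (len(items) + batch_size - 1) // batch_size  # ceiling division
        with tqdm.tqdm(total=len(items), ncols=80) as pbar:
            for i in range(num_batches):
                batch = items[i * batch_size : (i + 1) * batch_size]
                tasks = [upload(item) for item in batch]
                for fut in asyncio.as_completed(tasks):
                    pbar.update(await fut)  # tick per completed upload, not per batch

    asyncio.run(run(list(range(250))))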
@@ -251,20 +231,14 @@ class UnitlabClient:
         return os.path.abspath(filename)
 
     def download_dataset_files(self, dataset_id):
-        response = send_request(
-            {
-                "method": "POST",
-                "endpoint": ENDPOINTS["dataset"].format(dataset_id),
-                "headers": self._get_headers(),
-                "json": {"download_type": "files"},
-            },
-            session=self.api_session,
+        response = self._post(
+            f"/api/sdk/datasets/{dataset_id}/", data={"download_type": "files"}
         )
         folder = f"dataset-files-{dataset_id}"
         os.makedirs(folder, exist_ok=True)
         dataset_files = [
             dataset_file
-            for dataset_file in response.json()
+            for dataset_file in response
             if not os.path.isfile(os.path.join(folder, dataset_file["file_name"]))
         ]
 
@@ -285,22 +259,21 @@ class UnitlabClient:
                 return 1
 
         async def main():
-
-
-
-
-
-
+            with tqdm.tqdm(total=len(dataset_files), ncols=80) as pbar:
+                async with aiohttp.ClientSession() as session:
+                    tasks = [
+                        download_file(session=session, dataset_file=dataset_file)
+                        for dataset_file in dataset_files
+                    ]
                    for f in asyncio.as_completed(tasks):
                        pbar.update(await f)
 
         asyncio.run(main())
 
     def create_dataset(self, name, annotation_type, categories):
-        response = self.
-
-
-            json={
+        response = self._post(
+            "/api/sdk/datasets/create/",
+            data={
                 "name": name,
                 "annotation_type": annotation_type,
                 "classes": [
@@ -309,35 +282,29 @@ class UnitlabClient:
                 ],
             },
         )
-        response.raise_for_status()
-        response = response.json()
         return response["pk"]
 
     def dataset_upload(
-        self, name, annotation_type, annotation_path, data_path, batch_size=
+        self, name, annotation_type, annotation_path, data_path, batch_size=15
     ):
-        import random
-
         handler = DatasetUploadHandler(annotation_type, annotation_path, data_path)
         dataset_id = self.create_dataset(name, annotation_type, handler.categories)
-
-
-
-
+        image_ids = handler.getImgIds()
+        url = urllib.parse.urljoin(
+            self.api_url, f"/api/sdk/datasets/{dataset_id}/upload/"
+        )
 
         async def main():
             with tqdm.tqdm(total=len(image_ids), ncols=80) as pbar:
                 async with aiohttp.ClientSession(
                     headers=self._get_headers()
                 ) as session:
-                    for i in range(
+                    for i in range((len(image_ids) + batch_size - 1) // batch_size):
                         tasks = []
                         for image_id in image_ids[
                             i * batch_size : min((i + 1) * batch_size, len(image_ids))
                         ]:
-                            tasks.append(
-                                handler.upload_image(session, dataset_id, image_id)
-                            )
+                            tasks.append(handler.upload_image(session, url, image_id))
                         for f in asyncio.as_completed(tasks):
                             pbar.update(await f)
 
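Putting the rewired client methods together, a dataset upload in 2.0.6 creates the dataset from the annotation file's categories, then streams images to /api/sdk/datasets/<dataset_id>/upload/ in batches of 15. A hedged call sketch, reusing the client constructed earlier (paths and names are placeholders; the annotation file is COCO-style JSON, as the COCO base class in dataset.py below suggests):

    client.dataset_upload(
        name="my-dataset",                   # placeholder name
        annotation_type="img_bbox",          # one of the AnnotationType values in main.py
        annotation_path="annotations.json",  # COCO-style annotation file (placeholder path)
        data_path="images/",                 # directory holding the referenced images
        batch_size=15,                       # the new 2.0.6 default
    )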
{unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/dataset.py

@@ -1,3 +1,4 @@
+import asyncio
 import itertools
 import json
 import logging
@@ -7,8 +8,6 @@ from collections import defaultdict
 import aiofiles
 import aiohttp
 
-from .utils import BASE_URL
-
 logger = logging.getLogger(__name__)
 
 
@@ -132,7 +131,7 @@ class COCO:
                 if ann["area"] > areaRng[0] and ann["area"] < areaRng[1]
             ]
         )
-        if not iscrowd == None:
+        if iscrowd:
             ids = [ann["id"] for ann in anns if ann["iscrowd"] == iscrowd]
         else:
             ids = [ann["id"] for ann in anns]
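One behavioral nuance in this hunk: `if iscrowd:` is falsy for both None and 0, so calling getAnnIds with iscrowd=0 now falls through to the unfiltered branch rather than selecting only non-crowd annotations (under the classic pycocotools guard, `if not iscrowd == None:`, 0 counts as an active filter). A tiny illustration:

    for iscrowd in (None, 0, 1):
        # 2.0.6 guard: only truthy values (e.g. 1) trigger the filtered branch
        branch = "filtered by iscrowd" if iscrowd else "all annotation ids"
        print(f"iscrowd={iscrowd!r} -> {branch}")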
@@ -201,7 +200,7 @@ class COCO:
         """
         if self._is_array_like(ids):
             return [self.anns[id] for id in ids]
-        elif type(ids) == int:
+        elif isinstance(ids, int):
             return [self.anns[ids]]
 
     def loadCats(self, ids=[]):
@@ -212,7 +211,7 @@
         """
         if self._is_array_like(ids):
             return [self.cats[id] for id in ids]
-        elif type(ids) == int:
+        elif isinstance(ids, int):
             return [self.cats[ids]]
 
     def loadImgs(self, ids=[]):
@@ -223,7 +222,7 @@
         """
         if self._is_array_like(ids):
             return [self.imgs[id] for id in ids]
-        elif type(ids) == int:
+        elif isinstance(ids, int):
             return [self.imgs[ids]]
 
 
@@ -278,26 +277,29 @@ class DatasetUploadHandler(COCO):
         if len(anns) == 0:
             logger.warning("No annotations found for image: {}".format(img_id))
             return
-        return self.
+        return getattr(self, f"get_{self.annotation_type}_payload")(anns)
 
-    async def upload_image(self, session, dataset_id, image_id):
+    async def upload_image(self, session, url, image_id, retries=3):
         image = self.loadImgs(image_id)[0]
         file_name = image["file_name"]
         payload = self.get_payload(image_id)
         if payload:
-
-
-
-
-
-
-
-
-
-            data=form_data
-
-
-
-
-
+            async with aiofiles.open(
+                os.path.join(self.data_path, file_name), "rb"
+            ) as f:
+                form_data = aiohttp.FormData()
+                form_data.add_field("file", await f.read(), filename=file_name)
+                form_data.add_field("result", self.get_payload(image_id))
+            for _ in range(retries):
+                try:
+                    await asyncio.sleep(0.1)
+                    async with session.post(url, data=form_data) as response:
+                        response.raise_for_status()
+                        return 1
+                except aiohttp.ServerDisconnectedError as e:
+                    logger.warning(f"Server disconnected - {e}, retrying...")
+                    await asyncio.sleep(0.1)
+                    continue
+                except Exception as e:
+                    logger.error(f"Error uploading file {file_name} - {e}")
        return 0
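The get_payload dispatch above resolves a method name from the annotation type at runtime: getattr(self, f"get_{self.annotation_type}_payload")(anns) turns, say, "img_bbox" into a call to get_img_bbox_payload. A minimal sketch of the idiom (the Handler class and payload shapes are illustrative):

    class Handler:
        # Illustrative stand-in for DatasetUploadHandler's dispatch.
        def __init__(self, annotation_type):
            self.annotation_type = annotation_type

        def get_img_bbox_payload(self, anns):
            return {"type": "bbox", "count": len(anns)}

        def get_img_polygon_payload(self, anns):
            return {"type": "polygon", "count": len(anns)}

        def get_payload(self, anns):
            # Resolve get_<annotation_type>_payload by name; an unknown
            # annotation type raises AttributeError here.
            return getattr(self, f"get_{self.annotation_type}_payload")(anns)

    print(Handler("img_bbox").get_payload([{"bbox": [0, 0, 10, 10]}]))
    # -> {'type': 'bbox', 'count': 1}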
{unitlab-2.0.4 → unitlab-2.0.6}/src/unitlab/main.py

@@ -1,13 +1,11 @@
 from enum import Enum
 from pathlib import Path
-from typing import Optional
 from uuid import UUID
 
 import typer
 from typing_extensions import Annotated
 
 from .client import UnitlabClient
-from .utils import ENDPOINTS, send_request
 
 app = typer.Typer()
 project_app = typer.Typer()
@@ -25,10 +23,10 @@ class DownloadType(str, Enum):
 
 
 class AnnotationType(str, Enum):
-
-
-
-
+    IMG_BBOX = "img_bbox"
+    IMG_POLYGON = "img_polygon"
+    IMG_SEMANTIC_SEGMENTATION = "img_semantic_segmentation"
+    IMG_SKELETON = "img_skeleton"
 
 
 def get_client(api_key: str) -> UnitlabClient:
@@ -41,38 +39,17 @@ def get_headers(api_key):
 
 @project_app.command(name="list", help="Project list")
 def project_list(api_key: API_KEY):
-    response = send_request(
-        {
-            "method": "GET",
-            "headers": get_headers(api_key),
-            "endpoint": ENDPOINTS["cli_projects"],
-        }
-    )
-    print(response.json())
+    print(get_client(api_key).projects(pretty=1))
 
 
 @project_app.command(name="detail", help="Project detail")
 def project_detail(pk: UUID, api_key: API_KEY):
-    response = send_request(
-        {
-            "method": "GET",
-            "headers": get_headers(api_key),
-            "endpoint": ENDPOINTS["cli_project"].format(pk),
-        }
-    )
-    print(response.json())
+    print(get_client(api_key).project(project_id=pk, pretty=1))
 
 
 @project_app.command(help="Project members")
 def members(pk: UUID, api_key: API_KEY):
-    response = send_request(
-        {
-            "method": "GET",
-            "headers": get_headers(api_key),
-            "endpoint": ENDPOINTS["cli_project_members"].format(pk),
-        }
-    )
-    print(response.json())
+    print(get_client(api_key).project_members(project_id=pk, pretty=1))
 
 
 @project_app.command(help="Upload data")
@@ -94,14 +71,7 @@ if __name__ == "__main__":
 def dataset_list(
     api_key: API_KEY,
 ):
-    response = send_request(
-        {
-            "method": "GET",
-            "headers": get_headers(api_key),
-            "endpoint": ENDPOINTS["cli_datasets"],
-        }
-    )
-    print(response.json())
+    print(get_client(api_key).datasets(pretty=1))
 
 
 @dataset_app.command(name="upload", help="Upload dataset")
@@ -118,12 +88,9 @@ def dataset_upload(
     data_path: Annotated[
         Path, typer.Option(help="Directory containing the data to be uploaded")
     ],
-    batch_size: Annotated[
-        int, typer.Option(help="Batch size for uploading images")
-    ] = 100,
 ):
     get_client(api_key).dataset_upload(
-        name, annotation_type, annotation_path, data_path
+        name, annotation_type.value, annotation_path, data_path
     )
 
 
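Two CLI changes land here: the --batch-size option is dropped, and dataset_upload now receives annotation_type.value, the plain string behind the str-backed Enum, rather than the Enum member itself. A reduced sketch of that wiring (the command and module names are illustrative):

    from enum import Enum

    import typer

    app = typer.Typer()

    class AnnotationType(str, Enum):
        IMG_BBOX = "img_bbox"
        IMG_POLYGON = "img_polygon"

    @app.command()
    def upload(annotation_type: AnnotationType):
        # typer hands the function the Enum member; .value recovers the
        # plain string ("img_bbox") that downstream code expects.
        print(type(annotation_type).__name__, "->", annotation_type.value)

    if __name__ == "__main__":
        app()  # e.g. `python cli.py img_bbox`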
unitlab-2.0.4/src/unitlab/utils.py (deleted)

@@ -1,52 +0,0 @@
-import os
-
-import requests
-
-from .exceptions import AuthenticationError
-
-ENDPOINTS = {
-    "check": "/api/check/",
-    "projects": "/api/sdk/projects/",
-    "project": "/api/sdk/projects/{}/",
-    "project_members": "/api/sdk/projects/{}/members/",
-    "upload_data": "/api/sdk/upload-data/",
-    "datasets": "/api/sdk/datasets/",
-    "dataset": "/api/sdk/datasets/{}/",
-    "cli_projects": "/api/cli/projects/",
-    "cli_project": "/api/cli/projects/{}/",
-    "cli_project_members": "/api/cli/projects/{}/members/",
-    "cli_datasets": "/api/cli/datasets/",
-}
-BASE_URL = os.environ.get("UNITLAB_BASE_URL", "https://api.unitlab.ai")
-
-
-def send_request(request, session=None):
-    endpoint = request.pop("endpoint")
-    if os.environ.get("UNITLAB_BASE_URL"):
-        request["url"] = os.environ.get("UNITLAB_BASE_URL") + endpoint
-        response = (
-            session.request(**request) if session else requests.request(**request)
-        )
-        if response.ok:
-            return response
-        if response.status_code == 401:
-            raise AuthenticationError("Invalid API key")
-        elif response.status_code == 400:
-            raise Exception(response.json())
-        response.raise_for_status()
-        return response
-    else:
-        request["url"] = "https://api.unitlab.ai" + endpoint
-        response = (
-            session.request(**request) if session else requests.request(**request)
-        )
-        if response.ok:
-            os.environ["UNITLAB_BASE_URL"] = "https://api.unitlab.ai"
-            return response
-
-        if response.status_code == 401:
-            raise AuthenticationError("Invalid API key")
-        elif response.status_code == 400:
-            raise Exception(response.json())
-        response.raise_for_status()
-        return response
The remaining files (LICENSE.md, README.md, setup.cfg, src/unitlab/__init__.py, src/unitlab/__main__.py, and the egg-info dependency_links.txt, entry_points.txt, requires.txt, and top_level.txt) are unchanged between 2.0.4 and 2.0.6.