supervisely 6.73.220__py3-none-any.whl → 6.73.222__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of supervisely might be problematic. Click here for more details.

Files changed (27):
  1. supervisely/api/api.py +609 -3
  2. supervisely/api/file_api.py +574 -14
  3. supervisely/api/image_api.py +469 -0
  4. supervisely/api/pointcloud/pointcloud_api.py +390 -1
  5. supervisely/api/video/video_api.py +231 -1
  6. supervisely/api/volume/volume_api.py +223 -2
  7. supervisely/app/development/__init__.py +1 -0
  8. supervisely/app/development/development.py +96 -2
  9. supervisely/app/fastapi/subapp.py +19 -4
  10. supervisely/convert/base_converter.py +53 -4
  11. supervisely/convert/converter.py +6 -5
  12. supervisely/convert/image/image_converter.py +26 -13
  13. supervisely/convert/image/sly/fast_sly_image_converter.py +4 -0
  14. supervisely/convert/image/sly/sly_image_converter.py +9 -4
  15. supervisely/convert/pointcloud_episodes/sly/sly_pointcloud_episodes_converter.py +7 -1
  16. supervisely/convert/video/sly/sly_video_converter.py +9 -1
  17. supervisely/convert/video/video_converter.py +44 -23
  18. supervisely/io/fs.py +125 -0
  19. supervisely/io/fs_cache.py +19 -1
  20. supervisely/io/network_exceptions.py +20 -3
  21. supervisely/task/progress.py +1 -1
  22. {supervisely-6.73.220.dist-info → supervisely-6.73.222.dist-info}/METADATA +3 -1
  23. {supervisely-6.73.220.dist-info → supervisely-6.73.222.dist-info}/RECORD +27 -27
  24. {supervisely-6.73.220.dist-info → supervisely-6.73.222.dist-info}/LICENSE +0 -0
  25. {supervisely-6.73.220.dist-info → supervisely-6.73.222.dist-info}/WHEEL +0 -0
  26. {supervisely-6.73.220.dist-info → supervisely-6.73.222.dist-info}/entry_points.txt +0 -0
  27. {supervisely-6.73.220.dist-info → supervisely-6.73.222.dist-info}/top_level.txt +0 -0
@@ -2,21 +2,21 @@ import os
2
2
  from typing import Dict, Optional
3
3
 
4
4
  import supervisely.convert.image.sly.sly_image_helper as sly_image_helper
5
- from supervisely.convert.image.image_helper import validate_image_bounds
6
5
  from supervisely import (
7
6
  Annotation,
8
7
  Dataset,
8
+ Label,
9
9
  OpenMode,
10
10
  Project,
11
11
  ProjectMeta,
12
12
  Rectangle,
13
- Label,
14
13
  logger,
15
14
  )
16
15
  from supervisely._utils import generate_free_name
17
16
  from supervisely.api.api import Api
18
17
  from supervisely.convert.base_converter import AvailableImageConverters
19
18
  from supervisely.convert.image.image_converter import ImageConverter
19
+ from supervisely.convert.image.image_helper import validate_image_bounds
20
20
  from supervisely.io.fs import dirs_filter, file_exists, get_file_ext
21
21
  from supervisely.io.json import load_json_file
22
22
  from supervisely.project.project import find_project_dirs
@@ -31,6 +31,7 @@ class SLYImageConverter(ImageConverter):
31
31
  def __init__(self, *args, **kwargs):
32
32
  super().__init__(*args, **kwargs)
33
33
  self._project_structure = None
34
+ self._supports_links = True
34
35
 
35
36
  def __str__(self):
36
37
  return AvailableImageConverters.SLY
@@ -74,6 +75,8 @@ class SLYImageConverter(ImageConverter):
74
75
  return False
75
76
 
76
77
  def validate_format(self) -> bool:
78
+ if self.upload_as_links and self._supports_links:
79
+ self._download_remote_ann_files()
77
80
  if self.read_sly_project(self._input_data):
78
81
  return True
79
82
 
@@ -136,6 +139,8 @@ class SLYImageConverter(ImageConverter):
136
139
  meta = self._meta
137
140
 
138
141
  if item.ann_data is None:
142
+ if self._upload_as_links:
143
+ item.set_shape([None, None])
139
144
  return item.create_empty_annotation()
140
145
 
141
146
  try:
@@ -151,7 +156,7 @@ class SLYImageConverter(ImageConverter):
151
156
  )
152
157
  return Annotation.from_json(ann_json, meta).clone(labels=labels)
153
158
  except Exception as e:
154
- logger.warn(f"Failed to convert annotation: {repr(e)}")
159
+ logger.warning(f"Failed to convert annotation: {repr(e)}")
155
160
  return item.create_empty_annotation()
156
161
 
157
162
  def read_sly_project(self, input_data: str) -> bool:
@@ -163,7 +168,7 @@ class SLYImageConverter(ImageConverter):
163
168
  logger.debug("Trying to find Supervisely project format in the input data")
164
169
  project_dirs = [d for d in find_project_dirs(input_data)]
165
170
  if len(project_dirs) > 1:
166
- logger.info("Found multiple Supervisely projects")
171
+ logger.info("Found multiple possible Supervisely projects in the input data")
167
172
  meta = None
168
173
  for project_dir in project_dirs:
169
174
  project_fs = Project(project_dir, mode=OpenMode.READ)
@@ -71,11 +71,17 @@ class SLYPointcloudEpisodesConverter(PointcloudEpisodeConverter):
71
71
  continue
72
72
 
73
73
  ext = get_file_ext(full_path)
74
+ recognized_ext = imghdr.what(full_path)
74
75
  if file in JUNK_FILES:
75
76
  continue
76
77
  elif ext in self.ann_ext:
77
78
  rimg_json_dict[file] = full_path
78
- elif imghdr.what(full_path):
79
+ elif recognized_ext:
80
+ if ext.lower() == ".pcd":
81
+ logger.warning(
82
+ f"File '{file}' has been recognized as '.{recognized_ext}' format. Skipping."
83
+ )
84
+ continue
79
85
  rimg_dict[file] = full_path
80
86
  if ext not in used_img_ext:
81
87
  used_img_ext.append(ext)
@@ -12,6 +12,10 @@ from supervisely.video.video import validate_ext as validate_video_ext
12
12
 
13
13
  class SLYVideoConverter(VideoConverter):
14
14
 
15
+ def __init__(self, *args, **kwargs):
16
+ super().__init__(*args, **kwargs)
17
+ self._supports_links = True
18
+
15
19
  def __str__(self) -> str:
16
20
  return AvailableVideoConverters.SLY
17
21
 
@@ -45,6 +49,8 @@ class SLYVideoConverter(VideoConverter):
45
49
  return False
46
50
 
47
51
  def validate_format(self) -> bool:
52
+ if self.upload_as_links and self._supports_links:
53
+ self._download_remote_ann_files()
48
54
  detected_ann_cnt = 0
49
55
  videos_list, ann_dict = [], {}
50
56
  for root, _, files in os.walk(self._input_data):
@@ -103,6 +109,8 @@ class SLYVideoConverter(VideoConverter):
103
109
  meta = self._meta
104
110
 
105
111
  if item.ann_data is None:
112
+ if self._upload_as_links:
113
+ return None
106
114
  return item.create_empty_annotation()
107
115
 
108
116
  try:
@@ -113,5 +121,5 @@ class SLYVideoConverter(VideoConverter):
113
121
  ann_json = sly_video_helper.rename_in_json(ann_json, renamed_classes, renamed_tags)
114
122
  return VideoAnnotation.from_json(ann_json, meta)
115
123
  except Exception as e:
116
- logger.warn(f"Failed to convert annotation: {repr(e)}")
124
+ logger.warning(f"Failed to convert annotation: {repr(e)}")
117
125
  return item.create_empty_annotation()
@@ -1,6 +1,6 @@
1
1
  import os
2
2
  import subprocess
3
- from typing import Dict, Optional, Union
3
+ from typing import Dict, Optional, Tuple, Union
4
4
 
5
5
  import cv2
6
6
  import magic
@@ -57,10 +57,22 @@ class VideoConverter(BaseConverter):
57
57
  self._frame_count = frame_count
58
58
  self._custom_data = custom_data if custom_data is not None else {}
59
59
 
60
+ @property
61
+ def shape(self) -> Tuple[int, int]:
62
+ return self._shape
63
+
64
+ @shape.setter
65
+ def shape(self, shape: Optional[Tuple[int, int]] = None):
66
+ self._shape = shape if shape is not None else [None, None]
67
+
60
68
  @property
61
69
  def frame_count(self) -> int:
62
70
  return self._frame_count
63
71
 
72
+ @frame_count.setter
73
+ def frame_count(self, frame_count: int):
74
+ self._frame_count = frame_count
75
+
64
76
  @property
65
77
  def name(self) -> str:
66
78
  if self._name is not None:
@@ -75,11 +87,11 @@ class VideoConverter(BaseConverter):
75
87
  return VideoAnnotation(self._shape, self._frame_count)
76
88
 
77
89
  def __init__(
78
- self,
79
- input_data: str,
80
- labeling_interface: Optional[Union[LabelingInterface, str]],
81
- upload_as_links: bool,
82
- remote_files_map: Optional[Dict[str, str]] = None,
90
+ self,
91
+ input_data: str,
92
+ labeling_interface: Optional[Union[LabelingInterface, str]],
93
+ upload_as_links: bool,
94
+ remote_files_map: Optional[Dict[str, str]] = None,
83
95
  ):
84
96
  super().__init__(input_data, labeling_interface, upload_as_links, remote_files_map)
85
97
  self._key_id_map: KeyIdMap = None
@@ -114,7 +126,9 @@ class VideoConverter(BaseConverter):
114
126
  existing_names = set([vid.name for vid in api.video.get_list(dataset_id)])
115
127
 
116
128
  # check video codecs, mimetypes and convert if needed
117
- convert_progress, convert_progress_cb = self.get_progress(self.items_count, "Preparing videos...")
129
+ convert_progress, convert_progress_cb = self.get_progress(
130
+ self.items_count, "Preparing videos..."
131
+ )
118
132
  for item in self._items:
119
133
  item_name, item_path = self.convert_to_mp4_if_needed(item.path)
120
134
  item.name = item_name
@@ -124,11 +138,14 @@ class VideoConverter(BaseConverter):
124
138
  convert_progress.close()
125
139
 
126
140
  has_large_files = False
141
+ size_progress_cb = None
127
142
  progress_cb, progress, ann_progress, ann_progress_cb = None, None, None, None
128
143
  if log_progress and not self.upload_as_links:
129
144
  progress, progress_cb = self.get_progress(self.items_count, "Uploading videos...")
130
145
  file_sizes = [get_file_size(item.path) for item in self._items]
131
- has_large_files = any([self._check_video_file_size(file_size) for file_size in file_sizes])
146
+ has_large_files = any(
147
+ [self._check_video_file_size(file_size) for file_size in file_sizes]
148
+ )
132
149
  if has_large_files:
133
150
  upload_progress = []
134
151
  size_progress_cb = self._get_video_upload_progress(upload_progress)
@@ -146,17 +163,19 @@ class VideoConverter(BaseConverter):
146
163
  item_paths.append(item.path)
147
164
  item_names.append(item.name)
148
165
 
149
- if not self.upload_as_links:
150
- # TODO: implement generating annotations for remote videos
166
+ ann = None
167
+ if not self.upload_as_links or self.supports_links:
151
168
  ann = self.to_supervisely(item, meta, renamed_classes, renamed_tags)
152
- figures_cnt += len(ann.figures)
153
- anns.append(ann)
169
+ if ann is not None:
170
+ figures_cnt += len(ann.figures)
171
+ anns.append(ann)
154
172
 
155
173
  if self.upload_as_links:
156
174
  vid_infos = api.video.upload_links(
157
175
  dataset_id,
158
176
  item_paths,
159
177
  item_names,
178
+ skip_download=True,
160
179
  )
161
180
  else:
162
181
  vid_infos = api.video.upload_paths(
@@ -164,22 +183,24 @@ class VideoConverter(BaseConverter):
164
183
  item_names,
165
184
  item_paths,
166
185
  progress_cb=progress_cb if log_progress else None,
167
- item_progress=size_progress_cb if log_progress and has_large_files else None, # pylint: disable=used-before-assignment
186
+ item_progress=(size_progress_cb if log_progress and has_large_files else None),
168
187
  )
169
- vid_ids = [vid_info.id for vid_info in vid_infos]
188
+ vid_ids = [vid_info.id for vid_info in vid_infos]
170
189
 
171
- if log_progress and has_large_files and figures_cnt > 0:
172
- ann_progress, ann_progress_cb = self.get_progress(figures_cnt, "Uploading annotations...")
190
+ if log_progress and has_large_files and figures_cnt > 0:
191
+ ann_progress, ann_progress_cb = self.get_progress(
192
+ figures_cnt, "Uploading annotations..."
193
+ )
173
194
 
174
- for video_id, ann in zip(vid_ids, anns):
175
- if ann is None:
176
- ann = VideoAnnotation(item.shape, item.frame_count)
177
- api.video.annotation.append(video_id, ann, progress_cb=ann_progress_cb)
195
+ for vid, ann, item, info in zip(vid_ids, anns, batch, vid_infos):
196
+ if ann is None:
197
+ ann = VideoAnnotation((info.frame_height, info.frame_width), info.frames_count)
198
+ api.video.annotation.append(vid, ann, progress_cb=ann_progress_cb)
178
199
 
179
200
  if log_progress and is_development():
180
- if progress is not None: # pylint: disable=possibly-used-before-assignment
201
+ if progress is not None:
181
202
  progress.close()
182
- if not self.upload_as_links and ann_progress is not None:
203
+ if ann_progress is not None:
183
204
  ann_progress.close()
184
205
  logger.info(f"Dataset ID:{dataset_id} has been successfully uploaded.")
185
206
 
@@ -268,7 +289,7 @@ class VideoConverter(BaseConverter):
268
289
  )
269
290
 
270
291
  def _check_video_file_size(self, file_size):
271
- return file_size > 20 * 1024 * 1024 # 20 MB
292
+ return file_size > 20 * 1024 * 1024 # 20 MB
272
293
 
273
294
  def _get_video_upload_progress(self, upload_progress):
274
295
  upload_progress = []
supervisely/io/fs.py CHANGED
@@ -1,6 +1,7 @@
1
1
  # coding: utf-8
2
2
 
3
3
  # docs
4
+ import asyncio
4
5
  import errno
5
6
  import mimetypes
6
7
  import os
@@ -10,6 +11,7 @@ import subprocess
10
11
  import tarfile
11
12
  from typing import Callable, Dict, Generator, List, Literal, Optional, Tuple, Union
12
13
 
14
+ import aiofiles
13
15
  import requests
14
16
  from requests.structures import CaseInsensitiveDict
15
17
  from tqdm import tqdm
@@ -1348,3 +1350,126 @@ def str_is_url(string: str) -> bool:
1348
1350
  return all([result.scheme, result.netloc])
1349
1351
  except ValueError:
1350
1352
  return False
1353
+
1354
+
1355
+ async def copy_file_async(
1356
+ src: str,
1357
+ dst: str,
1358
+ progress_cb: Optional[Union[tqdm, Callable]] = None,
1359
+ progress_cb_type: Literal["number", "size"] = "size",
1360
+ ) -> None:
1361
+ """
1362
+ Asynchronously copy file from one path to another, if destination directory doesn't exist it will be created.
1363
+
1364
+ :param src: Source file path.
1365
+ :type src: str
1366
+ :param dst: Destination file path.
1367
+ :type dst: str
1368
+ :param progress_cb: Function for tracking copy progress.
1369
+ :type progress_cb: Union[tqdm, Callable], optional
1370
+ :param progress_cb_type: Type of progress callback. Can be "number" or "size". Default is "size".
1371
+ :type progress_cb_type: Literal["number", "size"], optional
1372
+ :returns: None
1373
+ :rtype: :class:`NoneType`
1374
+ :Usage example:
1375
+
1376
+ .. code-block:: python
1377
+
1378
+ from supervisely.io.fs import async_copy_file
1379
+ await async_copy_file('/home/admin/work/projects/example/1.png', '/home/admin/work/tests/2.png')
1380
+ """
1381
+ ensure_base_path(dst)
1382
+ async with aiofiles.open(dst, "wb") as out_f:
1383
+ async with aiofiles.open(src, "rb") as in_f:
1384
+ while True:
1385
+ chunk = await in_f.read(1024 * 1024)
1386
+ if not chunk:
1387
+ break
1388
+ await out_f.write(chunk)
1389
+ if progress_cb is not None and progress_cb_type == "size":
1390
+ progress_cb(len(chunk))
1391
+ if progress_cb is not None and progress_cb_type == "number":
1392
+ progress_cb(1)
1393
+
1394
+
1395
+ async def get_file_hash_async(path: str) -> str:
1396
+ """
1397
+ Get hash from target file asynchronously.
1398
+
1399
+ :param path: Target file path.
1400
+ :type path: str
1401
+ :returns: File hash
1402
+ :rtype: :class:`str`
1403
+ :Usage example:
1404
+
1405
+ .. code-block:: python
1406
+
1407
+ from supervisely.io.fs import get_file_hash_async
1408
+ hash = await get_file_hash_async('/home/admin/work/projects/examples/1.jpeg') # rKLYA/p/P64dzidaQ/G7itxIz3ZCVnyUhEE9fSMGxU4=
1409
+ """
1410
+ async with aiofiles.open(path, "rb") as file:
1411
+ file_bytes = await file.read()
1412
+ return get_bytes_hash(file_bytes)
1413
+
1414
+
1415
+ async def unpack_archive_async(
1416
+ archive_path: str, target_dir: str, remove_junk=True, is_split=False, chunk_size_mb: int = 50
1417
+ ) -> None:
1418
+ """
1419
+ Unpacks archive to the target directory, removes junk files and directories.
1420
+ To extract a split archive, you must pass the path to the first part in archive_path. Archive parts must be in the same directory. Format: archive_name.tar.001, archive_name.tar.002, etc. Works with tar and zip.
1421
+ You can adjust the size of the chunk to read from the file, while unpacking the file from parts.
1422
+ Be careful with this parameter, it can affect the performance of the function.
1423
+
1424
+ :param archive_path: Path to the archive.
1425
+ :type archive_path: str
1426
+ :param target_dir: Path to the target directory.
1427
+ :type target_dir: str
1428
+ :param remove_junk: Remove junk files and directories. Default is True.
1429
+ :type remove_junk: bool
1430
+ :param is_split: Determines if the source archive is split into parts. If True, archive_path must be the path to the first part. Default is False.
1431
+ :type is_split: bool
1432
+ :param chunk_size_mb: Size of the chunk to read from the file. Default is 50Mb.
1433
+ :type chunk_size_mb: int
1434
+ :returns: None
1435
+ :rtype: :class:`NoneType`
1436
+ :Usage example:
1437
+
1438
+ .. code-block:: python
1439
+
1440
+ import supervisely as sly
1441
+
1442
+ archive_path = '/home/admin/work/examples.tar'
1443
+ target_dir = '/home/admin/work/projects'
1444
+
1445
+ await sly.fs.unpack_archive(archive_path, target_dir)
1446
+ """
1447
+ if is_split:
1448
+ chunk = chunk_size_mb * 1024 * 1024
1449
+ base_name = get_file_name(archive_path)
1450
+ dir_name = os.path.dirname(archive_path)
1451
+ if get_file_ext(base_name) in (".zip", ".tar"):
1452
+ ext = get_file_ext(base_name)
1453
+ base_name = get_file_name(base_name)
1454
+ else:
1455
+ ext = get_file_ext(archive_path)
1456
+ parts = sorted([f for f in os.listdir(dir_name) if f.startswith(base_name)])
1457
+ combined = os.path.join(dir_name, f"combined{ext}")
1458
+
1459
+ async with aiofiles.open(combined, "wb") as output_file:
1460
+ for part in parts:
1461
+ part_path = os.path.join(dir_name, part)
1462
+ async with aiofiles.open(part_path, "rb") as input_file:
1463
+ while True:
1464
+ data = await input_file.read(chunk)
1465
+ if not data:
1466
+ break
1467
+ await output_file.write(data)
1468
+ archive_path = combined
1469
+
1470
+ loop = asyncio.get_running_loop()
1471
+ await loop.run_in_executor(None, shutil.unpack_archive, archive_path, target_dir)
1472
+ if is_split:
1473
+ silent_remove(archive_path)
1474
+ if remove_junk:
1475
+ remove_junk_from_dir(target_dir)
@@ -1,8 +1,8 @@
1
1
  # coding: utf-8
2
2
 
3
+ import hashlib
3
4
  import os
4
5
  import os.path as osp
5
- import hashlib
6
6
  import shutil
7
7
 
8
8
  from supervisely.io import fs as sly_fs
@@ -141,6 +141,24 @@ class FileCache(FSCache):
141
141
  def _rm_obj_impl(self, st_path):
142
142
  os.remove(st_path)
143
143
 
144
+ async def _read_obj_impl_async(self, st_path, dst_path):
145
+ sly_fs.ensure_base_path(dst_path)
146
+ await sly_fs.copy_file_async(st_path, dst_path)
147
+
148
+ async def write_object_async(self, src_path, data_hash):
149
+ suffix = self._get_suffix(src_path)
150
+ st_path = self.get_storage_path(data_hash, suffix)
151
+ if not self._storage_obj_exists(st_path, suffix):
152
+ await sly_fs.copy_file_async(src_path, st_path)
153
+
154
+ async def read_object_async(self, data_hash, dst_path):
155
+ suffix = self._get_suffix(dst_path)
156
+ st_path = self.check_storage_object(data_hash, suffix)
157
+ if not st_path:
158
+ return None
159
+ await self._read_obj_impl(st_path, dst_path)
160
+ return dst_path
161
+
144
162
 
145
163
  class NNCache(FSCache):
146
164
  def _storage_obj_exists(self, st_path, suffix):
@@ -3,6 +3,7 @@
3
3
  import time
4
4
  import traceback
5
5
 
6
+ import httpx
6
7
  import requests
7
8
 
8
9
  CONNECTION_ERROR = "Temporary connection error, please wait ..."
@@ -44,18 +45,22 @@ def process_requests_exception(
44
45
  requests.exceptions.Timeout,
45
46
  requests.exceptions.TooManyRedirects,
46
47
  requests.exceptions.ChunkedEncodingError,
48
+ httpx.ConnectError,
49
+ httpx.TimeoutException,
50
+ httpx.TooManyRedirects,
51
+ httpx.ProtocolError,
47
52
  ),
48
53
  )
49
54
 
50
55
  is_server_retryable_error = (
51
- isinstance(exc, requests.exceptions.HTTPError)
56
+ isinstance(exc, (requests.exceptions.HTTPError, httpx.HTTPStatusError))
52
57
  and hasattr(exc, "response")
53
58
  and (exc.response.status_code in RETRY_STATUS_CODES)
54
59
  )
55
60
 
56
61
  is_need_ping_error = False
57
62
  if (
58
- isinstance(exc, requests.exceptions.HTTPError)
63
+ isinstance(exc, (requests.exceptions.HTTPError, httpx.HTTPStatusError))
59
64
  and hasattr(exc, "response")
60
65
  and (exc.response.status_code == 400)
61
66
  ):
@@ -91,7 +96,7 @@ def process_requests_exception(
91
96
  )
92
97
  elif response is None:
93
98
  process_unhandled_request(external_logger, exc)
94
- elif isinstance(exc, requests.exceptions.HTTPError):
99
+ elif isinstance(exc, (requests.exceptions.HTTPError, httpx.HTTPStatusError)):
95
100
  process_invalid_request(external_logger, exc, response, verbose)
96
101
  else:
97
102
  process_unhandled_request(external_logger, exc)
@@ -125,6 +130,18 @@ def process_retryable_request(
125
130
 
126
131
 
127
132
  def process_invalid_request(external_logger, exc, response, verbose=True):
133
+ if type(response) in (httpx.Response, requests.Response):
134
+ reason = (
135
+ response.content.decode("utf-8")
136
+ if not hasattr(response, "is_stream_consumed")
137
+ else "Content is not acessible for streaming responses"
138
+ )
139
+ status_code = response.status_code
140
+ url = response.url
141
+ else:
142
+ reason = "Reason is unknown"
143
+ status_code = None
144
+ url = None
128
145
  if verbose:
129
146
  external_logger.warn(
130
147
  REQUEST_FAILED,
@@ -5,7 +5,7 @@ import inspect
5
5
  import math
6
6
  import re
7
7
  from functools import partial, wraps
8
- from typing import Optional, Union, Dict
8
+ from typing import Dict, Optional, Union
9
9
 
10
10
  from tqdm import tqdm
11
11
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: supervisely
3
- Version: 6.73.220
3
+ Version: 6.73.222
4
4
  Summary: Supervisely Python SDK.
5
5
  Home-page: https://github.com/supervisely/supervisely
6
6
  Author: Supervisely
@@ -68,6 +68,8 @@ Requires-Dist: cacheout==0.14.1
68
68
  Requires-Dist: jsonschema<=4.20.0,>=2.6.0
69
69
  Requires-Dist: pyjwt<3.0.0,>=2.1.0
70
70
  Requires-Dist: zstd
71
+ Requires-Dist: aiofiles
72
+ Requires-Dist: httpx[http2]==0.27.2
71
73
  Provides-Extra: apps
72
74
  Requires-Dist: uvicorn[standard]<1.0.0,>=0.18.2; extra == "apps"
73
75
  Requires-Dist: fastapi<1.0.0,>=0.79.0; extra == "apps"