supervisely-6.73.343-py3-none-any.whl → supervisely-6.73.345-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supervisely/__init__.py +3 -2
- supervisely/_utils.py +33 -1
- supervisely/api/annotation_api.py +369 -2
- supervisely/api/api.py +14 -5
- supervisely/api/dataset_api.py +177 -1
- supervisely/api/entity_annotation/figure_api.py +84 -0
- supervisely/api/file_api.py +2 -2
- supervisely/api/image_api.py +740 -52
- supervisely/api/module_api.py +11 -0
- supervisely/api/project_api.py +6 -1
- supervisely/convert/converter.py +4 -0
- supervisely/convert/image/sly/fast_sly_image_converter.py +11 -5
- supervisely/convert/image/sly/sly_image_converter.py +41 -1
- supervisely/io/fs.py +238 -4
- supervisely/project/data_version.py +7 -1
- supervisely/project/download.py +5 -16
- supervisely/project/project.py +721 -79
- supervisely/project/project_type.py +2 -0
- supervisely/project/readme_template.md +19 -13
- {supervisely-6.73.343.dist-info → supervisely-6.73.345.dist-info}/METADATA +1 -1
- {supervisely-6.73.343.dist-info → supervisely-6.73.345.dist-info}/RECORD +25 -25
- {supervisely-6.73.343.dist-info → supervisely-6.73.345.dist-info}/LICENSE +0 -0
- {supervisely-6.73.343.dist-info → supervisely-6.73.345.dist-info}/WHEEL +0 -0
- {supervisely-6.73.343.dist-info → supervisely-6.73.345.dist-info}/entry_points.txt +0 -0
- {supervisely-6.73.343.dist-info → supervisely-6.73.345.dist-info}/top_level.txt +0 -0
supervisely/__init__.py
CHANGED
@@ -55,7 +55,7 @@ from supervisely.task.progress import (
 
 import supervisely.project as project
 from supervisely.project import read_project, get_project_class
-from supervisely.project.download import download, download_async
+from supervisely.project.download import download, download_async, download_fast
 from supervisely.project.upload import upload
 from supervisely.project.project import (
     Project,
@@ -148,6 +148,7 @@ from supervisely._utils import (
     generate_free_name,
     setup_certificates,
     is_community,
+    run_coroutine,
 )
 
 import supervisely._utils as utils
@@ -311,4 +312,4 @@ except Exception as e:
 # If new changes in Supervisely Python SDK require upgrade of the Supervisely instance
 # set a new value for the environment variable MINIMUM_INSTANCE_VERSION_FOR_SDK, otherwise
 # users can face compatibility issues, if the instance version is lower than the SDK version.
-os.environ["MINIMUM_INSTANCE_VERSION_FOR_SDK"] = "6.12.
+os.environ["MINIMUM_INSTANCE_VERSION_FOR_SDK"] = "6.12.44"

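The hunks above re-export `download_fast` (from `supervisely.project.download`) and `run_coroutine` (from `supervisely._utils`) at the top-level `supervisely` namespace and raise the minimum instance version to 6.12.44. A minimal usage sketch of the new exports, assuming `download_fast` accepts the same leading arguments as the existing `download` helper (its signature is not part of this diff):

```python
import supervisely as sly

api = sly.Api.from_env()  # needs SERVER_ADDRESS and API_TOKEN in the environment

project_id = 12345        # placeholder ID
dest_dir = "./my_project"

# New top-level export; assumed to mirror sly.download(api, project_id, dest_dir)
sly.download_fast(api, project_id, dest_dir)

# run_coroutine is now also available directly from the sly namespace
async def ping():
    return "ok"

print(sly.run_coroutine(ping()))
```
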
supervisely/_utils.py
CHANGED
@@ -500,7 +500,7 @@ def run_coroutine(coroutine):
         async def async_function():
             await asyncio.sleep(1)
             return "Hello, World!"
-
+
         coroutine = async_function()
         result = run_coroutine(coroutine)
         print(result)
@@ -531,3 +531,35 @@ def get_filename_from_headers(url):
     except Exception as e:
         print(f"Error retrieving file name from headers: {e}")
         return None
+
+
+def removesuffix(string, suffix):
+    """
+    Returns the string without the specified suffix if the string ends with that suffix.
+    Otherwise returns the original string.
+    Uses for Python versions < 3.9.
+
+    :param string: The original string.
+    :type string: str
+    :param suffix: The suffix to remove.
+    :type suffix: str
+    :return: The string without the suffix or the original string.
+    :rtype: str
+
+    :Usage example:
+     .. code-block:: python
+
+        from supervisely._utils import removesuffix
+
+        original_string = "example.txt"
+        suffix_to_remove = ".txt"
+
+        result = removesuffix(original_string, suffix_to_remove)
+        print(result)
+
+        # Output: example
+
+    """
+    if string.endswith(suffix):
+        return string[: -len(suffix)]
+    return string

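The new `removesuffix` helper duplicates the behavior of `str.removesuffix`, which only exists on Python 3.9+. A short sketch of the intent; the version dispatch below is illustrative and not part of the SDK itself:

```python
import sys

from supervisely._utils import removesuffix

name = "image_0001.png"

if sys.version_info >= (3, 9):
    stem = name.removesuffix(".png")   # built-in since Python 3.9
else:
    stem = removesuffix(name, ".png")  # SDK helper for older interpreters

assert stem == "image_0001"
```
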
supervisely/api/annotation_api.py
CHANGED

@@ -8,12 +8,22 @@ import asyncio
 import json
 from collections import defaultdict
 from copy import deepcopy
-from typing import
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    List,
+    Literal,
+    NamedTuple,
+    Optional,
+    Union,
+)
 from uuid import uuid4
 
 from tqdm import tqdm
 
-from supervisely._utils import batched
+from supervisely._utils import batched, run_coroutine
 from supervisely.annotation.annotation import Annotation, AnnotationJsonFields
 from supervisely.annotation.label import Label, LabelJsonFields
 from supervisely.annotation.tag import Tag
@@ -1728,3 +1738,360 @@ class AnnotationApi(ModuleApi):
         updated_anns = [ann.add_label(label) for ann, label in zip(anns, labels)]
 
         self._api.annotation.upload_anns(image_ids, updated_anns)
+
+    async def upload_anns_async(
+        self,
+        image_ids: List[int],
+        anns: Union[List[Annotation], Generator],
+        dataset_id: Optional[int] = None,
+        log_progress: bool = True,
+        semaphore: Optional[asyncio.Semaphore] = None,
+    ) -> None:
+        """
+        Optimized method for uploading annotations to images in large batches.
+        This method significantly improves performance when uploading large numbers of annotations
+        by processing different components in parallel batches.
+
+        IMPORTANT: If you pass anns as a generator, you must be sure that the generator will yield the same number of annotations
+        as the number of image IDs provided.
+
+        The method works by:
+        1. Separating regular figures and alpha masks for specialized processing
+        2. Batching figure creation requests to reduce API overhead
+        3. Processing image-level tags, object tags, and geometries separately
+        4. Using concurrent async operations to maximize throughput
+        5. Processing alpha mask geometries with specialized upload method
+
+        This approach can be faster than traditional sequential upload methods
+        when dealing with large annotation batches.
+
+        :param image_ids: List of image IDs in Supervisely.
+        :type image_ids: List[int]
+        :param anns: List of annotations to upload. Can be a generator or a list.
+        :type anns: Union[List[Annotation], Generator]
+        :param dataset_id: Dataset ID. If None, will be determined from image IDs or context.
+        :type dataset_id: int, optional
+        :param log_progress: Whether to log progress information.
+        :type log_progress: bool, optional
+        :param semaphore: Semaphore to control concurrency level. If None, a default will be used.
+        :type semaphore: asyncio.Semaphore, optional
+        :return: None
+        :rtype: :class:`NoneType`
+
+        :Usage example:
+
+         .. code-block:: python
+
+            import asyncio
+            import supervisely as sly
+            from tqdm import tqdm
+
+            os.environ['SERVER_ADDRESS'] = 'https://app.supervisely.com'
+            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
+            api = sly.Api.from_env()
+
+            # Prepare your annotations and image IDs
+            image_ids = [121236918, 121236919]
+            anns = [annotation1, annotation2]
+
+            # Option 1: Using the synchronous wrapper
+            api.annotation.upload_anns_fast(image_ids, anns)
+
+            # Option 2: Using the async method directly
+            upload_annotations = api.annotation.upload_anns_async(
+                image_ids,
+                anns,
+                semaphore=asyncio.Semaphore(10)  # Control concurrency
+            )
+
+            sly.run_coroutine(upload_annotations)
+        """
+        if len(image_ids) == 0:
+            return
+
+        if not isinstance(anns, Generator):
+            if len(image_ids) != len(anns):
+                raise RuntimeError(
+                    'Can not match "img_ids" and "anns" lists, len(img_ids) != len(anns)'
+                )
+
+        if semaphore is None:
+            semaphore = self._api.get_default_semaphore()
+
+        def _groupby_image_tags(image_level_tags: dict, tag_meta: ProjectMeta) -> dict:
+            """
+            Group image tags by tag_id and tag_value for efficient batch processing
+            Returns: Dict[tag_id, Dict[tag_value, List[image_ids]]]
+            """
+            result = defaultdict(lambda: defaultdict(list))
+            for img_id, tags in image_level_tags.items():
+                for tag in tags:
+                    sly_id = tag_meta.get(tag.name).sly_id
+                    value = tag.value
+                    result[sly_id][value].append(img_id)
+            return result
+
+        def _prepare_tags(tags: List[Tag]) -> List[Dict[str, Any]]:
+            """
+            Prepare tags for bulk upload
+            Returns: List[Dict[str, Any]]
+            """
+            return [
+                {
+                    ApiField.TAG_ID: tag_metas.get(tag.name).sly_id,
+                    ApiField.FIGURE_ID: None,
+                    ApiField.VALUE: tag.value,
+                }
+                for tag in tags
+            ]
+
+        # Handle context and dataset_id
+        context = self._api.optimization_context
+        context_dataset_id = context.get("dataset_id")
+        project_id = context.get("project_id")
+        project_meta = context.get("project_meta")
+
+        # Determine dataset_id with proper fallback logic
+        if dataset_id is None:
+            dataset_id = context_dataset_id
+            if dataset_id is None:
+                dataset_id = self._api.image.get_info_by_id(
+                    image_ids[0], force_metadata_for_links=False
+                ).dataset_id
+                context["dataset_id"] = dataset_id
+                project_id, project_meta = None, None
+        # If dataset_id was provided but differs from context (or context is None)
+        elif dataset_id != context_dataset_id or context_dataset_id is None:
+            context["dataset_id"] = dataset_id
+            project_id, project_meta = None, None
+
+        # Get project meta if needed
+        if not isinstance(project_meta, ProjectMeta):
+            if project_id is None:
+                project_id = self._api.dataset.get_info_by_id(dataset_id).project_id
+                context["project_id"] = project_id
+            project_meta = ProjectMeta.from_json(self._api.project.get_meta(project_id))
+            context["project_meta"] = project_meta
+
+        tag_metas = project_meta.tag_metas
+
+        # Prepare bulk data
+        regular_figures = []
+        regular_figures_tags = []
+        alpha_mask_figures = []
+        alpha_mask_geometries = []
+        alpha_mask_figures_tags = []
+        image_level_tags = {}  # Track image-level tags by image ID
+        image_tags_count = 0
+
+        for img_id, ann in zip(image_ids, anns):
+            # Handle image-level tags
+            if len(ann.img_tags) > 0:
+                image_tags_count += len(ann.img_tags)
+                image_level_tags[img_id] = [tag for tag in ann.img_tags]
+
+            if len(ann.labels) == 0:
+                continue
+
+            # Process each label in the annotation
+            for label in ann.labels:
+                obj_cls = project_meta.get_obj_class(label.obj_class.name)
+                if obj_cls is None:
+                    raise RuntimeError(
+                        f"Object class '{label.obj_class.name}' not found in project meta"
+                    )
+
+                figure_data = {
+                    ApiField.ENTITY_ID: img_id,
+                    LabelJsonFields.OBJ_CLASS_ID: obj_cls.sly_id,
+                }
+
+                if isinstance(label.geometry, AlphaMask):
+                    geometry = label.geometry.to_json()[BITMAP]
+                    figure_data[LabelJsonFields.GEOMETRY_TYPE] = AlphaMask.geometry_name()
+                    alpha_mask_figures.append(figure_data)
+                    alpha_mask_geometries.append(geometry)
+                    alpha_mask_figures_tags.append(_prepare_tags(label.tags))
+                else:
+                    figure_data[LabelJsonFields.GEOMETRY_TYPE] = label.geometry.name()
+                    figure_data[ApiField.GEOMETRY] = label.geometry.to_json()
+                    regular_figures.append(figure_data)
+                    regular_figures_tags.append(_prepare_tags(label.tags))
+
+        async def create_figures_batch(figures_batch, tags_batch, progress_cb):
+            """Create a batch of figures and associate their tags"""
+            async with semaphore:
+                response = await self._api.post_async(
+                    "figures.bulk.add",
+                    json={
+                        ApiField.DATASET_ID: dataset_id,
+                        ApiField.FIGURES: figures_batch,
+                    },
+                )
+                figure_ids = [item[ApiField.ID] for item in response.json()]
+
+                # Update tags with figure IDs
+                for figure_id, tags in zip(figure_ids, tags_batch):
+                    for tag in tags:
+                        tag[ApiField.FIGURE_ID] = figure_id
+                if progress_cb is not None:
+                    progress_cb.update(len(figures_batch))
+                return figure_ids, tags_batch
+
+        async def add_tags_to_objects(tags_batch, progress_cb):
+            """Add tags to objects in batches"""
+            if not tags_batch:
+                return
+
+            async with semaphore:
+                await self._api.post_async(
+                    "figures.tags.bulk.add",
+                    json={
+                        ApiField.PROJECT_ID: project_id,
+                        ApiField.TAGS: tags_batch,
+                    },
+                )
+                if progress_cb is not None:
+                    progress_cb.update(len(tags_batch))
+
+        async def add_tags_to_images(tag_id, tag_value, image_ids_batch, progress_cb):
+            """Add a tag to multiple images"""
+            async with semaphore:
+                await self._api.post_async(
+                    "image-tags.bulk.add-to-image",
+                    json={
+                        ApiField.TAG_ID: tag_id,
+                        ApiField.VALUE: tag_value,
+                        ApiField.IDS: image_ids_batch,
+                    },
+                )
+                if progress_cb is not None:
+                    progress_cb.update(len(image_ids_batch))
+
+        # 1. Process regular figures
+        regular_figure_tasks = []
+        batch_size = 1000
+
+        if log_progress:
+            f_pbar = tqdm(
+                desc="Uploading figures", total=len(regular_figures) + len(alpha_mask_figures)
+            )
+        else:
+            f_pbar = None
+
+        for figures_batch, tags_batch in zip(
+            batched(regular_figures, batch_size),
+            batched(regular_figures_tags, batch_size),
+        ):
+            task = create_figures_batch(figures_batch, tags_batch, f_pbar)
+            regular_figure_tasks.append(task)
+
+        # 2. Process alpha mask figures
+        alpha_mask_tasks = []
+        for figures_batch, tags_batch in zip(
+            batched(alpha_mask_figures, batch_size),
+            batched(alpha_mask_figures_tags, batch_size),
+        ):
+            task = create_figures_batch(figures_batch, tags_batch, f_pbar)
+            alpha_mask_tasks.append(task)
+
+        # Wait for all figure creation tasks to complete
+        regular_results = (
+            await asyncio.gather(*regular_figure_tasks) if regular_figure_tasks else []
+        )
+        alpha_results = await asyncio.gather(*alpha_mask_tasks) if alpha_mask_tasks else []
+
+        # 3. Upload alpha mask geometries
+        alpha_figure_ids = []
+        for figure_ids, _ in alpha_results:
+            alpha_figure_ids.extend(figure_ids)
+
+        if log_progress:
+            am_pbar = tqdm(desc="Uploading alpha mask geometries", total=len(alpha_mask_geometries))
+        else:
+            am_pbar = None
+        alpha_mask_geometry_task = self._api.image.figure.upload_geometries_batch_async(
+            alpha_figure_ids,
+            alpha_mask_geometries,
+            semaphore=semaphore,
+            progress_cb=am_pbar,
+        )
+
+        # 4. Collect all object tags
+        all_object_tags = []
+        for _, tags_batch in regular_results + alpha_results:
+            for tags in tags_batch:
+                all_object_tags.extend(tags)
+
+        # 5. Add tags to objects in batches
+        object_tag_tasks = []
+        batch_size = 1000
+
+        if log_progress:
+            ot_pbar = tqdm(desc="Uploading tags to objects", total=len(all_object_tags))
+        else:
+            ot_pbar = None
+        for tags_batch in batched(all_object_tags, batch_size):
+            task = add_tags_to_objects(tags_batch, ot_pbar)
+            object_tag_tasks.append(task)
+
+        # 6. Add tags to images
+        image_tag_tasks = []
+        batch_size = 1000
+        if log_progress:
+            it_pbar = tqdm(desc="Uploading tags to images", total=image_tags_count)
+        else:
+            it_pbar = None
+        image_tags_by_meta = _groupby_image_tags(image_level_tags, tag_metas)
+        for tag_meta_id, values_dict in image_tags_by_meta.items():
+            for tag_value, img_ids_for_tag in values_dict.items():
+                for batch in batched(img_ids_for_tag, batch_size):
+                    task = add_tags_to_images(tag_meta_id, tag_value, batch, it_pbar)
+                    image_tag_tasks.append(task)
+
+        # Execute all remaining tasks
+        await asyncio.gather(alpha_mask_geometry_task, *object_tag_tasks, *image_tag_tasks)
+
+    def upload_anns_fast(
+        self,
+        image_ids: List[int],
+        anns: List[Annotation],
+        dataset_id: Optional[int] = None,
+        log_progress: bool = True,
+    ) -> None:
+        """
+        Upload annotations to images in a dataset using optimized method.
+
+        :param image_ids: List of image IDs in Supervisely.
+        :type image_ids: List[int]
+        :param anns: List of Annotation objects.
+        :type anns: List[Annotation]
+        :param dataset_id: Dataset ID. If None, will be determined from image IDs or context.
+        :type dataset_id: int, optional
+        :param log_progress: Whether to log progress information.
+        :type log_progress: bool, optional
+        :return: None
+        :rtype: :class:`NoneType`
+
+        :Usage example:
+
+         .. code-block:: python
+
+            import supervisely as sly
+            os.environ['SERVER_ADDRESS'] = 'https://app.supervisely.com'
+            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
+            api = sly.Api.from_env()
+
+            dataset_id = 123456
+            image_ids = [121236918, 121236919]
+            anns = [annotation1, annotation2]
+            api.annotation.upload_fast(image_ids, anns, dataset_id)
+
+        """
+        upload_coroutine = self.upload_anns_async(
+            image_ids=image_ids,
+            anns=anns,
+            dataset_id=dataset_id,
+            log_progress=log_progress,
+        )
+        run_coroutine(upload_coroutine)

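The docstring above outlines the workflow: figures are created through `figures.bulk.add` in batches of 1000, alpha-mask geometries go through a dedicated upload call, and object/image tags are attached concurrently afterwards. A minimal sketch of driving the new method for a whole dataset with a generator of annotations; the dataset ID and the class name `"object"` are placeholders, and the project meta is assumed to already contain that class:

```python
import supervisely as sly

api = sly.Api.from_env()

dataset_id = 123456  # placeholder
project_id = api.dataset.get_info_by_id(dataset_id).project_id
meta = sly.ProjectMeta.from_json(api.project.get_meta(project_id))
obj_cls = meta.get_obj_class("object")  # assumed to exist in the project meta

images = api.image.get_list(dataset_id)
image_ids = [img.id for img in images]

def ann_generator():
    # Must yield exactly one annotation per image ID (see the IMPORTANT note above).
    for img in images:
        label = sly.Label(sly.Rectangle(0, 0, img.height - 1, img.width - 1), obj_cls)
        yield sly.Annotation((img.height, img.width), labels=[label])

# Async path: accepts a generator and lets you cap concurrency via a semaphore.
upload = api.annotation.upload_anns_async(image_ids, ann_generator(), dataset_id=dataset_id)
sly.run_coroutine(upload)
```
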
supervisely/api/api.py
CHANGED
@@ -83,6 +83,7 @@ API_TOKEN = "API_TOKEN"
 TASK_ID = "TASK_ID"
 SUPERVISELY_ENV_FILE = os.path.join(Path.home(), "supervisely.env")
 
+
 class ApiContext:
     """
     Context manager for the API object for optimization purposes.
@@ -374,9 +375,17 @@ class Api:
         self.retry_sleep_sec = retry_sleep_sec
 
         skip_from_env = sly_env.supervisely_skip_https_user_helper_check()
-        self._skip_https_redirect_check =
-
-
+        self._skip_https_redirect_check = (
+            skip_from_env or self.server_address in Api._checked_servers
+        )
+        self.logger.trace(
+            f"Skip HTTPS redirect check on API init: {self._skip_https_redirect_check}. ENV: {skip_from_env}. Checked servers: {Api._checked_servers}"
+        )
+        self._require_https_redirect_check = (
+            False
+            if self._skip_https_redirect_check
+            else not self.server_address.startswith("https://")
+        )
 
         if check_instance_version:
             self._check_version(None if check_instance_version is True else check_instance_version)
@@ -892,14 +901,14 @@
     def _check_https_redirect(self):
         """
         Check if HTTP server should be redirected to HTTPS.
-        If the server has already been checked before (for any instance of this class),
+        If the server has already been checked before (for any instance of this class),
         skip the check to avoid redundant network requests.
         """
         if self._require_https_redirect_check is True:
             if self.server_address in Api._checked_servers:
                 self._require_https_redirect_check = False
                 return
-
+
         try:
             response = requests.get(
                 self.server_address.replace("http://", "https://"),