supervisely 6.73.211__py3-none-any.whl → 6.73.212__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of supervisely might be problematic.
- supervisely/nn/inference/inference.py +21 -7
- {supervisely-6.73.211.dist-info → supervisely-6.73.212.dist-info}/METADATA +1 -1
- {supervisely-6.73.211.dist-info → supervisely-6.73.212.dist-info}/RECORD +7 -7
- {supervisely-6.73.211.dist-info → supervisely-6.73.212.dist-info}/LICENSE +0 -0
- {supervisely-6.73.211.dist-info → supervisely-6.73.212.dist-info}/WHEEL +0 -0
- {supervisely-6.73.211.dist-info → supervisely-6.73.212.dist-info}/entry_points.txt +0 -0
- {supervisely-6.73.211.dist-info → supervisely-6.73.212.dist-info}/top_level.txt +0 -0
@@ -1064,7 +1064,9 @@ class Inference:
         tracker = None

         results = []
-        batch_size = state.get("batch_size",
+        batch_size = state.get("batch_size", None)
+        if batch_size is None:
+            batch_size = self.get_batch_size()
         tracks_data = {}
         direction = 1 if direction == "forward" else -1
         for batch in batched(
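The same three-line change recurs throughout inference.py: instead of passing a default straight into `state.get()`, the new code reads "batch_size" with a `None` default and falls back to `self.get_batch_size()` only when the value is missing. A minimal sketch of that resolution pattern; `resolve_batch_size` and `default_batch_size` are illustrative stand-ins, not supervisely API names:

```python
# Minimal sketch of the batch-size fallback introduced in 6.73.212.
# `resolve_batch_size` and `default_batch_size` are illustrative names,
# not part of the supervisely API.
from typing import Optional


def default_batch_size() -> int:
    # Stand-in for Inference.get_batch_size(), which supplies the model's
    # own preferred batch size.
    return 16


def resolve_batch_size(state: dict) -> int:
    # Mirror of the new pattern: fall back only when the key is absent
    # or explicitly None; any provided value is used as-is.
    batch_size: Optional[int] = state.get("batch_size", None)
    if batch_size is None:
        batch_size = default_batch_size()
    return batch_size


print(resolve_batch_size({}))                 # -> 16 (fallback)
print(resolve_batch_size({"batch_size": 4}))  # -> 4  (explicit value wins)
```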
@@ -1117,7 +1119,9 @@ class Inference:
         If "output_project_id" is None, write annotations to inference request object.
         """
         logger.debug("Inferring images...", extra={"state": state})
-        batch_size = state.get("batch_size",
+        batch_size = state.get("batch_size", None)
+        if batch_size is None:
+            batch_size = self.get_batch_size()
         output_project_id = state.get("output_project_id", None)
         images_infos = api.image.get_info_by_id_batch(images_ids)
         images_infos_dict = {im_info.id: im_info for im_info in images_infos}
@@ -1385,7 +1389,9 @@ class Inference:
         project_info = api.project.get_info_by_id(state["projectId"])
         dataset_ids = state.get("dataset_ids", None)
         cache_project_on_model = state.get("cache_project_on_model", False)
-        batch_size = state.get("batch_size",
+        batch_size = state.get("batch_size", None)
+        if batch_size is None:
+            batch_size = self.get_batch_size()

         datasets_infos = api.dataset.get_list(project_info.id, recursive=True)
         if dataset_ids is not None:
@@ -2008,7 +2014,9 @@ class Inference:
             )
             images_ids = request.state.state["images_ids"]
             # check batch size
-            batch_size = request.state.state.get("batch_size",
+            batch_size = request.state.state.get("batch_size", None)
+            if batch_size is None:
+                batch_size = self.get_batch_size()
             if self.max_batch_size is not None and batch_size > self.max_batch_size:
                 response.status_code = status.HTTP_400_BAD_REQUEST
                 return {
@@ -2045,7 +2053,9 @@ class Inference:
         def inference_video_id(response: Response, request: Request):
             logger.debug(f"'inference_video_id' request in json format:{request.state.state}")
             # check batch size
-            batch_size = request.state.state.get("batch_size",
+            batch_size = request.state.state.get("batch_size", None)
+            if batch_size is None:
+                batch_size = self.get_batch_size()
             if self.max_batch_size is not None and batch_size > self.max_batch_size:
                 response.status_code = status.HTTP_400_BAD_REQUEST
                 return {
@@ -2131,7 +2141,9 @@ class Inference:
         def inference_video_id_async(response: Response, request: Request):
             logger.debug(f"'inference_video_id_async' request in json format:{request.state.state}")
             # check batch size
-            batch_size = request.state.state.get("batch_size",
+            batch_size = request.state.state.get("batch_size", None)
+            if batch_size is None:
+                batch_size = self.get_batch_size()
             if self.max_batch_size is not None and batch_size > self.max_batch_size:
                 response.status_code = status.HTTP_400_BAD_REQUEST
                 return {
@@ -2173,7 +2185,9 @@ class Inference:
             if project_info.type != str(ProjectType.IMAGES):
                 raise ValueError("Only images projects are supported.")
             # check batch size
-            batch_size = request.state.state.get("batch_size",
+            batch_size = request.state.state.get("batch_size", None)
+            if batch_size is None:
+                batch_size = self.get_batch_size()
             if self.max_batch_size is not None and batch_size > self.max_batch_size:
                 response.status_code = status.HTTP_400_BAD_REQUEST
                 return {
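The endpoint hunks keep the existing guard that rejects a resolved batch size larger than `self.max_batch_size` with HTTP 400. A hedged sketch of how that guard composes with the new fallback, assuming a FastAPI endpoint as the context lines suggest (Response, status.HTTP_400_BAD_REQUEST); the route path, constants, and error payload below are illustrative and not taken from the diff:

```python
# Sketch of the batch-size guard pattern visible in the endpoint hunks.
# MAX_BATCH_SIZE / DEFAULT_BATCH_SIZE stand in for self.max_batch_size and
# self.get_batch_size(); the response body shape is illustrative only.
from fastapi import FastAPI, Response, status

app = FastAPI()
MAX_BATCH_SIZE = 8      # stand-in for self.max_batch_size
DEFAULT_BATCH_SIZE = 4  # stand-in for self.get_batch_size()


@app.post("/inference_example")
def inference_example(response: Response, payload: dict):
    # New fallback: use the model's default when the request omits batch_size.
    batch_size = payload.get("batch_size", None)
    if batch_size is None:
        batch_size = DEFAULT_BATCH_SIZE
    # Existing guard: reject batch sizes above the configured maximum.
    if MAX_BATCH_SIZE is not None and batch_size > MAX_BATCH_SIZE:
        response.status_code = status.HTTP_400_BAD_REQUEST
        return {
            "message": f"Batch size should be less than or equal to {MAX_BATCH_SIZE}.",
            "success": False,
        }
    return {"batch_size": batch_size, "success": True}
```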
@@ -777,7 +777,7 @@ supervisely/nn/benchmark/visualization/vis_metrics/reliability_diagram.py,sha256
 supervisely/nn/benchmark/visualization/vis_metrics/what_is.py,sha256=MDnYR-o7Mj-YE1Jwu9EcLUEPcu6rLknRx7LvV4nnUBo,842
 supervisely/nn/inference/__init__.py,sha256=mtEci4Puu-fRXDnGn8RP47o97rv3VTE0hjbYO34Zwqg,1622
 supervisely/nn/inference/cache.py,sha256=vjFYIkoV-txzAl_C_WKvS5odccBU8GHFY8iTxxnSqLU,25619
-supervisely/nn/inference/inference.py,sha256=
+supervisely/nn/inference/inference.py,sha256=pwTh6_EQ-sJNXTBD08eUMdOH8VoNF2uJUTr_P1-BuN0,114983
 supervisely/nn/inference/session.py,sha256=jmkkxbe2kH-lEgUU6Afh62jP68dxfhF5v6OGDfLU62E,35757
 supervisely/nn/inference/video_inference.py,sha256=8Bshjr6rDyLay5Za8IB8Dr6FURMO2R_v7aELasO8pR4,5746
 supervisely/nn/inference/gui/__init__.py,sha256=e3RKi93bI1r_0Dkvs_gaR1p_jkzkBMNjrcx-RVlm93k,88
@@ -957,9 +957,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
 supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
 supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
 supervisely_lib/__init__.py,sha256=7-3QnN8Zf0wj8NCr2oJmqoQWMKKPKTECvjH9pd2S5vY,159
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
+supervisely-6.73.212.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+supervisely-6.73.212.dist-info/METADATA,sha256=iToYzdSTy2-0TOu9UbwsqvC_A_UEbTohLp4GndImxq0,33086
+supervisely-6.73.212.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+supervisely-6.73.212.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
+supervisely-6.73.212.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
+supervisely-6.73.212.dist-info/RECORD,,
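The RECORD hunks follow the standard wheel RECORD format: file path, a `sha256=` digest encoded as URL-safe base64 without padding, and the file size in bytes (the RECORD entry itself leaves its hash and size fields empty). A small sketch for recomputing such an entry against an unpacked wheel, e.g. to verify the new inference.py line; the local path in the usage comment assumes you have extracted the wheel:

```python
# Recompute a wheel RECORD entry (PEP 376-style): urlsafe base64 sha256
# digest with trailing "=" padding stripped, plus the file size in bytes.
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"{path},sha256={b64},{len(data)}"


# Usage (path relative to the unpacked wheel root); compare the output
# against the corresponding line in the RECORD diff above.
# print(record_entry("supervisely/nn/inference/inference.py"))
```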