supervisely 6.73.284__py3-none-any.whl → 6.73.286__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of supervisely might be problematic.

@@ -0,0 +1,362 @@
+import functools
+import inspect
+import json
+import traceback
+from threading import Lock
+from typing import Any, BinaryIO, Dict, List, Optional, Tuple, Union
+
+from fastapi import Form, Request, Response, UploadFile, status
+from pydantic import ValidationError
+
+from supervisely._utils import find_value_by_keys
+from supervisely.api.api import Api
+from supervisely.api.module_api import ApiField
+from supervisely.io import env
+from supervisely.nn.inference.inference import (
+    Inference,
+    _convert_sly_progress_to_dict,
+    _get_log_extra_for_inference_request,
+)
+from supervisely.sly_logger import logger
+
+
+def validate_key(data: Dict, key: str, type_: type):
+    # Plain ValueError is raised here: pydantic's ValidationError cannot be
+    # constructed from a bare message string.
+    if key not in data:
+        raise ValueError(f"Key {key} not found in inference request.")
+    if not isinstance(data[key], type_):
+        raise ValueError(f"Key {key} is not of type {type_}.")
+
+
+# Decorator that turns validation errors raised by an endpoint handler into an
+# HTTP 400 response when a Response object is available.
+def handle_validation(func):
+    def _find_response(args, kwargs):
+        for arg in args:
+            if isinstance(arg, Response):
+                return arg
+        for value in kwargs.values():
+            if isinstance(value, Response):
+                return value
+        return None
+
+    def _handle_exception(e, response):
+        if response is not None:
+            logger.error(f"Validation error: {e}", exc_info=True)
+            response.status_code = status.HTTP_400_BAD_REQUEST
+            return {"error": str(e), "success": False}
+        raise e
+
+    if inspect.iscoroutinefunction(func):
+
+        @functools.wraps(func)
+        async def async_wrapper(*args, **kwargs):
+            response = _find_response(args, kwargs)
+            try:
+                return await func(*args, **kwargs)
+            except (ValidationError, ValueError) as e:
+                return _handle_exception(e, response)
+
+        return async_wrapper
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        response = _find_response(args, kwargs)
+        try:
+            return func(*args, **kwargs)
+        except (ValidationError, ValueError) as e:
+            return _handle_exception(e, response)
+
+    return wrapper
+
+
+class BaseTracking(Inference):
+    def __init__(
+        self,
+        model_dir: Optional[str] = None,
+        custom_inference_settings: Optional[Union[Dict[str, Any], str]] = None,
+    ):
+        Inference.__init__(
+            self,
+            model_dir,
+            custom_inference_settings,
+            sliding_window_mode=None,
+            use_gui=False,
+        )
+
+        try:
+            self.load_on_device(model_dir, "cuda")
+        except RuntimeError:
+            logger.warning("Failed to load model on CUDA device, falling back to CPU.")
+            self.load_on_device(model_dir, "cpu")
+
+        logger.debug(
+            "Smart cache params",
+            extra={"ttl": env.smart_cache_ttl(), "maxsize": env.smart_cache_size()},
+        )
+
+    def get_info(self):
+        info = super().get_info()
+        info["task type"] = "tracking"
+        return info
+
+    def _on_inference_start(self, inference_request_uuid: str):
+        super()._on_inference_start(inference_request_uuid)
+        self._inference_requests[inference_request_uuid]["lock"] = Lock()
+
+    @staticmethod
+    def _notify_error_default(
+        api: Api, track_id: str, exception: Exception, with_traceback: bool = False
+    ):
+        error_name = type(exception).__name__
+        message = str(exception)
+        if with_traceback:
+            message = f"{message}\n{traceback.format_exc()}"
+        api.video.notify_tracking_error(track_id, error_name, message)
+
+    @staticmethod
+    def _notify_error_direct(
+        api: Api,
+        session_id: str,
+        video_id,
+        track_id: str,
+        exception: Exception,
+        with_traceback: bool = False,
+    ):
+        error_name = type(exception).__name__
+        message = str(exception)
+        if with_traceback:
+            message = f"{message}\n{traceback.format_exc()}"
+        api.vid_ann_tool.set_direct_tracking_error(
+            session_id=session_id,
+            video_id=video_id,
+            track_id=track_id,
+            message=f"{error_name}: {message}",
+        )
+
+    def _handle_error_in_async(self, uuid):
+        def decorator(func):
+            @functools.wraps(func)
+            def wrapper(*args, **kwargs):
+                try:
+                    return func(*args, **kwargs)
+                except Exception as e:
+                    inf_request = self._inference_requests.get(uuid, None)
+                    if inf_request is not None:
+                        inf_request["exception"] = str(e)
+                    logger.error(f"Error in {func.__name__} function: {e}", exc_info=True)
+                    raise e
+
+            return wrapper
+
+        return decorator
+
+    @staticmethod
+    def send_error_data(api, context):
+        def decorator(func):
+            @functools.wraps(func)
+            def wrapper(*args, **kwargs):
+                try:
+                    return func(*args, **kwargs)
+                except Exception as exc:
+                    try:
+                        track_id = context["trackId"]
+                        if ApiField.USE_DIRECT_PROGRESS_MESSAGES in context:
+                            session_id = find_value_by_keys(context, ["sessionId", "session_id"])
+                            video_id = find_value_by_keys(context, ["videoId", "video_id"])
+                            BaseTracking._notify_error_direct(
+                                api=api,
+                                session_id=session_id,
+                                video_id=video_id,
+                                track_id=track_id,
+                                exception=exc,
+                                with_traceback=False,
+                            )
+                        else:
+                            BaseTracking._notify_error_default(
+                                api=api, track_id=track_id, exception=exc, with_traceback=False
+                            )
+                    except Exception:
+                        logger.error("An error occurred while sending error data", exc_info=True)
+                    raise exc
+
+            return wrapper
+
+        return decorator
+
+    def schedule_task(self, func, *args, **kwargs):
+        # Runs `func` in the background; when an inference_request_uuid is given,
+        # the request is registered so its results and errors can be tracked.
+        inference_request_uuid = kwargs.get("inference_request_uuid", None)
+        if inference_request_uuid is None:
+            self._executor.submit(func, *args, **kwargs)
+        else:
+            self._on_inference_start(inference_request_uuid)
+            fn = self._handle_error_in_async(inference_request_uuid)(func)
+            future = self._executor.submit(
+                fn,
+                *args,
+                **kwargs,
+            )
+            end_callback = functools.partial(
+                self._on_inference_end, inference_request_uuid=inference_request_uuid
+            )
+            future.add_done_callback(end_callback)
+        logger.debug("Scheduled task.", extra={"inference_request_uuid": inference_request_uuid})
+
+    def _pop_tracking_results(self, inference_request_uuid: str, frame_range: Tuple = None):
+        # Atomically removes pending figures from the request (optionally only
+        # those within frame_range) and returns them in a JSON-ready copy.
+        inference_request = self._inference_requests[inference_request_uuid]
+        logger.debug(
+            "Pop tracking results",
+            extra={
+                "inference_request_uuid": inference_request_uuid,
+                "pending_results_len": len(inference_request["pending_results"]),
+                "frame_range": frame_range,
+            },
+        )
+        with inference_request["lock"]:
+            inference_request_copy = inference_request.copy()
+
+            if frame_range is not None:
+
+                def _in_range(figure):
+                    return (
+                        figure.frame_index >= frame_range[0]
+                        and figure.frame_index <= frame_range[1]
+                    )
+
+                inference_request_copy["pending_results"] = list(
+                    filter(_in_range, inference_request_copy["pending_results"])
+                )
+                inference_request["pending_results"] = list(
+                    filter(lambda x: not _in_range(x), inference_request["pending_results"])
+                )
+            else:
+                inference_request["pending_results"] = []
+
+        inference_request_copy.pop("lock")
+        inference_request_copy["progress"] = _convert_sly_progress_to_dict(
+            inference_request_copy["progress"]
+        )
+        inference_request_copy["pending_results"] = [
+            figure.to_json() for figure in inference_request_copy["pending_results"]
+        ]
+        return inference_request_copy
+
+    def _clear_tracking_results(self, inference_request_uuid):
+        del self._inference_requests[inference_request_uuid]
+        logger.debug("Removed an inference request.", extra={"uuid": inference_request_uuid})
+
+    def _stop_tracking(self, inference_request_uuid: str):
+        inference_request = self._inference_requests[inference_request_uuid]
+        inference_request["cancel_inference"] = True
+        logger.debug("Stopped tracking.", extra={"uuid": inference_request_uuid})
+
+    # Implement the following methods in the derived class
+    def track(self, api: Api, state: Dict, context: Dict):
+        raise NotImplementedError("Method `track` must be implemented.")
+
+    def track_api(self, api: Api, state: Dict, context: Dict):
+        raise NotImplementedError("Method `track_api` must be implemented.")
+
+    def track_api_files(
+        self,
+        files: List[BinaryIO],
+        settings: Dict,
+    ):
+        raise NotImplementedError("Method `track_api_files` must be implemented.")
+
+    def track_async(self, api: Api, state: Dict, context: Dict):
+        raise NotImplementedError("Method `track_async` must be implemented.")
+
+    def stop_tracking(self, state: Dict, context: Dict):
+        validate_key(context, "inference_request_uuid", str)
+        inference_request_uuid = context["inference_request_uuid"]
+        self._stop_tracking(inference_request_uuid)
+        return {"message": "Inference will be stopped.", "success": True}
+
+    def pop_tracking_results(self, state: Dict, context: Dict):
+        validate_key(context, "inference_request_uuid", str)
+        inference_request_uuid = context["inference_request_uuid"]
+        frame_range = find_value_by_keys(context, ["frameRange", "frame_range", "frames"])
+        tracking_results = self._pop_tracking_results(inference_request_uuid, frame_range)
+        log_extra = _get_log_extra_for_inference_request(inference_request_uuid, tracking_results)
+        logger.debug("Sending inference delta results.", extra=log_extra)
+        return tracking_results
+
+    def clear_tracking_results(self, state: Dict, context: Dict):
+        validate_key(context, "inference_request_uuid", str)
+        self._clear_tracking_results(context["inference_request_uuid"])
+        return {"message": "Inference results cleared.", "success": True}
+
+    def _register_endpoints(self):
+        server = self._app.get_server()
+
+        @server.post("/track")
+        @handle_validation
+        def track_handler(request: Request):
+            api = request.state.api
+            state = request.state.state
+            context = request.state.context
+            logger.info("Received track request.", extra={"context": context, "state": state})
+            return self.track(api, state, context)
+
+        @server.post("/track-api")
+        @handle_validation
+        async def track_api_handler(request: Request):
+            api = request.state.api
+            state = request.state.state
+            context = request.state.context
+            logger.info("Received track-api request.", extra={"context": context, "state": state})
+            result = self.track_api(api, state, context)
+            logger.info("Track-api request processed.")
+            return result
+
+        @server.post("/track-api-files")
+        @handle_validation
+        def track_api_files_handler(
+            files: List[UploadFile],
+            settings: str = Form("{}"),
+        ):
+            files = [file.file for file in files]
+            settings = json.loads(settings)
+            return self.track_api_files(files, settings)
+
+        @server.post("/track_async")
+        @handle_validation
+        def track_async_handler(request: Request):
+            api = request.state.api
+            state = request.state.state
+            context = request.state.context
+            logger.info("Received track_async request.", extra={"context": context, "state": state})
+            return self.track_async(api, state, context)
+
+        @server.post("/stop_tracking")
+        @handle_validation
+        def stop_tracking_handler(request: Request, response: Response):
+            state = request.state.state
+            context = request.state.context
+            logger.info(
+                "Received stop_tracking request.", extra={"context": context, "state": state}
+            )
+            return self.stop_tracking(state, context)
+
+        @server.post("/pop_tracking_results")
+        @handle_validation
+        def pop_tracking_results_handler(request: Request, response: Response):
+            state = request.state.state
+            context = request.state.context
+            logger.info(
+                "Received pop_tracking_results request.", extra={"context": context, "state": state}
+            )
+            return self.pop_tracking_results(state, context)
+
+        @server.post("/clear_tracking_results")
+        @handle_validation
+        def clear_tracking_results_handler(request: Request, response: Response):
+            state = request.state.state
+            context = request.state.context
+            logger.info(
+                "Received clear_tracking_results request.",
+                extra={"context": context, "state": state},
+            )
+            return self.clear_tracking_results(state, context)
+
+    def serve(self):
+        super().serve()
+        self._register_endpoints()
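
For orientation: the `BaseTracking` class added here is an abstract serving base. A concrete tracker subclasses it, implements `track`, `track_api`, `track_api_files`, and `track_async`, and the endpoints registered in `_register_endpoints` dispatch to those methods; `/pop_tracking_results` then drains the figures that the background task accumulates in `pending_results`. Below is a minimal sketch of such a subclass. The class name, the import path, and the tracking logic are hypothetical and only illustrate the dispatch contract visible in this diff:

import uuid
from typing import Dict

from supervisely.api.api import Api
# Assumed import path for the module added in this release; adjust to the
# actual location of the file inside the wheel.
from supervisely.nn.inference.tracking.base_tracking import BaseTracking


class DummyTracker(BaseTracking):
    """Hypothetical subclass; only the dispatch contract comes from BaseTracking."""

    def load_on_device(self, model_dir: str, device: str = "cpu"):
        self.device = device  # a real tracker would load model weights here

    def track_async(self, api: Api, state: Dict, context: Dict):
        # Create a request id and hand the long-running job to schedule_task();
        # the client then polls /pop_tracking_results with this uuid.
        inference_request_uuid = uuid.uuid4().hex
        self.schedule_task(
            self._track_frames,
            api,
            context,
            inference_request_uuid=inference_request_uuid,
        )
        return {
            "message": "Tracking has started.",
            "inference_request_uuid": inference_request_uuid,
            "success": True,
        }

    def _track_frames(self, api: Api, context: Dict, inference_request_uuid: str = None):
        # A real implementation would run the model frame by frame and append
        # figure objects (exposing frame_index and to_json(), as expected by
        # _pop_tracking_results) to
        # self._inference_requests[inference_request_uuid]["pending_results"].
        pass

Note that `schedule_task` forwards its keyword arguments to the scheduled function, so `_track_frames` must accept `inference_request_uuid`; `stop_tracking`, `pop_tracking_results`, and `clear_tracking_results` then operate on the uuid returned to the client.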